Diffstat (limited to 'compiler/rustc_const_eval/src')
-rw-r--r--  compiler/rustc_const_eval/src/const_eval/error.rs        |  4
-rw-r--r--  compiler/rustc_const_eval/src/const_eval/eval_queries.rs | 82
-rw-r--r--  compiler/rustc_const_eval/src/const_eval/machine.rs      | 15
-rw-r--r--  compiler/rustc_const_eval/src/interpret/cast.rs          |  4
-rw-r--r--  compiler/rustc_const_eval/src/interpret/eval_context.rs  | 14
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intern.rs        | 32
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intrinsics.rs    | 14
-rw-r--r--  compiler/rustc_const_eval/src/interpret/machine.rs       | 15
-rw-r--r--  compiler/rustc_const_eval/src/interpret/memory.rs        | 24
-rw-r--r--  compiler/rustc_const_eval/src/interpret/mod.rs           |  1
-rw-r--r--  compiler/rustc_const_eval/src/interpret/place.rs         | 47
-rw-r--r--  compiler/rustc_const_eval/src/interpret/step.rs          |  8
-rw-r--r--  compiler/rustc_const_eval/src/interpret/terminator.rs    |  2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/util.rs          | 28
-rw-r--r--  compiler/rustc_const_eval/src/interpret/validity.rs      | 19
-rw-r--r--  compiler/rustc_const_eval/src/lib.rs                     |  1
16 files changed, 246 insertions(+), 64 deletions(-)
diff --git a/compiler/rustc_const_eval/src/const_eval/error.rs b/compiler/rustc_const_eval/src/const_eval/error.rs
index 80d02589900..935329f1189 100644
--- a/compiler/rustc_const_eval/src/const_eval/error.rs
+++ b/compiler/rustc_const_eval/src/const_eval/error.rs
@@ -19,6 +19,7 @@ use crate::interpret::{ErrorHandled, InterpError, InterpErrorInfo, MachineStopTy
 pub enum ConstEvalErrKind {
     ConstAccessesMutGlobal,
     ModifiedGlobal,
+    RecursiveStatic,
     AssertFailure(AssertKind<ConstInt>),
     Panic { msg: Symbol, line: u32, col: u32, file: Symbol },
 }
@@ -31,13 +32,14 @@ impl MachineStopType for ConstEvalErrKind {
             ConstAccessesMutGlobal => const_eval_const_accesses_mut_global,
             ModifiedGlobal => const_eval_modified_global,
             Panic { .. } => const_eval_panic,
+            RecursiveStatic => const_eval_recursive_static,
             AssertFailure(x) => x.diagnostic_message(),
         }
     }
     fn add_args(self: Box<Self>, adder: &mut dyn FnMut(DiagnosticArgName, DiagnosticArgValue)) {
         use ConstEvalErrKind::*;
         match *self {
-            ConstAccessesMutGlobal | ModifiedGlobal => {}
+            RecursiveStatic | ConstAccessesMutGlobal | ModifiedGlobal => {}
             AssertFailure(kind) => kind.add_args(adder),
             Panic { msg, line, col, file } => {
                 adder("msg".into(), msg.into_diagnostic_arg());
diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
index c55d899e4d5..7099cdd5a75 100644
--- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
+++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -2,12 +2,12 @@ use either::{Left, Right};
 
 use rustc_hir::def::DefKind;
 use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo};
-use rustc_middle::mir::pretty::write_allocation_bytes;
 use rustc_middle::mir::{self, ConstAlloc, ConstValue};
 use rustc_middle::traits::Reveal;
 use rustc_middle::ty::layout::LayoutOf;
 use rustc_middle::ty::print::with_no_trimmed_paths;
 use rustc_middle::ty::{self, TyCtxt};
+use rustc_span::def_id::LocalDefId;
 use rustc_span::Span;
 use rustc_target::abi::{self, Abi};
 
@@ -17,8 +17,9 @@ use crate::errors;
 use crate::errors::ConstEvalError;
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
-    intern_const_alloc_recursive, CtfeValidationMode, GlobalId, Immediate, InternKind, InterpCx,
-    InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, StackPopCleanup,
+    create_static_alloc, intern_const_alloc_recursive, take_static_root_alloc, CtfeValidationMode,
+    GlobalId, Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind,
+    OpTy, RefTracking, StackPopCleanup,
 };
 
 // Returns a pointer to where the result lives
@@ -46,7 +47,21 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     );
     let layout = ecx.layout_of(body.bound_return_ty().instantiate(tcx, cid.instance.args))?;
     assert!(layout.is_sized());
-    let ret = ecx.allocate(layout, MemoryKind::Stack)?;
+
+    let intern_kind = if cid.promoted.is_some() {
+        InternKind::Promoted
+    } else {
+        match tcx.static_mutability(cid.instance.def_id()) {
+            Some(m) => InternKind::Static(m),
+            None => InternKind::Constant,
+        }
+    };
+
+    let ret = if let InternKind::Static(_) = intern_kind {
+        create_static_alloc(ecx, cid.instance.def_id(), layout)?
+    } else {
+        ecx.allocate(layout, MemoryKind::Stack)?
+    };
 
     trace!(
         "eval_body_using_ecx: pushing stack frame for global: {}{}",
@@ -66,14 +81,6 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     while ecx.step()? {}
 
     // Intern the result
-    let intern_kind = if cid.promoted.is_some() {
-        InternKind::Promoted
-    } else {
-        match tcx.static_mutability(cid.instance.def_id()) {
-            Some(m) => InternKind::Static(m),
-            None => InternKind::Constant,
-        }
-    };
     intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
 
     Ok(ret)
@@ -250,10 +257,36 @@ pub fn eval_to_const_value_raw_provider<'tcx>(
 }
 
 #[instrument(skip(tcx), level = "debug")]
+pub fn eval_static_initializer_provider<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    def_id: LocalDefId,
+) -> ::rustc_middle::mir::interpret::EvalStaticInitializerRawResult<'tcx> {
+    assert!(tcx.is_static(def_id.to_def_id()));
+
+    let instance = ty::Instance::mono(tcx, def_id.to_def_id());
+    let cid = rustc_middle::mir::interpret::GlobalId { instance, promoted: None };
+    let mut ecx = InterpCx::new(
+        tcx,
+        tcx.def_span(def_id),
+        ty::ParamEnv::reveal_all(),
+        // Statics (and promoteds inside statics) may access other statics, because unlike consts
+        // they do not have to behave "as if" they were evaluated at runtime.
+        CompileTimeInterpreter::new(CanAccessMutGlobal::Yes, CheckAlignment::Error),
+    );
+    let alloc_id = eval_in_interpreter(&mut ecx, cid, true)?.alloc_id;
+    let alloc = take_static_root_alloc(&mut ecx, alloc_id);
+    let alloc = tcx.mk_const_alloc(alloc);
+    Ok(alloc)
+}
+
+#[instrument(skip(tcx), level = "debug")]
 pub fn eval_to_allocation_raw_provider<'tcx>(
     tcx: TyCtxt<'tcx>,
     key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
 ) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
+    // This shouldn't be used for statics, since statics are conceptually places,
+    // not values -- so what we do here could break pointer identity.
+    assert!(key.value.promoted.is_some() || !tcx.is_static(key.value.instance.def_id()));
     // Const eval always happens in Reveal::All mode in order to be able to use the hidden types of
     // opaque types. This is needed for trivial things like `size_of`, but also for using associated
     // types that are not specified in the opaque type.
@@ -273,7 +306,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
     let def = cid.instance.def.def_id();
     let is_static = tcx.is_static(def);
 
-    let ecx = InterpCx::new(
+    let mut ecx = InterpCx::new(
         tcx,
         tcx.def_span(def),
         key.param_env,
@@ -283,11 +316,11 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
         // so we have to reject reading mutable global memory.
         CompileTimeInterpreter::new(CanAccessMutGlobal::from(is_static), CheckAlignment::Error),
     );
-    eval_in_interpreter(ecx, cid, is_static)
+    eval_in_interpreter(&mut ecx, cid, is_static)
 }
 
 pub fn eval_in_interpreter<'mir, 'tcx>(
-    mut ecx: InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
+    ecx: &mut InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
     cid: GlobalId<'tcx>,
     is_static: bool,
 ) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
@@ -295,7 +328,7 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
     debug_assert_eq!(is_static, ecx.tcx.static_mutability(cid.instance.def_id()).is_some());
 
     let res = ecx.load_mir(cid.instance.def, cid.promoted);
-    match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, body)) {
+    match res.and_then(|body| eval_body_using_ecx(ecx, cid, body)) {
         Err(error) => {
             let (error, backtrace) = error.into_parts();
             backtrace.print_backtrace();
@@ -330,8 +363,11 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
         }
         Ok(mplace) => {
             // Since evaluation had no errors, validate the resulting constant.
-            // This is a separate `try` block to provide more targeted error reporting.
+
+            // Temporarily allow access to the static_root_alloc_id for the purpose of validation.
+            let static_root_alloc_id = ecx.machine.static_root_alloc_id.take();
             let validation = const_validate_mplace(&ecx, &mplace, cid);
+            ecx.machine.static_root_alloc_id = static_root_alloc_id;
 
             let alloc_id = mplace.ptr().provenance.unwrap().alloc_id();
 
@@ -383,15 +419,9 @@ pub fn const_report_error<'mir, 'tcx>(
 
     let ub_note = matches!(error, InterpError::UndefinedBehavior(_)).then(|| {});
 
-    let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner();
-    let mut bytes = String::new();
-    if alloc.size() != abi::Size::ZERO {
-        bytes = "\n".into();
-        // FIXME(translation) there might be pieces that are translatable.
-        write_allocation_bytes(*ecx.tcx, alloc, &mut bytes, "    ").unwrap();
-    }
-    let raw_bytes =
-        errors::RawBytesNote { size: alloc.size().bytes(), align: alloc.align.bytes(), bytes };
+    let bytes = ecx.print_alloc_bytes_for_diagnostics(alloc_id);
+    let (size, align, _) = ecx.get_alloc_info(alloc_id);
+    let raw_bytes = errors::RawBytesNote { size: size.bytes(), align: align.bytes(), bytes };
 
     crate::const_eval::report(
         *ecx.tcx,
diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs
index 5019bec388c..2c60ede7975 100644
--- a/compiler/rustc_const_eval/src/const_eval/machine.rs
+++ b/compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -58,6 +58,9 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {
 
     /// Whether to check alignment during evaluation.
     pub(super) check_alignment: CheckAlignment,
+
+    /// Used to prevent reads from a static's base allocation, as that may allow for self-initialization.
+    pub(crate) static_root_alloc_id: Option<AllocId>,
 }
 
 #[derive(Copy, Clone)]
@@ -91,6 +94,7 @@ impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
             stack: Vec::new(),
             can_access_mut_global,
             check_alignment,
+            static_root_alloc_id: None,
         }
     }
 }
@@ -746,6 +750,17 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
         // Everything else is fine.
         Ok(())
     }
+
+    fn before_alloc_read(
+        ecx: &InterpCx<'mir, 'tcx, Self>,
+        alloc_id: AllocId,
+    ) -> InterpResult<'tcx> {
+        if Some(alloc_id) == ecx.machine.static_root_alloc_id {
+            Err(ConstEvalErrKind::RecursiveStatic.into())
+        } else {
+            Ok(())
+        }
+    }
 }
 
 // Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
diff --git a/compiler/rustc_const_eval/src/interpret/cast.rs b/compiler/rustc_const_eval/src/interpret/cast.rs
index 6d470ff162e..a88e130cd4b 100644
--- a/compiler/rustc_const_eval/src/interpret/cast.rs
+++ b/compiler/rustc_const_eval/src/interpret/cast.rs
@@ -153,7 +153,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     );
                 }
 
-                self.copy_op(src, dest, /*allow_transmute*/ true)?;
+                self.copy_op_allow_transmute(src, dest)?;
             }
         }
         Ok(())
@@ -441,7 +441,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
                         // Skip 1-ZST fields.
                     } else if src_field.layout.ty == cast_ty_field.ty {
-                        self.copy_op(&src_field, &dst_field, /*allow_transmute*/ false)?;
+                        self.copy_op(&src_field, &dst_field)?;
                     } else {
                         if found_cast_field {
                             span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs
index 8af69220e03..517994d4741 100644
--- a/compiler/rustc_const_eval/src/interpret/eval_context.rs
+++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -899,7 +899,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 .local_to_op(self.frame(), mir::RETURN_PLACE, None)
                 .expect("return place should always be live");
             let dest = self.frame().return_place.clone();
-            let err = self.copy_op(&op, &dest, /*allow_transmute*/ true);
+            let err = if self.stack().len() == 1 {
+                // The initializer of constants and statics will get validated separately
+                // after the constant has been fully evaluated. While we could fall back to the default
+                // code path, that will cause -Zenforce-validity to cycle on static initializers.
+                // Reading from a static's memory is not allowed during its evaluation, and will always
+                // trigger a cycle error. Validation must read from the memory of the current item.
+                // For Miri this means we do not validate the root frame return value,
+                // but Miri anyway calls `read_target_isize` on that so separate validation
+                // is not needed.
+                self.copy_op_no_dest_validation(&op, &dest)
+            } else {
+                self.copy_op_allow_transmute(&op, &dest)
+            };
             trace!("return value: {:?}", self.dump_place(&dest));
             // We delay actually short-circuiting on this error until *after* the stack frame is
             // popped, since we want this error to be attributed to the caller, whose type defines
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 7feac6156bc..959ec2ca865 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -85,6 +85,8 @@ pub enum InternKind {
 ///
 /// This *cannot raise an interpreter error*. Doing so is left to validation, which
 /// tracks where in the value we are and thus can show much better error messages.
+///
+/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
 #[instrument(level = "debug", skip(ecx))]
 pub fn intern_const_alloc_recursive<
     'mir,
@@ -97,12 +99,12 @@ pub fn intern_const_alloc_recursive<
 ) -> Result<(), ErrorGuaranteed> {
     // We are interning recursively, and for mutability we are distinguishing the "root" allocation
     // that we are starting in, and all other allocations that we are encountering recursively.
-    let (base_mutability, inner_mutability) = match intern_kind {
+    let (base_mutability, inner_mutability, is_static) = match intern_kind {
         InternKind::Constant | InternKind::Promoted => {
             // Completely immutable. Interning anything mutably here can only lead to unsoundness,
             // since all consts are conceptually independent values but share the same underlying
             // memory.
-            (Mutability::Not, Mutability::Not)
+            (Mutability::Not, Mutability::Not, false)
         }
         InternKind::Static(Mutability::Not) => {
             (
@@ -115,22 +117,31 @@ pub fn intern_const_alloc_recursive<
                 // Inner allocations are never mutable. They can only arise via the "tail
                 // expression" / "outer scope" rule, and we treat them consistently with `const`.
                 Mutability::Not,
+                true,
             )
         }
         InternKind::Static(Mutability::Mut) => {
             // Just make everything mutable. We accept code like
             // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
-            (Mutability::Mut, Mutability::Mut)
+            (Mutability::Mut, Mutability::Mut, true)
         }
     };
 
     // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
+    trace!(?base_alloc_id, ?base_mutability);
     // First we intern the base allocation, as it requires a different mutability.
     // This gives us the initial set of nested allocations, which will then all be processed
     // recursively in the loop below.
-    let mut todo: Vec<_> =
-        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect();
+    let mut todo: Vec<_> = if is_static {
+        // Do not steal the root allocation, we need it later for `take_static_root_alloc`
+        // But still change its mutability to match the requested one.
+        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
+        alloc.1.mutability = base_mutability;
+        alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
+    } else {
+        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect()
+    };
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
     let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
@@ -148,7 +159,17 @@ pub fn intern_const_alloc_recursive<
     // before validation, and interning doesn't know the type of anything, this means we can't show
     // better errors. Maybe we should consider doing validation before interning in the future.
     while let Some(prov) = todo.pop() {
+        trace!(?prov);
         let alloc_id = prov.alloc_id();
+
+        if base_alloc_id == alloc_id && is_static {
+            // This is a pointer to the static itself. It's ok for a static to refer to itself,
+            // even mutably. Whether that mutable pointer is legal at all is checked in validation.
+            // See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
+            // We also already collected all the nested allocations, so there's no need to do that again.
+            continue;
+        }
+
         // Crucially, we check this *before* checking whether the `alloc_id`
         // has already been interned. The point of this check is to ensure that when
         // there are multiple pointers to the same allocation, they are *all* immutable.
@@ -176,6 +197,7 @@ pub fn intern_const_alloc_recursive<
             // `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
             // on the promotion analysis not screwing up to ensure that it is sound to intern
             // promoteds as immutable.
+            trace!("found bad mutable pointer");
             found_bad_mutable_pointer = true;
         }
         if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
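The `base_alloc_id == alloc_id` early-`continue` above handles statics that legitimately point back at themselves. A simplified sketch of such a static (not the referenced tests/ui/statics/recursive_interior_mut.rs test, which additionally involves interior mutability):

    // The root allocation of SELF contains a pointer whose provenance is the
    // static's own AllocId, so recursive interning must skip it rather than
    // try to intern (or steal) the root allocation a second time.
    struct Node {
        next: &'static Node,
    }

    static SELF: Node = Node { next: &SELF };

    fn main() {
        // Both references point at the same root allocation.
        assert!(std::ptr::eq(&SELF, SELF.next));
    }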
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
index 7991f90b815..f020616f6d8 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
@@ -120,7 +120,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 let val = self.tcx.span_as_caller_location(span);
                 let val =
                     self.const_val_to_op(val, self.tcx.caller_location_ty(), Some(dest.layout))?;
-                self.copy_op(&val, dest, /* allow_transmute */ false)?;
+                self.copy_op(&val, dest)?;
             }
 
             sym::min_align_of_val | sym::size_of_val => {
@@ -157,7 +157,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     tcx.const_eval_global_id(self.param_env, gid, Some(tcx.span))
                 })?;
                 let val = self.const_val_to_op(val, ty, Some(dest.layout))?;
-                self.copy_op(&val, dest, /*allow_transmute*/ false)?;
+                self.copy_op(&val, dest)?;
             }
 
             sym::ctpop
@@ -391,7 +391,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     } else {
                         self.project_index(&input, i)?.into()
                     };
-                    self.copy_op(&value, &place, /*allow_transmute*/ false)?;
+                    self.copy_op(&value, &place)?;
                 }
             }
             sym::simd_extract => {
@@ -401,15 +401,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     index < input_len,
                     "index `{index}` must be in bounds of vector with length {input_len}"
                 );
-                self.copy_op(
-                    &self.project_index(&input, index)?,
-                    dest,
-                    /*allow_transmute*/ false,
-                )?;
+                self.copy_op(&self.project_index(&input, index)?, dest)?;
             }
             sym::likely | sym::unlikely | sym::black_box => {
                 // These just return their argument
-                self.copy_op(&args[0], dest, /*allow_transmute*/ false)?;
+                self.copy_op(&args[0], dest)?;
             }
             sym::raw_eq => {
                 let result = self.raw_eq_intrinsic(&args[0], &args[1])?;
diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs
index b981a1ee2ca..0106ec425bc 100644
--- a/compiler/rustc_const_eval/src/interpret/machine.rs
+++ b/compiler/rustc_const_eval/src/interpret/machine.rs
@@ -388,6 +388,8 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
     /// Takes read-only access to the allocation so we can keep all the memory read
     /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
     /// need to mutate.
+    ///
+    /// This is not invoked for ZST accesses, as no read actually happens.
     #[inline(always)]
     fn before_memory_read(
         _tcx: TyCtxtAt<'tcx>,
@@ -399,7 +401,20 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
         Ok(())
     }
 
+    /// Hook for performing extra checks on any memory read access,
+    /// that involves an allocation, even ZST reads.
+    ///
+    /// Used to prevent statics from self-initializing by reading from their own memory
+    /// as it is being initialized.
+    fn before_alloc_read(
+        _ecx: &InterpCx<'mir, 'tcx, Self>,
+        _alloc_id: AllocId,
+    ) -> InterpResult<'tcx> {
+        Ok(())
+    }
+
     /// Hook for performing extra checks on a memory write access.
+    /// This is not invoked for ZST accesses, as no write actually happens.
     #[inline(always)]
     fn before_memory_write(
         _tcx: TyCtxtAt<'tcx>,
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index 4acf4ed893c..2e642ac15b4 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -624,19 +624,20 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             size,
             CheckInAllocMsg::MemoryAccessTest,
             |alloc_id, offset, prov| {
+                // We want to call the hook on *all* accesses that involve an AllocId,
+                // including zero-sized accesses. That means we have to do it here
+                // rather than below in the `Some` branch.
+                M::before_alloc_read(self, alloc_id)?;
                 let alloc = self.get_alloc_raw(alloc_id)?;
                 Ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
             },
         )?;
+
         if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
             let range = alloc_range(offset, size);
             M::before_memory_read(self.tcx, &self.machine, &alloc.extra, (alloc_id, prov), range)?;
             Ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
         } else {
-            // Even in this branch we have to be sure that we actually access the allocation, in
-            // order to ensure that `static FOO: Type = FOO;` causes a cycle error instead of
-            // magically pulling *any* ZST value from the ether. However, the `get_raw` above is
-            // always called when `ptr` has an `AllocId`.
             Ok(None)
         }
     }
@@ -855,6 +856,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         DumpAllocs { ecx: self, allocs }
     }
 
+    /// Print the allocation's bytes, without any nested allocations.
+    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
+        // Using the "raw" access to avoid the `before_alloc_read` hook, we specifically
+        // want to be able to read all memory for diagnostics, even if that is cyclic.
+        let alloc = self.get_alloc_raw(id).unwrap();
+        let mut bytes = String::new();
+        if alloc.size() != Size::ZERO {
+            bytes = "\n".into();
+            // FIXME(translation) there might be pieces that are translatable.
+            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, "    ")
+                .unwrap();
+        }
+        bytes
+    }
+
     /// Find leaked allocations. Allocations reachable from `static_roots` or a `Global` allocation
     /// are not considered leaked, as well as leaks whose kind's `may_leak()` returns true.
     pub fn find_leaked_allocations(
diff --git a/compiler/rustc_const_eval/src/interpret/mod.rs b/compiler/rustc_const_eval/src/interpret/mod.rs
index c1b6ce4eb4e..a15e52d07e6 100644
--- a/compiler/rustc_const_eval/src/interpret/mod.rs
+++ b/compiler/rustc_const_eval/src/interpret/mod.rs
@@ -39,4 +39,5 @@ use self::{
 };
 
 pub(crate) use self::intrinsics::eval_nullary_intrinsic;
+pub(crate) use self::util::{create_static_alloc, take_static_root_alloc};
 use eval_context::{from_known_layout, mir_assign_valid_types};
diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index 03d1dc9fd3d..6e987784ff9 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -759,14 +759,57 @@ where
     }
 
     /// Copies the data from an operand to a place.
+    /// The layouts of the `src` and `dest` may disagree.
+    /// Does not perform validation of the destination.
+    /// The only known use case for this function is checking the return
+    /// value of a static during stack frame popping.
+    #[inline(always)]
+    pub(super) fn copy_op_no_dest_validation(
+        &mut self,
+        src: &impl Readable<'tcx, M::Provenance>,
+        dest: &impl Writeable<'tcx, M::Provenance>,
+    ) -> InterpResult<'tcx> {
+        self.copy_op_inner(
+            src, dest, /* allow_transmute */ true, /* validate_dest */ false,
+        )
+    }
+
+    /// Copies the data from an operand to a place.
+    /// The layouts of the `src` and `dest` may disagree.
+    #[inline(always)]
+    pub fn copy_op_allow_transmute(
+        &mut self,
+        src: &impl Readable<'tcx, M::Provenance>,
+        dest: &impl Writeable<'tcx, M::Provenance>,
+    ) -> InterpResult<'tcx> {
+        self.copy_op_inner(
+            src, dest, /* allow_transmute */ true, /* validate_dest */ true,
+        )
+    }
+
+    /// Copies the data from an operand to a place.
+    /// `src` and `dest` must have the same layout and the copied value will be validated.
+    #[inline(always)]
+    pub fn copy_op(
+        &mut self,
+        src: &impl Readable<'tcx, M::Provenance>,
+        dest: &impl Writeable<'tcx, M::Provenance>,
+    ) -> InterpResult<'tcx> {
+        self.copy_op_inner(
+            src, dest, /* allow_transmute */ false, /* validate_dest */ true,
+        )
+    }
+
+    /// Copies the data from an operand to a place.
     /// `allow_transmute` indicates whether the layouts may disagree.
     #[inline(always)]
     #[instrument(skip(self), level = "debug")]
-    pub fn copy_op(
+    fn copy_op_inner(
         &mut self,
         src: &impl Readable<'tcx, M::Provenance>,
         dest: &impl Writeable<'tcx, M::Provenance>,
         allow_transmute: bool,
+        validate_dest: bool,
     ) -> InterpResult<'tcx> {
         // Generally for transmutation, data must be valid both at the old and new type.
         // But if the types are the same, the 2nd validation below suffices.
@@ -777,7 +820,7 @@ where
         // Do the actual copy.
         self.copy_op_no_validate(src, dest, allow_transmute)?;
 
-        if M::enforce_validity(self, dest.layout()) {
+        if validate_dest && M::enforce_validity(self, dest.layout()) {
             // Data got changed, better make sure it matches the type!
             self.validate_operand(&dest.to_op(self)?)?;
         }
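For quick reference (summarizing the diff above, not adding new behavior), the three public entry points differ only in the flags they forward to `copy_op_inner`:

    copy_op(src, dest)                      // allow_transmute = false, validate_dest = true
    copy_op_allow_transmute(src, dest)      // allow_transmute = true,  validate_dest = true
    copy_op_no_dest_validation(src, dest)   // allow_transmute = true,  validate_dest = false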
diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs
index 23f3d7eb67d..d4c96f4573d 100644
--- a/compiler/rustc_const_eval/src/interpret/step.rs
+++ b/compiler/rustc_const_eval/src/interpret/step.rs
@@ -151,12 +151,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             Use(ref operand) => {
                 // Avoid recomputing the layout
                 let op = self.eval_operand(operand, Some(dest.layout))?;
-                self.copy_op(&op, &dest, /*allow_transmute*/ false)?;
+                self.copy_op(&op, &dest)?;
             }
 
             CopyForDeref(place) => {
                 let op = self.eval_place_to_op(place, Some(dest.layout))?;
-                self.copy_op(&op, &dest, /* allow_transmute*/ false)?;
+                self.copy_op(&op, &dest)?;
             }
 
             BinaryOp(bin_op, box (ref left, ref right)) => {
@@ -316,7 +316,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             let field_index = active_field_index.unwrap_or(field_index);
             let field_dest = self.project_field(&variant_dest, field_index.as_usize())?;
             let op = self.eval_operand(operand, Some(field_dest.layout))?;
-            self.copy_op(&op, &field_dest, /*allow_transmute*/ false)?;
+            self.copy_op(&op, &field_dest)?;
         }
         self.write_discriminant(variant_index, dest)
     }
@@ -339,7 +339,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         } else {
             // Write the src to the first element.
             let first = self.project_index(&dest, 0)?;
-            self.copy_op(&src, &first, /*allow_transmute*/ false)?;
+            self.copy_op(&src, &first)?;
 
             // This is performance-sensitive code for big static/const arrays! So we
             // avoid writing each operand individually and instead just make many copies
diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs
index 4037220e5ed..b2207c3d310 100644
--- a/compiler/rustc_const_eval/src/interpret/terminator.rs
+++ b/compiler/rustc_const_eval/src/interpret/terminator.rs
@@ -481,7 +481,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
         // is true for all `copy_op`, but there are a lot of special cases for argument passing
         // specifically.)
-        self.copy_op(&caller_arg_copy, &callee_arg, /*allow_transmute*/ true)?;
+        self.copy_op_allow_transmute(&caller_arg_copy, &callee_arg)?;
         // If this was an in-place pass, protect the place it comes from for the duration of the call.
         if let FnArg::InPlace(place) = caller_arg {
             M::protect_in_place_function_argument(self, place)?;
diff --git a/compiler/rustc_const_eval/src/interpret/util.rs b/compiler/rustc_const_eval/src/interpret/util.rs
index 3a9ee904734..2a13671a829 100644
--- a/compiler/rustc_const_eval/src/interpret/util.rs
+++ b/compiler/rustc_const_eval/src/interpret/util.rs
@@ -1,9 +1,15 @@
-use rustc_middle::mir::interpret::InterpResult;
+use crate::const_eval::CompileTimeEvalContext;
+use crate::interpret::{MemPlaceMeta, MemoryKind};
+use rustc_middle::mir::interpret::{AllocId, Allocation, InterpResult, Pointer};
+use rustc_middle::ty::layout::TyAndLayout;
 use rustc_middle::ty::{
     self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
 };
+use rustc_span::def_id::DefId;
 use std::ops::ControlFlow;
 
+use super::MPlaceTy;
+
 /// Checks whether a type contains generic parameters which must be instantiated.
 ///
 /// In case it does, returns a `TooGeneric` const eval error. Note that due to polymorphization
@@ -73,3 +79,23 @@ where
         Ok(())
     }
 }
+
+pub(crate) fn take_static_root_alloc<'mir, 'tcx: 'mir>(
+    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
+    alloc_id: AllocId,
+) -> Allocation {
+    ecx.memory.alloc_map.swap_remove(&alloc_id).unwrap().1
+}
+
+pub(crate) fn create_static_alloc<'mir, 'tcx: 'mir>(
+    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
+    static_def_id: DefId,
+    layout: TyAndLayout<'tcx>,
+) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
+    let alloc = Allocation::try_uninit(layout.size, layout.align.abi)?;
+    let alloc_id = ecx.tcx.reserve_and_set_static_alloc(static_def_id);
+    assert_eq!(ecx.machine.static_root_alloc_id, None);
+    ecx.machine.static_root_alloc_id = Some(alloc_id);
+    assert!(ecx.memory.alloc_map.insert(alloc_id, (MemoryKind::Stack, alloc)).is_none());
+    Ok(ecx.ptr_with_meta_to_mplace(Pointer::from(alloc_id).into(), MemPlaceMeta::None, layout))
+}
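Taken together with the eval_queries.rs changes above, these two helpers bracket the evaluation of a static. A condensed outline of the lifecycle, simplified from the diff with error handling elided:

    // 1. create_static_alloc: reserve the static's AllocId, insert an uninitialized
    //    allocation into ecx.memory.alloc_map, and record it as
    //    machine.static_root_alloc_id so before_alloc_read can reject self-reads.
    // 2. eval_body_using_ecx evaluates the initializer with that allocation as the
    //    return place.
    // 3. intern_const_alloc_recursive (InternKind::Static) interns the nested
    //    allocations but leaves the root allocation in the alloc_map.
    // 4. take_static_root_alloc removes the root allocation from the alloc_map and
    //    eval_static_initializer_provider wraps it via tcx.mk_const_alloc.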
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index eb9f3fee165..08a2e38bfa1 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -27,9 +27,9 @@ use rustc_target::abi::{
 use std::hash::Hash;
 
 use super::{
-    format_interp_error, AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx,
-    InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy, Pointer, Projectable, Scalar,
-    ValueVisitor,
+    format_interp_error, machine::AllocMap, AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy,
+    Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy, Pointer, Projectable,
+    Scalar, ValueVisitor,
 };
 
 // for the validation errors
@@ -712,11 +712,14 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
     fn in_mutable_memory(&self, op: &OpTy<'tcx, M::Provenance>) -> bool {
         if let Some(mplace) = op.as_mplace_or_imm().left() {
             if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
-                if self.ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().mutability
-                    == Mutability::Mut
-                {
-                    return true;
-                }
+                let mutability = match self.ecx.tcx.global_alloc(alloc_id) {
+                    GlobalAlloc::Static(_) => {
+                        self.ecx.memory.alloc_map.get(alloc_id).unwrap().1.mutability
+                    }
+                    GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
+                    _ => span_bug!(self.ecx.tcx.span, "not a memory allocation"),
+                };
+                return mutability == Mutability::Mut;
             }
         }
         false
diff --git a/compiler/rustc_const_eval/src/lib.rs b/compiler/rustc_const_eval/src/lib.rs
index 839cfd8d85a..e33f374c359 100644
--- a/compiler/rustc_const_eval/src/lib.rs
+++ b/compiler/rustc_const_eval/src/lib.rs
@@ -40,6 +40,7 @@ pub fn provide(providers: &mut Providers) {
     const_eval::provide(providers);
     providers.eval_to_const_value_raw = const_eval::eval_to_const_value_raw_provider;
     providers.eval_to_allocation_raw = const_eval::eval_to_allocation_raw_provider;
+    providers.eval_static_initializer = const_eval::eval_static_initializer_provider;
     providers.hooks.const_caller_location = util::caller_location::const_caller_location_provider;
     providers.eval_to_valtree = |tcx, param_env_and_value| {
         let (param_env, raw) = param_env_and_value.into_parts();