Diffstat (limited to 'compiler/rustc_mir/src/const_eval')
-rw-r--r--   compiler/rustc_mir/src/const_eval/error.rs          206
-rw-r--r--   compiler/rustc_mir/src/const_eval/eval_queries.rs   398
-rw-r--r--   compiler/rustc_mir/src/const_eval/fn_queries.rs     167
-rw-r--r--   compiler/rustc_mir/src/const_eval/machine.rs        372
-rw-r--r--   compiler/rustc_mir/src/const_eval/mod.rs             69
5 files changed, 1212 insertions, 0 deletions
diff --git a/compiler/rustc_mir/src/const_eval/error.rs b/compiler/rustc_mir/src/const_eval/error.rs
new file mode 100644
index 00000000000..044d27a6a9d
--- /dev/null
+++ b/compiler/rustc_mir/src/const_eval/error.rs
@@ -0,0 +1,206 @@
+use std::error::Error;
+use std::fmt;
+
+use rustc_errors::{DiagnosticBuilder, ErrorReported};
+use rustc_hir as hir;
+use rustc_middle::mir::AssertKind;
+use rustc_middle::ty::{layout::LayoutError, query::TyCtxtAt, ConstInt};
+use rustc_span::{Span, Symbol};
+
+use super::InterpCx;
+use crate::interpret::{
+    struct_error, ErrorHandled, FrameInfo, InterpError, InterpErrorInfo, Machine,
+};
+
+/// The CTFE machine has some custom error kinds.
+#[derive(Clone, Debug)]
+pub enum ConstEvalErrKind {
+    NeedsRfc(String),
+    ConstAccessesStatic,
+    ModifiedGlobal,
+    AssertFailure(AssertKind<ConstInt>),
+    Panic { msg: Symbol, line: u32, col: u32, file: Symbol },
+}
+
+// The errors become `MachineStop` with plain strings when being raised.
+// `ConstEvalErr` (defined below in this file) knows how to handle these.
+impl<'tcx> Into<InterpErrorInfo<'tcx>> for ConstEvalErrKind {
+    fn into(self) -> InterpErrorInfo<'tcx> {
+        err_machine_stop!(self.to_string()).into()
+    }
+}
+
+impl fmt::Display for ConstEvalErrKind {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        use self::ConstEvalErrKind::*;
+        match *self {
+            NeedsRfc(ref msg) => {
+                write!(f, "\"{}\" needs an rfc before being allowed inside constants", msg)
+            }
+            ConstAccessesStatic => write!(f, "constant accesses static"),
+            ModifiedGlobal => {
+                write!(f, "modifying a static's initial value from another static's initializer")
+            }
+            AssertFailure(ref msg) => write!(f, "{:?}", msg),
+            Panic { msg, line, col, file } => {
+                write!(f, "the evaluated program panicked at '{}', {}:{}:{}", msg, file, line, col)
+            }
+        }
+    }
+}
+
+impl Error for ConstEvalErrKind {}
+
+/// When const-evaluation errors, this type is constructed with the resulting information,
+/// and then used to emit the error as a lint or hard error.
+#[derive(Debug)]
+pub struct ConstEvalErr<'tcx> {
+    pub span: Span,
+    pub error: InterpError<'tcx>,
+    pub stacktrace: Vec<FrameInfo<'tcx>>,
+}
+
+impl<'tcx> ConstEvalErr<'tcx> {
+    /// Turn an interpreter error into something to report to the user.
+    /// As a side-effect, if RUSTC_CTFE_BACKTRACE is set, this prints the backtrace.
+    /// Should be called only if the error is actually going to be reported!
+    pub fn new<'mir, M: Machine<'mir, 'tcx>>(
+        ecx: &InterpCx<'mir, 'tcx, M>,
+        error: InterpErrorInfo<'tcx>,
+        span: Option<Span>,
+    ) -> ConstEvalErr<'tcx>
+    where
+        'tcx: 'mir,
+    {
+        error.print_backtrace();
+        let stacktrace = ecx.generate_stacktrace();
+        ConstEvalErr { error: error.kind, stacktrace, span: span.unwrap_or_else(|| ecx.cur_span()) }
+    }
+
+    pub fn struct_error(
+        &self,
+        tcx: TyCtxtAt<'tcx>,
+        message: &str,
+        emit: impl FnOnce(DiagnosticBuilder<'_>),
+    ) -> ErrorHandled {
+        self.struct_generic(tcx, message, emit, None)
+    }
+
+    pub fn report_as_error(&self, tcx: TyCtxtAt<'tcx>, message: &str) -> ErrorHandled {
+        self.struct_error(tcx, message, |mut e| e.emit())
+    }
+
+    pub fn report_as_lint(
+        &self,
+        tcx: TyCtxtAt<'tcx>,
+        message: &str,
+        lint_root: hir::HirId,
+        span: Option<Span>,
+    ) -> ErrorHandled {
+        self.struct_generic(
+            tcx,
+            message,
+            |mut lint: DiagnosticBuilder<'_>| {
+                // Apply the span.
+                if let Some(span) = span {
+                    let primary_spans = lint.span.primary_spans().to_vec();
+                    // point at the actual error as the primary span
+                    lint.replace_span_with(span);
+                    // point to the `const` statement as secondary spans;
+                    // they don't get any label
+                    for sp in primary_spans {
+                        if sp != span {
+                            lint.span_label(sp, "");
+                        }
+                    }
+                }
+                lint.emit();
+            },
+            Some(lint_root),
+        )
+    }
+
+    /// Create a diagnostic for this const eval error.
+    ///
+    /// Sets the message passed in via `message` and adds span labels with detailed error
+    /// information before handing control back to `emit` to do any final processing.
+    /// It's the caller's responsibility to call emit(), stash(), etc. within the `emit`
+    /// function to dispose of the diagnostic properly.
+    ///
+    /// If `lint_root.is_some()`, report it as a lint, else report it as a hard error.
+    /// (Except that for some errors, we ignore all that -- see `must_error` below.)
+    fn struct_generic(
+        &self,
+        tcx: TyCtxtAt<'tcx>,
+        message: &str,
+        emit: impl FnOnce(DiagnosticBuilder<'_>),
+        lint_root: Option<hir::HirId>,
+    ) -> ErrorHandled {
+        let must_error = match self.error {
+            err_inval!(Layout(LayoutError::Unknown(_))) | err_inval!(TooGeneric) => {
+                return ErrorHandled::TooGeneric;
+            }
+            err_inval!(TypeckError(error_reported)) => {
+                return ErrorHandled::Reported(error_reported);
+            }
+            // We must *always* hard error on these, even if the caller wants just a lint.
+            err_inval!(Layout(LayoutError::SizeOverflow(_))) => true,
+            _ => false,
+        };
+        trace!("reporting const eval failure at {:?}", self.span);
+
+        let err_msg = match &self.error {
+            InterpError::MachineStop(msg) => {
+                // A custom error (`ConstEvalErrKind`, defined earlier in this file).
+                // Should be turned into a string by now.
+                msg.downcast_ref::<String>().expect("invalid MachineStop payload").clone()
+            }
+            err => err.to_string(),
+        };
+
+        let finish = |mut err: DiagnosticBuilder<'_>, span_msg: Option<String>| {
+            if let Some(span_msg) = span_msg {
+                err.span_label(self.span, span_msg);
+            }
+            // Add spans for the stacktrace. Don't print a single-line backtrace though.
+            if self.stacktrace.len() > 1 {
+                for frame_info in &self.stacktrace {
+                    err.span_label(frame_info.span, frame_info.to_string());
+                }
+            }
+            // Let the caller finish the job.
+            emit(err)
+        };
+
+        if must_error {
+            // The `message` makes little sense here, this is a more serious error than the
+            // caller thinks anyway.
+            // See <https://github.com/rust-lang/rust/pull/63152>.
+            finish(struct_error(tcx, &err_msg), None);
+            ErrorHandled::Reported(ErrorReported)
+        } else {
+            // Regular case.
+            if let Some(lint_root) = lint_root {
+                // Report as lint.
+                let hir_id = self
+                    .stacktrace
+                    .iter()
+                    .rev()
+                    .find_map(|frame| frame.lint_root)
+                    .unwrap_or(lint_root);
+                tcx.struct_span_lint_hir(
+                    rustc_session::lint::builtin::CONST_ERR,
+                    hir_id,
+                    tcx.span,
+                    |lint| finish(lint.build(message), Some(err_msg)),
+                );
+                ErrorHandled::Linted
+            } else {
+                // Report as hard error.
+                finish(struct_error(tcx, message), Some(err_msg));
+                ErrorHandled::Reported(ErrorReported)
+            }
+        }
+    }
+}
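The `MachineStop` round-trip described in the comments above — a `ConstEvalErrKind` is flattened into a plain `String` payload when raised, and `struct_generic` downcasts it back for display — can be sketched in isolation. Everything below (`SketchErrKind`, `raise`, `report`) is an illustrative stand-in, not a rustc item:

use std::any::Any;
use std::fmt;

enum SketchErrKind {
    ConstAccessesStatic,
}

impl fmt::Display for SketchErrKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "constant accesses static")
    }
}

// Raising: the structured kind is flattened into a plain `String` payload,
// like the `err_machine_stop!` conversion above.
fn raise(kind: SketchErrKind) -> Box<dyn Any + Send> {
    Box::new(kind.to_string())
}

// Reporting: the payload is expected to be a `String` and is downcast back,
// mirroring the `downcast_ref::<String>()` in `struct_generic`.
fn report(payload: &(dyn Any + Send)) -> String {
    payload.downcast_ref::<String>().expect("invalid MachineStop payload").clone()
}

fn main() {
    let payload = raise(SketchErrKind::ConstAccessesStatic);
    assert_eq!(report(&*payload), "constant accesses static");
}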
diff --git a/compiler/rustc_mir/src/const_eval/eval_queries.rs b/compiler/rustc_mir/src/const_eval/eval_queries.rs
new file mode 100644
index 00000000000..291b42c12d7
--- /dev/null
+++ b/compiler/rustc_mir/src/const_eval/eval_queries.rs
@@ -0,0 +1,398 @@
+use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr, MemoryExtra};
+use crate::interpret::eval_nullary_intrinsic;
+use crate::interpret::{
+    intern_const_alloc_recursive, Allocation, ConstValue, GlobalId, Immediate, InternKind,
+    InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RawConst, RefTracking, Scalar,
+    ScalarMaybeUninit, StackPopCleanup,
+};
+
+use rustc_hir::def::DefKind;
+use rustc_middle::mir;
+use rustc_middle::mir::interpret::ErrorHandled;
+use rustc_middle::traits::Reveal;
+use rustc_middle::ty::{self, subst::Subst, TyCtxt};
+use rustc_span::source_map::Span;
+use rustc_target::abi::{Abi, LayoutOf};
+use std::convert::TryInto;
+
+pub fn note_on_undefined_behavior_error() -> &'static str {
+    "The rules on what exactly is undefined behavior aren't clear, \
+     so this check might be overzealous. Please open an issue on the rustc \
+     repository if you believe it should not be considered undefined behavior."
+}
+
+// Returns a pointer to where the result lives
+fn eval_body_using_ecx<'mir, 'tcx>(
+    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
+    cid: GlobalId<'tcx>,
+    body: &'mir mir::Body<'tcx>,
+) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
+    debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
+    let tcx = *ecx.tcx;
+    let layout = ecx.layout_of(body.return_ty().subst(tcx, cid.instance.substs))?;
+    assert!(!layout.is_unsized());
+    let ret = ecx.allocate(layout, MemoryKind::Stack);
+
+    let name = ty::tls::with(|tcx| tcx.def_path_str(cid.instance.def_id()));
+    let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
+    trace!("eval_body_using_ecx: pushing stack frame for global: {}{}", name, prom);
+
+    // Assert all args (if any) are zero-sized types; `eval_body_using_ecx` doesn't
+    // make sense if the body is expecting nontrivial arguments.
+    // (The alternative would be to use `eval_fn_call` with an args slice.)
+    for arg in body.args_iter() {
+        let decl = body.local_decls.get(arg).expect("arg missing from local_decls");
+        let layout = ecx.layout_of(decl.ty.subst(tcx, cid.instance.substs))?;
+        assert!(layout.is_zst())
+    }
+
+    ecx.push_stack_frame(
+        cid.instance,
+        body,
+        Some(ret.into()),
+        StackPopCleanup::None { cleanup: false },
+    )?;
+
+    // The main interpreter loop.
+    ecx.run()?;
+
+    // Intern the result
+    // FIXME: since the DefId of a promoted is the DefId of its owner, this
+    // means that promoteds in statics are actually interned like statics!
+    // However, this is also currently crucial because we promote mutable
+    // non-empty slices in statics to extend their lifetime, and this
+    // ensures that they are put into a mutable allocation.
+    // For other kinds of promoteds in statics (like array initializers), this is rather silly.
+    let intern_kind = match tcx.static_mutability(cid.instance.def_id()) {
+        Some(m) => InternKind::Static(m),
+        None if cid.promoted.is_some() => InternKind::Promoted,
+        _ => InternKind::Constant,
+    };
+    intern_const_alloc_recursive(
+        ecx,
+        intern_kind,
+        ret,
+        body.ignore_interior_mut_in_const_validation,
+    );
+
+    debug!("eval_body_using_ecx done: {:?}", *ret);
+    Ok(ret)
+}
+
+/// The `InterpCx` is only meant to be used to do field and index projections into constants for
+/// `simd_shuffle` and const patterns in match arms.
+///
+/// The function containing the `match` that is currently being analyzed may have generic bounds
+/// that inform us about the generic bounds of the constant. E.g., using an associated constant
+/// of a function's generic parameter will require knowledge about the bounds on the generic
+/// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
+pub(super) fn mk_eval_cx<'mir, 'tcx>(
+    tcx: TyCtxt<'tcx>,
+    root_span: Span,
+    param_env: ty::ParamEnv<'tcx>,
+    can_access_statics: bool,
+) -> CompileTimeEvalContext<'mir, 'tcx> {
+    debug!("mk_eval_cx: {:?}", param_env);
+    InterpCx::new(
+        tcx,
+        root_span,
+        param_env,
+        CompileTimeInterpreter::new(tcx.sess.const_eval_limit()),
+        MemoryExtra { can_access_statics },
+    )
+}
+
+pub(super) fn op_to_const<'tcx>(
+    ecx: &CompileTimeEvalContext<'_, 'tcx>,
+    op: OpTy<'tcx>,
+) -> ConstValue<'tcx> {
+    // We do not have value optimizations for everything.
+    // Only scalars and slices, since they are very common.
+    // Note that further down we turn scalars of uninitialized bits back to `ByRef`. These can result
+    // from scalar unions that are initialized with one of their zero sized variants. We could
+    // instead allow `ConstValue::Scalar` to store `ScalarMaybeUninit`, but that would affect all
+    // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
+    // `Undef` situation.
+    let try_as_immediate = match op.layout.abi {
+        Abi::Scalar(..) => true,
+        Abi::ScalarPair(..) => match op.layout.ty.kind {
+            ty::Ref(_, inner, _) => match inner.kind {
+                ty::Slice(elem) => elem == ecx.tcx.types.u8,
+                ty::Str => true,
+                _ => false,
+            },
+            _ => false,
+        },
+        _ => false,
+    };
+    let immediate = if try_as_immediate {
+        Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
+    } else {
+        // It is guaranteed that any non-slice scalar pair is actually ByRef here.
+        // When we come back from raw const eval, we are always by-ref. The only way our op here is
+        // by-val is if we are in destructure_const, i.e., if this is (a field of) something that we
+        // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
+        // structs containing such.
+        op.try_as_mplace(ecx)
+    };
+
+    let to_const_value = |mplace: MPlaceTy<'_>| match mplace.ptr {
+        Scalar::Ptr(ptr) => {
+            let alloc = ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory();
+            ConstValue::ByRef { alloc, offset: ptr.offset }
+        }
+        Scalar::Raw { data, .. } => {
+            assert!(mplace.layout.is_zst());
+            assert_eq!(
+                data,
+                mplace.layout.align.abi.bytes().into(),
+                "this MPlaceTy must come from `try_as_mplace` being used on a zst, so we know what
+                 value this integer address must have",
+            );
+            ConstValue::Scalar(Scalar::zst())
+        }
+    };
+    match immediate {
+        Ok(mplace) => to_const_value(mplace),
+        // see comment on `let try_as_immediate` above
+        Err(imm) => match *imm {
+            Immediate::Scalar(x) => match x {
+                ScalarMaybeUninit::Scalar(s) => ConstValue::Scalar(s),
+                ScalarMaybeUninit::Uninit => to_const_value(op.assert_mem_place(ecx)),
+            },
+            Immediate::ScalarPair(a, b) => {
+                let (data, start) = match a.check_init().unwrap() {
+                    Scalar::Ptr(ptr) => {
+                        (ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory(), ptr.offset.bytes())
+                    }
+                    Scalar::Raw { .. } => (
+                        ecx.tcx
+                            .intern_const_alloc(Allocation::from_byte_aligned_bytes(b"" as &[u8])),
+                        0,
+                    ),
+                };
+                let len = b.to_machine_usize(ecx).unwrap();
+                let start = start.try_into().unwrap();
+                let len: usize = len.try_into().unwrap();
+                ConstValue::Slice { data, start, end: start + len }
+            }
+        },
+    }
+}
+
+fn validate_and_turn_into_const<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    constant: RawConst<'tcx>,
+    key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
+) -> ::rustc_middle::mir::interpret::ConstEvalResult<'tcx> {
+    let cid = key.value;
+    let def_id = cid.instance.def.def_id();
+    let is_static = tcx.is_static(def_id);
+    let ecx = mk_eval_cx(tcx, tcx.def_span(key.value.instance.def_id()), key.param_env, is_static);
+    let val = (|| {
+        let mplace = ecx.raw_const_to_mplace(constant)?;
+
+        // FIXME do not validate promoteds until a decision on
+        // https://github.com/rust-lang/rust/issues/67465 is made
+        if cid.promoted.is_none() {
+            let mut ref_tracking = RefTracking::new(mplace);
+            while let Some((mplace, path)) = ref_tracking.todo.pop() {
+                ecx.const_validate_operand(
+                    mplace.into(),
+                    path,
+                    &mut ref_tracking,
+                    /*may_ref_to_static*/ ecx.memory.extra.can_access_statics,
+                )?;
+            }
+        }
+        // Now that we validated, turn this into a proper constant.
+        // Statics/promoteds are always `ByRef`, for the rest `op_to_const` decides
+        // whether they become immediates.
+        if is_static || cid.promoted.is_some() {
+            let ptr = mplace.ptr.assert_ptr();
+            Ok(ConstValue::ByRef {
+                alloc: ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory(),
+                offset: ptr.offset,
+            })
+        } else {
+            Ok(op_to_const(&ecx, mplace.into()))
+        }
+    })();
+
+    val.map_err(|error| {
+        let err = ConstEvalErr::new(&ecx, error, None);
+        err.struct_error(ecx.tcx, "it is undefined behavior to use this value", |mut diag| {
+            diag.note(note_on_undefined_behavior_error());
+            diag.emit();
+        })
+    })
+}
+
+pub fn const_eval_validated_provider<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
+) -> ::rustc_middle::mir::interpret::ConstEvalResult<'tcx> {
+    // see comment in const_eval_raw_provider for what we're doing here
+    if key.param_env.reveal() == Reveal::All {
+        let mut key = key;
+        key.param_env = key.param_env.with_user_facing();
+        match tcx.const_eval_validated(key) {
+            // try again with reveal all as requested
+            Err(ErrorHandled::TooGeneric) => {}
+            // deduplicate calls
+            other => return other,
+        }
+    }
+
+    // We call `const_eval` for zero arg intrinsics, too, in order to cache their value.
+    // Catch such calls and evaluate them instead of trying to load a constant's MIR.
+    if let ty::InstanceDef::Intrinsic(def_id) = key.value.instance.def {
+        let ty = key.value.instance.ty(tcx, key.param_env);
+        let substs = match ty.kind {
+            ty::FnDef(_, substs) => substs,
+            _ => bug!("intrinsic with type {:?}", ty),
+        };
+        return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs).map_err(|error| {
+            let span = tcx.def_span(def_id);
+            let error = ConstEvalErr { error: error.kind, stacktrace: vec![], span };
+            error.report_as_error(tcx.at(span), "could not evaluate nullary intrinsic")
+        });
+    }
+
+    tcx.const_eval_raw(key).and_then(|val| validate_and_turn_into_const(tcx, val, key))
+}
+
+pub fn const_eval_raw_provider<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
+) -> ::rustc_middle::mir::interpret::ConstEvalRawResult<'tcx> {
+    // Because the constant is computed twice (once per value of `Reveal`), we are at risk of
+    // reporting the same error twice here. To resolve this, we check whether we can evaluate the
+    // constant in the more restrictive `Reveal::UserFacing`, which most likely already was
+// computed. For a large percentage of constants, that will already have succeeded. Only
+    // associated constants of generic functions will fail due to not enough monomorphization
+    // information being available.
+
+    // In case we fail in the `UserFacing` variant, we just do the real computation.
+    if key.param_env.reveal() == Reveal::All {
+        let mut key = key;
+        key.param_env = key.param_env.with_user_facing();
+        match tcx.const_eval_raw(key) {
+            // try again with reveal all as requested
+            Err(ErrorHandled::TooGeneric) => {}
+            // deduplicate calls
+            other => return other,
+        }
+    }
+    if cfg!(debug_assertions) {
+        // Make sure we format the instance even if we do not print it.
+        // This serves as a regression test against an ICE on printing.
+        // The next two lines concatenated contain some discussion:
+        // https://rust-lang.zulipchat.com/#narrow/stream/146212-t-compiler.2Fconst-eval/
+        // subject/anon_const_instance_printing/near/135980032
+        let instance = key.value.instance.to_string();
+        trace!("const eval: {:?} ({})", key, instance);
+    }
+
+    let cid = key.value;
+    let def = cid.instance.def.with_opt_param();
+
+    if let Some(def) = def.as_local() {
+        if tcx.has_typeck_results(def.did) {
+            if let Some(error_reported) = tcx.typeck_opt_const_arg(def).tainted_by_errors {
+                return Err(ErrorHandled::Reported(error_reported));
+            }
+        }
+    }
+
+    let is_static = tcx.is_static(def.did);
+
+    let mut ecx = InterpCx::new(
+        tcx,
+        tcx.def_span(def.did),
+        key.param_env,
+        CompileTimeInterpreter::new(tcx.sess.const_eval_limit()),
+        MemoryExtra { can_access_statics: is_static },
+    );
+
+    let res = ecx.load_mir(cid.instance.def, cid.promoted);
+    res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body))
+        .map(|place| RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
+        .map_err(|error| {
+            let err = ConstEvalErr::new(&ecx, error, None);
+            // errors in statics are always emitted as fatal errors
+            if is_static {
+                // Ensure that an error is actually reported, even if the above
+                // error was `TooGeneric` or `Reported`.
+                let v = err.report_as_error(
+                    ecx.tcx.at(ecx.cur_span()),
+                    "could not evaluate static initializer",
+                );
+
+                // If this is `Reveal::All`, then we need to make sure an error is reported but if
+                // this is `Reveal::UserFacing`, then it's expected that we could get a
+                // `TooGeneric` error. When we fall back to `Reveal::All`, then it will either
+                // succeed or we'll report this error then.
+                if key.param_env.reveal() == Reveal::All {
+                    tcx.sess.delay_span_bug(
+                        err.span,
+                        &format!("static eval failure did not emit an error: {:#?}", v),
+                    );
+                }
+
+                v
+            } else if let Some(def) = def.as_local() {
+                // constant defined in this crate, we can figure out a lint level!
+                match tcx.def_kind(def.did.to_def_id()) {
+                    // constants never produce a hard error at the definition site. Anything else is
+                    // a backwards compatibility hazard (and will break old versions of winapi for
+                    // sure)
+                    //
+                    // note that validation may still cause a hard error on this very same constant,
+                    // because any code that existed before validation could not have failed
+                    // validation thus preventing such a hard error from being a backwards
+                    // compatibility hazard
+                    DefKind::Const | DefKind::AssocConst => {
+                        let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
+                        err.report_as_lint(
+                            tcx.at(tcx.def_span(def.did)),
+                            "any use of this value will cause an error",
+                            hir_id,
+                            Some(err.span),
+                        )
+                    }
+                    // promoting runtime code is only allowed to error if it references broken
+                    // constants; any other kind of error will be reported to the user as a
+                    // deny-by-default lint
+                    _ => {
+                        if let Some(p) = cid.promoted {
+                            let span = tcx.promoted_mir_of_opt_const_arg(def.to_global())[p].span;
+                            if let err_inval!(ReferencedConstant) = err.error {
+                                err.report_as_error(
+                                    tcx.at(span),
+                                    "evaluation of constant expression failed",
+                                )
+                            } else {
+                                err.report_as_lint(
+                                    tcx.at(span),
+                                    "reaching this expression at runtime will panic or abort",
+                                    tcx.hir().local_def_id_to_hir_id(def.did),
+                                    Some(err.span),
+                                )
+                            }
+                        // anything else (array lengths, enum initializers, constant patterns) is
+                        // reported as a hard error
+                        } else {
+                            err.report_as_error(
+                                ecx.tcx.at(ecx.cur_span()),
+                                "evaluation of constant value failed",
+                            )
+                        }
+                    }
+                }
+            } else {
+                // use of broken constant from other crate
+                err.report_as_error(ecx.tcx.at(ecx.cur_span()), "could not evaluate constant")
+            }
+        })
+}
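Both providers above use the same `Reveal` deduplication shape: when asked for `Reveal::All`, first retry the query under `Reveal::UserFacing` (which has most likely already been computed and cached) and recompute only on `TooGeneric`. A minimal standalone sketch of that control flow, with `Mode`, `query`, and `EvalError` as simplified stand-ins for `Reveal`, the query system, and `ErrorHandled`:

#[derive(Clone, Copy, PartialEq)]
enum Mode {
    UserFacing,
    All,
}

#[derive(Debug, PartialEq)]
enum EvalError {
    TooGeneric,
}

fn query(mode: Mode, is_generic: bool) -> Result<i32, EvalError> {
    // Pretend that, like `Reveal::UserFacing`, this mode lacks the
    // monomorphization information needed for generic constants.
    if mode == Mode::UserFacing && is_generic {
        return Err(EvalError::TooGeneric);
    }
    Ok(42)
}

fn eval(mode: Mode, is_generic: bool) -> Result<i32, EvalError> {
    if mode == Mode::All {
        // Try the (most likely already cached) user-facing variant first;
        // fall through and recompute only on `TooGeneric`.
        match query(Mode::UserFacing, is_generic) {
            Err(EvalError::TooGeneric) => {}
            other => return other, // deduplicate calls
        }
    }
    query(mode, is_generic)
}

fn main() {
    assert_eq!(eval(Mode::All, false), Ok(42)); // deduplicated with UserFacing
    assert_eq!(eval(Mode::All, true), Ok(42)); // falls back to Mode::All
}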
diff --git a/compiler/rustc_mir/src/const_eval/fn_queries.rs b/compiler/rustc_mir/src/const_eval/fn_queries.rs
new file mode 100644
index 00000000000..9ef63b3322d
--- /dev/null
+++ b/compiler/rustc_mir/src/const_eval/fn_queries.rs
@@ -0,0 +1,167 @@
+use rustc_attr as attr;
+use rustc_hir as hir;
+use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_middle::hir::map::blocks::FnLikeNode;
+use rustc_middle::ty::query::Providers;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::symbol::Symbol;
+use rustc_target::spec::abi::Abi;
+
+/// Whether the `def_id` counts as a const fn in the current crate, considering all active
+/// feature gates.
+pub fn is_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
+    tcx.is_const_fn_raw(def_id)
+        && match is_unstable_const_fn(tcx, def_id) {
+            Some(feature_name) => {
+                // has a `rustc_const_unstable` attribute, check whether the user enabled the
+                // corresponding feature gate.
+                tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == feature_name)
+            }
+            // Functions without const stability are either stable user-written
+            // const fns, or the user is using feature gates; either way, we
+            // don't care what they do.
+            None => true,
+        }
+}
+
+/// Whether the `def_id` is an unstable const fn and what feature gate is necessary to enable it
+pub fn is_unstable_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Symbol> {
+    if tcx.is_const_fn_raw(def_id) {
+        let const_stab = tcx.lookup_const_stability(def_id)?;
+        if const_stab.level.is_unstable() { Some(const_stab.feature) } else { None }
+    } else {
+        None
+    }
+}
+
+/// Returns `true` if this function must conform to `min_const_fn`
+pub fn is_min_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
+    // Bail out if the signature doesn't contain `const`
+    if !tcx.is_const_fn_raw(def_id) {
+        return false;
+    }
+
+    if tcx.features().staged_api {
+        // In order for a libstd function to be considered min_const_fn
+        // it needs to be stable and have no `rustc_const_unstable` attribute.
+        match tcx.lookup_const_stability(def_id) {
+            // `rustc_const_unstable` functions don't need to conform.
+            Some(&attr::ConstStability { ref level, .. }) if level.is_unstable() => false,
+            None => {
+                if let Some(stab) = tcx.lookup_stability(def_id) {
+                    if stab.level.is_stable() {
+                        tcx.sess.span_err(
+                            tcx.def_span(def_id),
+                            "stable const functions must have either `rustc_const_stable` or \
+                             `rustc_const_unstable` attribute",
+                        );
+                        // We errored above, but because we don't know whether this function
+                        // needs to conform, we err on the "safe" side and require min_const_fn.
+                        true
+                    } else {
+                        // Unstable functions need not conform to min_const_fn.
+                        false
+                    }
+                } else {
+                    // Internal functions are forced to conform to min_const_fn.
+                    // Annotate the internal function with a const stability attribute if
+                    // you need to use unstable features.
+                    // Note: this is an arbitrary choice that does not affect stability or const
+                    // safety or anything; it just changes whether we need to annotate some
+                    // internal functions with `rustc_const_stable` or with `rustc_const_unstable`
+                    true
+                }
+            }
+            // Everything else needs to conform, because it would be callable from
+            // other `min_const_fn` functions.
+            _ => true,
+        }
+    } else {
+        // users enabling the `const_fn` feature gate can do what they want
+        !tcx.features().const_fn
+    }
+}
+
+pub fn is_parent_const_impl_raw(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
+    let parent_id = tcx.hir().get_parent_did(hir_id);
+    if !parent_id.is_top_level_module() { is_const_impl_raw(tcx, parent_id) } else { false }
+}
+
+/// Checks whether the function has a `const` modifier or, in case it is an intrinsic, whether
+/// said intrinsic has a `rustc_const_{un,}stable` attribute.
+fn is_const_fn_raw(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
+    let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
+
+    let node = tcx.hir().get(hir_id);
+
+    if let hir::Node::ForeignItem(hir::ForeignItem { kind: hir::ForeignItemKind::Fn(..), .. }) =
+        node
+    {
+        // Intrinsics use `rustc_const_{un,}stable` attributes to indicate constness. All other
+        // foreign items cannot be evaluated at compile-time.
+        if let Abi::RustIntrinsic | Abi::PlatformIntrinsic = tcx.hir().get_foreign_abi(hir_id) {
+            tcx.lookup_const_stability(def_id).is_some()
+        } else {
+            false
+        }
+    } else if let Some(fn_like) = FnLikeNode::from_node(node) {
+        if fn_like.constness() == hir::Constness::Const {
+            return true;
+        }
+
+        // If the function itself is not annotated with `const`, it may still be a `const fn`
+        // if it resides in a const trait impl.
+        is_parent_const_impl_raw(tcx, hir_id)
+    } else if let hir::Node::Ctor(_) = node {
+        true
+    } else {
+        false
+    }
+}
+
+/// Checks whether the given item is an `impl` that has a `const` modifier.
+fn is_const_impl_raw(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
+    let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
+    let node = tcx.hir().get(hir_id);
+    matches!(
+        node,
+        hir::Node::Item(hir::Item {
+            kind: hir::ItemKind::Impl { constness: hir::Constness::Const, .. },
+            ..
+        })
+    )
+}
+
+fn is_promotable_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
+    is_const_fn(tcx, def_id)
+        && match tcx.lookup_const_stability(def_id) {
+            Some(stab) => {
+                if cfg!(debug_assertions) && stab.promotable {
+                    let sig = tcx.fn_sig(def_id);
+                    assert_eq!(
+                        sig.unsafety(),
+                        hir::Unsafety::Normal,
+                        "don't mark const unsafe fns as promotable",
+                        // https://github.com/rust-lang/rust/pull/53851#issuecomment-418760682
+                    );
+                }
+                stab.promotable
+            }
+            None => false,
+        }
+}
+
+fn const_fn_is_allowed_fn_ptr(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
+    is_const_fn(tcx, def_id)
+        && tcx.lookup_const_stability(def_id).map(|stab| stab.allow_const_fn_ptr).unwrap_or(false)
+}
+
+pub fn provide(providers: &mut Providers) {
+    *providers = Providers {
+        is_const_fn_raw,
+        is_const_impl_raw: |tcx, def_id| is_const_impl_raw(tcx, def_id.expect_local()),
+        is_promotable_const_fn,
+        const_fn_is_allowed_fn_ptr,
+        ..*providers
+    };
+}
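The stability matrix that `is_min_const_fn` walks through condenses into a pure function over booleans. This is a sketch of the decision logic only; the attribute lookups (`lookup_const_stability`, `lookup_stability`) are replaced by plain parameters, and all names here are illustrative:

fn must_conform_to_min_const_fn(
    is_const_fn_raw: bool,
    staged_api: bool,
    const_stability: Option<bool>, // Some(is_unstable) if annotated
    fn_stability: Option<bool>,    // Some(is_stable) if annotated
    const_fn_feature: bool,
) -> bool {
    if !is_const_fn_raw {
        // The signature does not even contain `const`.
        return false;
    }
    if staged_api {
        match const_stability {
            // `rustc_const_unstable` functions don't need to conform.
            Some(true) => false,
            // `rustc_const_stable` functions must conform.
            Some(false) => true,
            None => match fn_stability {
                // Stable fn missing a const-stability attribute: an error is
                // emitted, and we conservatively require conformance.
                Some(true) => true,
                // Unstable functions need not conform.
                Some(false) => false,
                // Internal functions are forced to conform.
                None => true,
            },
        }
    } else {
        // Outside the standard library, `#![feature(const_fn)]` opts out.
        !const_fn_feature
    }
}

fn main() {
    // An internal libstd helper without attributes must conform.
    assert!(must_conform_to_min_const_fn(true, true, None, None, false));
    // A user crate with `#![feature(const_fn)]` can do what it wants.
    assert!(!must_conform_to_min_const_fn(true, false, None, None, true));
}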
diff --git a/compiler/rustc_mir/src/const_eval/machine.rs b/compiler/rustc_mir/src/const_eval/machine.rs
new file mode 100644
index 00000000000..b0357c508a3
--- /dev/null
+++ b/compiler/rustc_mir/src/const_eval/machine.rs
@@ -0,0 +1,372 @@
+use rustc_middle::mir;
+use rustc_middle::ty::layout::HasTyCtxt;
+use rustc_middle::ty::{self, Ty};
+use std::borrow::Borrow;
+use std::collections::hash_map::Entry;
+use std::hash::Hash;
+
+use rustc_data_structures::fx::FxHashMap;
+
+use rustc_ast::Mutability;
+use rustc_hir::def_id::DefId;
+use rustc_middle::mir::AssertMessage;
+use rustc_session::Limit;
+use rustc_span::symbol::Symbol;
+
+use crate::interpret::{
+    self, compile_time_machine, AllocId, Allocation, Frame, GlobalId, ImmTy, InterpCx,
+    InterpResult, Memory, OpTy, PlaceTy, Pointer, Scalar,
+};
+
+use super::error::*;
+
+impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
+    /// Evaluate a const function where all arguments (if any) are zero-sized types.
+    /// The evaluation is memoized thanks to the query system.
+    ///
+    /// Returns `true` if the call has been evaluated.
+    fn try_eval_const_fn_call(
+        &mut self,
+        instance: ty::Instance<'tcx>,
+        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
+        args: &[OpTy<'tcx>],
+    ) -> InterpResult<'tcx, bool> {
+        trace!("try_eval_const_fn_call: {:?}", instance);
+        // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot
+        // perform this optimization on items tagged with it.
+        if instance.def.requires_caller_location(self.tcx()) {
+            return Ok(false);
+        }
+        // For the moment we only do this for functions which take no arguments
+        // (or all arguments are ZSTs) so that we don't memoize too much.
+        if args.iter().any(|a| !a.layout.is_zst()) {
+            return Ok(false);
+        }
+
+        let dest = match ret {
+            Some((dest, _)) => dest,
+            // Don't memoize diverging function calls.
+            None => return Ok(false),
+        };
+
+        let gid = GlobalId { instance, promoted: None };
+
+        let place = self.const_eval_raw(gid)?;
+
+        self.copy_op(place.into(), dest)?;
+
+        self.return_to_block(ret.map(|r| r.1))?;
+        trace!("{:?}", self.dump_place(*dest));
+        Ok(true)
+    }
+
+    /// "Intercept" a function call to a panic-related function
+    /// because we have something special to do for it.
+    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
+    fn hook_panic_fn(
+        &mut self,
+        instance: ty::Instance<'tcx>,
+        args: &[OpTy<'tcx>],
+    ) -> InterpResult<'tcx> {
+        let def_id = instance.def_id();
+        if Some(def_id) == self.tcx.lang_items().panic_fn()
+            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
+        {
+            // &'static str
+            assert!(args.len() == 1);
+
+            let msg_place = self.deref_operand(args[0])?;
+            let msg = Symbol::intern(self.read_str(msg_place)?);
+            let span = self.find_closest_untracked_caller_location();
+            let (file, line, col) = self.location_triple_for_span(span);
+            Err(ConstEvalErrKind::Panic { msg, file, line, col }.into())
+        } else {
+            Ok(())
+        }
+    }
+}
+
+/// Extra machine state for CTFE, and the Machine instance
+pub struct CompileTimeInterpreter<'mir, 'tcx> {
+    /// For now, the number of terminators that can be evaluated before we throw a resource
+    /// exhaustion error.
+    ///
+    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
+    pub steps_remaining: usize,
+
+    /// The virtual call stack.
+    pub(crate) stack: Vec<Frame<'mir, 'tcx, (), ()>>,
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct MemoryExtra {
+    /// We need to make sure consts never point to anything mutable, even recursively. That is
+    /// relied on for pattern matching on consts with references.
+    /// To achieve this, two pieces have to work together:
+    /// * Interning makes everything outside of statics immutable.
+    /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
+    /// This boolean here controls the second part.
+    pub(super) can_access_statics: bool,
+}
+
+impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
+    pub(super) fn new(const_eval_limit: Limit) -> Self {
+        CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
+    }
+}
+
+impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
+    #[inline(always)]
+    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
+    where
+        K: Borrow<Q>,
+    {
+        FxHashMap::contains_key(self, k)
+    }
+
+    #[inline(always)]
+    fn insert(&mut self, k: K, v: V) -> Option<V> {
+        FxHashMap::insert(self, k, v)
+    }
+
+    #[inline(always)]
+    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
+    where
+        K: Borrow<Q>,
+    {
+        FxHashMap::remove(self, k)
+    }
+
+    #[inline(always)]
+    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
+        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
+    }
+
+    #[inline(always)]
+    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
+        match self.get(&k) {
+            Some(v) => Ok(v),
+            None => {
+                vacant()?;
+                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
+            }
+        }
+    }
+
+    #[inline(always)]
+    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
+        match self.entry(k) {
+            Entry::Occupied(e) => Ok(e.into_mut()),
+            Entry::Vacant(e) => {
+                let v = vacant()?;
+                Ok(e.insert(v))
+            }
+        }
+    }
+}
+
+crate type CompileTimeEvalContext<'mir, 'tcx> =
+    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
+
+impl interpret::MayLeak for ! {
+    #[inline(always)]
+    fn may_leak(self) -> bool {
+        // `self` is uninhabited
+        self
+    }
+}
+
+impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
+    compile_time_machine!(<'mir, 'tcx>);
+
+    type MemoryExtra = MemoryExtra;
+
+    fn find_mir_or_eval_fn(
+        ecx: &mut InterpCx<'mir, 'tcx, Self>,
+        instance: ty::Instance<'tcx>,
+        args: &[OpTy<'tcx>],
+        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
+        _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts
+    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
+        debug!("find_mir_or_eval_fn: {:?}", instance);
+
+        // Only check non-glue functions
+        if let ty::InstanceDef::Item(def) = instance.def {
+            // Execution might have wandered off into other crates, so we cannot do a stability-
+            // sensitive check here.  But we can at least rule out functions that are not const
+            // at all.
+            if ecx.tcx.is_const_fn_raw(def.did) {
+                // If this function is a `const fn`, then under certain circumstances we
+                // can evaluate the call via the query system, thus memoizing all future calls.
+                if ecx.try_eval_const_fn_call(instance, ret, args)? {
+                    return Ok(None);
+                }
+            } else {
+                // Some functions we support even if they are non-const -- but avoid testing
+                // that for const fn!
+                ecx.hook_panic_fn(instance, args)?;
+                // We certainly do *not* want to actually call the fn
+                // though, so be sure we return here.
+                throw_unsup_format!("calling non-const function `{}`", instance)
+            }
+        }
+        // This is a const fn. Call it.
+        Ok(Some(match ecx.load_mir(instance.def, None) {
+            Ok(body) => body,
+            Err(err) => {
+                if let err_unsup!(NoMirFor(did)) = err.kind {
+                    let path = ecx.tcx.def_path_str(did);
+                    return Err(ConstEvalErrKind::NeedsRfc(format!(
+                        "calling extern function `{}`",
+                        path
+                    ))
+                    .into());
+                }
+                return Err(err);
+            }
+        }))
+    }
+
+    fn call_intrinsic(
+        ecx: &mut InterpCx<'mir, 'tcx, Self>,
+        instance: ty::Instance<'tcx>,
+        args: &[OpTy<'tcx>],
+        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
+        _unwind: Option<mir::BasicBlock>,
+    ) -> InterpResult<'tcx> {
+        if ecx.emulate_intrinsic(instance, args, ret)? {
+            return Ok(());
+        }
+        // An intrinsic that we do not support
+        let intrinsic_name = ecx.tcx.item_name(instance.def_id());
+        Err(ConstEvalErrKind::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into())
+    }
+
+    fn assert_panic(
+        ecx: &mut InterpCx<'mir, 'tcx, Self>,
+        msg: &AssertMessage<'tcx>,
+        _unwind: Option<mir::BasicBlock>,
+    ) -> InterpResult<'tcx> {
+        use rustc_middle::mir::AssertKind::*;
+        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
+        let eval_to_int =
+            |op| ecx.read_immediate(ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
+        let err = match msg {
+            BoundsCheck { ref len, ref index } => {
+                let len = eval_to_int(len)?;
+                let index = eval_to_int(index)?;
+                BoundsCheck { len, index }
+            }
+            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
+            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
+            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
+            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
+            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
+            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
+        };
+        Err(ConstEvalErrKind::AssertFailure(err).into())
+    }
+
+    fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
+        Err(ConstEvalErrKind::NeedsRfc("pointer-to-integer cast".to_string()).into())
+    }
+
+    fn binary_ptr_op(
+        _ecx: &InterpCx<'mir, 'tcx, Self>,
+        _bin_op: mir::BinOp,
+        _left: ImmTy<'tcx>,
+        _right: ImmTy<'tcx>,
+    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
+        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
+    }
+
+    fn box_alloc(
+        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+        _dest: PlaceTy<'tcx>,
+    ) -> InterpResult<'tcx> {
+        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
+    }
+
+    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
+        // `steps_remaining` is zero: either the limit is disabled, or it has already
+        // been hit and reported in a previous call to `before_terminator`.
+        if ecx.machine.steps_remaining == 0 {
+            return Ok(());
+        }
+
+        ecx.machine.steps_remaining -= 1;
+        if ecx.machine.steps_remaining == 0 {
+            throw_exhaust!(StepLimitReached)
+        }
+
+        Ok(())
+    }
+
+    #[inline(always)]
+    fn init_frame_extra(
+        ecx: &mut InterpCx<'mir, 'tcx, Self>,
+        frame: Frame<'mir, 'tcx>,
+    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
+        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
+        if !ecx.tcx.sess.recursion_limit().value_within_limit(ecx.stack().len() + 1) {
+            throw_exhaust!(StackFrameLimitReached)
+        } else {
+            Ok(frame)
+        }
+    }
+
+    #[inline(always)]
+    fn stack(
+        ecx: &'a InterpCx<'mir, 'tcx, Self>,
+    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
+        &ecx.machine.stack
+    }
+
+    #[inline(always)]
+    fn stack_mut(
+        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
+    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
+        &mut ecx.machine.stack
+    }
+
+    fn before_access_global(
+        memory_extra: &MemoryExtra,
+        alloc_id: AllocId,
+        allocation: &Allocation,
+        static_def_id: Option<DefId>,
+        is_write: bool,
+    ) -> InterpResult<'tcx> {
+        if is_write {
+            // Write access. These are never allowed, but we give a targeted error message.
+            if allocation.mutability == Mutability::Not {
+                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
+            } else {
+                Err(ConstEvalErrKind::ModifiedGlobal.into())
+            }
+        } else {
+            // Read access. These are usually allowed, with some exceptions.
+            if memory_extra.can_access_statics {
+                // Machine configuration allows us to read from anything (e.g., `static` initializer).
+                Ok(())
+            } else if static_def_id.is_some() {
+                // Machine configuration does not allow us to read statics
+                // (e.g., `const` initializer).
+                // See const_eval::machine::MemoryExtra::can_access_statics for why
+                // this check is so important: if we could read statics, we could read pointers
+                // to mutable allocations *inside* statics. These allocations are not themselves
+                // statics, so pointers to them can get around the check in `validity.rs`.
+                Err(ConstEvalErrKind::ConstAccessesStatic.into())
+            } else {
+                // Immutable global, this read is fine.
+                // But make sure we never accept a read from something mutable; that would be
+                // unsound. The reason is that the content of this allocation may be different
+                // now and at run-time, so if we permit reading now we might return the wrong value.
+                assert_eq!(allocation.mutability, Mutability::Not);
+                Ok(())
+            }
+        }
+    }
+}
+
+// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
+// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
+// at the bottom of this file.
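The step-limit accounting in `before_terminator` is easy to get wrong in both directions (double-reporting, or never throwing). A standalone sketch of the same logic, with a stand-in error value, showing why a count of `0` means both "unlimited" and "already reported":

struct Machine {
    steps_remaining: usize,
}

fn before_terminator(m: &mut Machine) -> Result<(), &'static str> {
    // Zero means either "limit disabled" (initialized to 0) or "limit already
    // reported in an earlier call"; in both cases, do nothing.
    if m.steps_remaining == 0 {
        return Ok(());
    }
    m.steps_remaining -= 1;
    if m.steps_remaining == 0 {
        return Err("StepLimitReached");
    }
    Ok(())
}

fn main() {
    let mut limited = Machine { steps_remaining: 2 };
    assert!(before_terminator(&mut limited).is_ok()); // 2 -> 1
    assert!(before_terminator(&mut limited).is_err()); // 1 -> 0: throws once
    assert!(before_terminator(&mut limited).is_ok()); // already reported

    let mut unlimited = Machine { steps_remaining: 0 };
    for _ in 0..1_000 {
        assert!(before_terminator(&mut unlimited).is_ok()); // runs forever
    }
}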
diff --git a/compiler/rustc_mir/src/const_eval/mod.rs b/compiler/rustc_mir/src/const_eval/mod.rs
new file mode 100644
index 00000000000..e7eeb4b4de4
--- /dev/null
+++ b/compiler/rustc_mir/src/const_eval/mod.rs
@@ -0,0 +1,69 @@
+// Not in interpret to make sure we do not use private implementation details
+
+use std::convert::TryFrom;
+
+use rustc_middle::mir;
+use rustc_middle::ty::{self, TyCtxt};
+use rustc_span::{source_map::DUMMY_SP, symbol::Symbol};
+
+use crate::interpret::{intern_const_alloc_recursive, ConstValue, InternKind, InterpCx};
+
+mod error;
+mod eval_queries;
+mod fn_queries;
+mod machine;
+
+pub use error::*;
+pub use eval_queries::*;
+pub use fn_queries::*;
+pub use machine::*;
+
+pub(crate) fn const_caller_location(
+    tcx: TyCtxt<'tcx>,
+    (file, line, col): (Symbol, u32, u32),
+) -> ConstValue<'tcx> {
+    trace!("const_caller_location: {}:{}:{}", file, line, col);
+    let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);
+
+    let loc_place = ecx.alloc_caller_location(file, line, col);
+    intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place, false);
+    ConstValue::Scalar(loc_place.ptr)
+}
+
+/// This function uses `unwrap` copiously, because an already validated constant
+/// must have valid fields and can thus never fail outside of compiler bugs. However, it is
+/// invoked from the pretty printer, where it can receive enums with no variants and e.g.
+/// `read_discriminant` needs to be able to handle that.
+pub(crate) fn destructure_const<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    param_env: ty::ParamEnv<'tcx>,
+    val: &'tcx ty::Const<'tcx>,
+) -> mir::DestructuredConst<'tcx> {
+    trace!("destructure_const: {:?}", val);
+    let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
+    let op = ecx.const_to_op(val, None).unwrap();
+
+    // We go to `usize` as we cannot allocate anything bigger anyway.
+    let (field_count, variant, down) = match val.ty.kind {
+        ty::Array(_, len) => (usize::try_from(len.eval_usize(tcx, param_env)).unwrap(), None, op),
+        ty::Adt(def, _) if def.variants.is_empty() => {
+            return mir::DestructuredConst { variant: None, fields: tcx.arena.alloc_slice(&[]) };
+        }
+        ty::Adt(def, _) => {
+            let variant = ecx.read_discriminant(op).unwrap().1;
+            let down = ecx.operand_downcast(op, variant).unwrap();
+            (def.variants[variant].fields.len(), Some(variant), down)
+        }
+        ty::Tuple(substs) => (substs.len(), None, op),
+        _ => bug!("cannot destructure constant {:?}", val),
+    };
+
+    let fields_iter = (0..field_count).map(|i| {
+        let field_op = ecx.operand_field(down, i).unwrap();
+        let val = op_to_const(&ecx, field_op);
+        ty::Const::from_value(tcx, val, field_op.layout.ty)
+    });
+    let fields = tcx.arena.alloc_from_iter(fields_iter);
+
+    mir::DestructuredConst { variant, fields }
+}
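The shape dispatch at the heart of `destructure_const` — no discriminant for arrays and tuples, a discriminant read plus downcast selecting the variant for ADTs — sketched over a toy value representation; every type below is an illustrative stand-in:

enum Val {
    Array(Vec<i64>),
    Tuple(Vec<i64>),
    Enum { variant: usize, fields: Vec<i64> },
}

#[derive(Debug, PartialEq)]
struct Destructured {
    variant: Option<usize>,
    fields: Vec<i64>,
}

fn destructure(val: Val) -> Destructured {
    let (variant, fields) = match val {
        // Arrays and tuples have no discriminant to read.
        Val::Array(fields) | Val::Tuple(fields) => (None, fields),
        // For ADTs, the discriminant selects the variant whose fields we walk.
        Val::Enum { variant, fields } => (Some(variant), fields),
    };
    Destructured { variant, fields }
}

fn main() {
    let d = destructure(Val::Enum { variant: 1, fields: vec![7] });
    assert_eq!(d, Destructured { variant: Some(1), fields: vec![7] });
}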