author     bors <bors@rust-lang.org>    2022-08-30 08:29:42 +0000
committer  bors <bors@rust-lang.org>    2022-08-30 08:29:42 +0000
commit     0631ea5d73f4a3199c776687b12c20c50a91f0d2 (patch)
tree       59e751484047303e15b3c32cf0b545eac02e59af /compiler
parent     a0d07093f80a0206f42d3dbada66212eda52b694 (diff)
parent     ec95a929044afad3cfad99231378a27870a9e28c (diff)
download   rust-0631ea5d73f4a3199c776687b12c20c50a91f0d2.tar.gz
           rust-0631ea5d73f4a3199c776687b12c20c50a91f0d2.zip
Auto merge of #101183 - Dylan-DPC:rollup-6kewixv, r=Dylan-DPC
Rollup of 9 pull requests

Successful merges:

 - #95376 (Add `vec::Drain{,Filter}::keep_rest`)
 - #100092 (Fall back when relating two opaques by substs in MIR typeck)
 - #101019 (Suggest returning closure as `impl Fn`)
 - #101022 (Erase late bound regions before comparing types in `suggest_dereferences`)
 - #101101 (interpret: make read-pointer-as-bytes a CTFE-only error with extra information)
 - #101123 (Remove `register_attr` feature)
 - #101175 (Don't --bless in pre-push hook)
 - #101176 (rustdoc: remove unused CSS selectors for `.table-display`)
 - #101180 (Add another MaybeUninit array test with const)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
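
One of the rolled-up changes, #95376, adds `keep_rest` to `vec::Drain` and `vec::DrainFilter`: it ends the drain early and keeps the elements that were not yet yielded in the vector. A minimal usage sketch, assuming the unstable `drain_keep_rest` feature gate named in that PR:

    #![feature(drain_keep_rest)]

    fn main() {
        let mut v = vec![1, 2, 3, 4, 5];

        let mut drain = v.drain(..);
        assert_eq!(drain.next(), Some(1)); // consume the first two elements
        assert_eq!(drain.next(), Some(2));

        // Stop draining early; everything not yet yielded stays in the vector.
        drain.keep_rest();
        assert_eq!(v, [3, 4, 5]);
    }
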
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/rustc_codegen_cranelift/src/constant.rs | 2
-rw-r--r--  compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs | 5
-rw-r--r--  compiler/rustc_codegen_gcc/src/consts.rs | 12
-rw-r--r--  compiler/rustc_codegen_llvm/src/consts.rs | 22
-rw-r--r--  compiler/rustc_const_eval/src/const_eval/error.rs | 13
-rw-r--r--  compiler/rustc_const_eval/src/const_eval/eval_queries.rs | 8
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intern.rs | 14
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intrinsics.rs | 21
-rw-r--r--  compiler/rustc_const_eval/src/interpret/machine.rs | 2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/memory.rs | 56
-rw-r--r--  compiler/rustc_const_eval/src/interpret/operand.rs | 2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/place.rs | 4
-rw-r--r--  compiler/rustc_const_eval/src/interpret/projection.rs | 4
-rw-r--r--  compiler/rustc_const_eval/src/interpret/validity.rs | 72
-rw-r--r--  compiler/rustc_feature/src/active.rs | 2
-rw-r--r--  compiler/rustc_feature/src/builtin_attrs.rs | 4
-rw-r--r--  compiler/rustc_feature/src/removed.rs | 3
-rw-r--r--  compiler/rustc_hir/src/def.rs | 10
-rw-r--r--  compiler/rustc_infer/src/infer/error_reporting/mod.rs | 5
-rw-r--r--  compiler/rustc_infer/src/infer/nll_relate/mod.rs | 58
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/allocation.rs | 295
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/error.rs | 13
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/mod.rs | 2
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/pointer.rs | 15
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/value.rs | 6
-rw-r--r--  compiler/rustc_middle/src/mir/mod.rs | 10
-rw-r--r--  compiler/rustc_middle/src/mir/pretty.rs | 30
-rw-r--r--  compiler/rustc_middle/src/ty/impls_ty.rs | 2
-rw-r--r--  compiler/rustc_middle/src/ty/print/pretty.rs | 39
-rw-r--r--  compiler/rustc_middle/src/ty/sty.rs | 4
-rw-r--r--  compiler/rustc_monomorphize/src/collector.rs | 6
-rw-r--r--  compiler/rustc_resolve/src/diagnostics.rs | 10
-rw-r--r--  compiler/rustc_resolve/src/ident.rs | 12
-rw-r--r--  compiler/rustc_resolve/src/lib.rs | 6
-rw-r--r--  compiler/rustc_resolve/src/macros.rs | 29
-rw-r--r--  compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs | 12
-rw-r--r--  compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs | 45
-rw-r--r--  compiler/rustc_typeck/src/check/mod.rs | 6
-rw-r--r--  compiler/rustc_typeck/src/errors.rs | 4
39 files changed, 444 insertions, 421 deletions
diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs
index e2b68f24a21..cb5d73a7e0b 100644
--- a/compiler/rustc_codegen_cranelift/src/constant.rs
+++ b/compiler/rustc_codegen_cranelift/src/constant.rs
@@ -430,7 +430,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
         let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()).to_vec();
         data_ctx.define(bytes.into_boxed_slice());
 
-        for &(offset, alloc_id) in alloc.relocations().iter() {
+        for &(offset, alloc_id) in alloc.provenance().iter() {
             let addend = {
                 let endianness = tcx.data_layout.endian;
                 let offset = offset.bytes() as usize;
diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs
index a32b413d45f..1f358b1bbb9 100644
--- a/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs
+++ b/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs
@@ -186,7 +186,10 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
                         let size = Size::from_bytes(
                             4 * ret_lane_count, /* size_of([u32; ret_lane_count]) */
                         );
-                        alloc.inner().get_bytes(fx, alloc_range(offset, size)).unwrap()
+                        alloc
+                            .inner()
+                            .get_bytes_strip_provenance(fx, alloc_range(offset, size))
+                            .unwrap()
                     }
                     _ => unreachable!("{:?}", idx_const),
                 };
diff --git a/compiler/rustc_codegen_gcc/src/consts.rs b/compiler/rustc_codegen_gcc/src/consts.rs
index c0b8d21818f..356c03ee3c1 100644
--- a/compiler/rustc_codegen_gcc/src/consts.rs
+++ b/compiler/rustc_codegen_gcc/src/consts.rs
@@ -127,7 +127,7 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
             //
             // We could remove this hack whenever we decide to drop macOS 10.10 support.
             if self.tcx.sess.target.options.is_like_osx {
-                // The `inspect` method is okay here because we checked relocations, and
+                // The `inspect` method is okay here because we checked for provenance, and
                 // because we are doing this access to inspect the final interpreter state
                 // (not as part of the interpreter execution).
                 //
@@ -296,17 +296,17 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
 
 pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAllocation<'tcx>) -> RValue<'gcc> {
     let alloc = alloc.inner();
-    let mut llvals = Vec::with_capacity(alloc.relocations().len() + 1);
+    let mut llvals = Vec::with_capacity(alloc.provenance().len() + 1);
     let dl = cx.data_layout();
     let pointer_size = dl.pointer_size.bytes() as usize;
 
     let mut next_offset = 0;
-    for &(offset, alloc_id) in alloc.relocations().iter() {
+    for &(offset, alloc_id) in alloc.provenance().iter() {
         let offset = offset.bytes();
         assert_eq!(offset as usize as u64, offset);
         let offset = offset as usize;
         if offset > next_offset {
-            // This `inspect` is okay since we have checked that it is not within a relocation, it
+            // This `inspect` is okay since we have checked that it is not within a pointer with provenance, it
             // is within the bounds of the allocation, and it doesn't affect interpreter execution
             // (we inspect the result after interpreter execution). Any undef byte is replaced with
             // some arbitrary byte value.
@@ -319,7 +319,7 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
             read_target_uint( dl.endian,
                 // This `inspect` is okay since it is within the bounds of the allocation, it doesn't
                 // affect interpreter execution (we inspect the result after interpreter execution),
-                // and we properly interpret the relocation as a relocation pointer offset.
+                // and we properly interpret the provenance as a relocation pointer offset.
                 alloc.inspect_with_uninit_and_ptr_outside_interpreter(offset..(offset + pointer_size)),
             )
             .expect("const_alloc_to_llvm: could not read relocation pointer")
@@ -336,7 +336,7 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
     }
     if alloc.len() >= next_offset {
         let range = next_offset..alloc.len();
-        // This `inspect` is okay since we have check that it is after all relocations, it is
+        // This `inspect` is okay since we have check that it is after all provenance, it is
         // within the bounds of the allocation, and it doesn't affect interpreter execution (we
         // inspect the result after interpreter execution). Any undef byte is replaced with some
         // arbitrary byte value.
diff --git a/compiler/rustc_codegen_llvm/src/consts.rs b/compiler/rustc_codegen_llvm/src/consts.rs
index f41ff325590..d3e33da2799 100644
--- a/compiler/rustc_codegen_llvm/src/consts.rs
+++ b/compiler/rustc_codegen_llvm/src/consts.rs
@@ -27,12 +27,12 @@ use tracing::debug;
 
 pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<'_>) -> &'ll Value {
     let alloc = alloc.inner();
-    let mut llvals = Vec::with_capacity(alloc.relocations().len() + 1);
+    let mut llvals = Vec::with_capacity(alloc.provenance().len() + 1);
     let dl = cx.data_layout();
     let pointer_size = dl.pointer_size.bytes() as usize;
 
-    // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`,
-    // so `range` must be within the bounds of `alloc` and not contain or overlap a relocation.
+    // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`, so `range`
+    // must be within the bounds of `alloc` and not contain or overlap a pointer provenance.
     fn append_chunks_of_init_and_uninit_bytes<'ll, 'a, 'b>(
         llvals: &mut Vec<&'ll Value>,
         cx: &'a CodegenCx<'ll, 'b>,
@@ -79,12 +79,12 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
     }
 
     let mut next_offset = 0;
-    for &(offset, alloc_id) in alloc.relocations().iter() {
+    for &(offset, alloc_id) in alloc.provenance().iter() {
         let offset = offset.bytes();
         assert_eq!(offset as usize as u64, offset);
         let offset = offset as usize;
         if offset > next_offset {
-            // This `inspect` is okay since we have checked that it is not within a relocation, it
+            // This `inspect` is okay since we have checked that there is no provenance, it
             // is within the bounds of the allocation, and it doesn't affect interpreter execution
             // (we inspect the result after interpreter execution).
             append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
@@ -93,7 +93,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
             dl.endian,
             // This `inspect` is okay since it is within the bounds of the allocation, it doesn't
             // affect interpreter execution (we inspect the result after interpreter execution),
-            // and we properly interpret the relocation as a relocation pointer offset.
+            // and we properly interpret the provenance as a relocation pointer offset.
             alloc.inspect_with_uninit_and_ptr_outside_interpreter(offset..(offset + pointer_size)),
         )
         .expect("const_alloc_to_llvm: could not read relocation pointer")
@@ -121,7 +121,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
     }
     if alloc.len() >= next_offset {
         let range = next_offset..alloc.len();
-        // This `inspect` is okay since we have check that it is after all relocations, it is
+        // This `inspect` is okay since we have check that it is after all provenance, it is
         // within the bounds of the allocation, and it doesn't affect interpreter execution (we
         // inspect the result after interpreter execution).
         append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
@@ -479,7 +479,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
                 //
                 // We could remove this hack whenever we decide to drop macOS 10.10 support.
                 if self.tcx.sess.target.is_like_osx {
-                    // The `inspect` method is okay here because we checked relocations, and
+                    // The `inspect` method is okay here because we checked for provenance, and
                     // because we are doing this access to inspect the final interpreter state
                     // (not as part of the interpreter execution).
                     //
@@ -487,7 +487,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
                     // happens to be zero. Instead, we should only check the value of defined bytes
                     // and set all undefined bytes to zero if this allocation is headed for the
                     // BSS.
-                    let all_bytes_are_zero = alloc.relocations().is_empty()
+                    let all_bytes_are_zero = alloc.provenance().is_empty()
                         && alloc
                             .inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len())
                             .iter()
@@ -511,9 +511,9 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
                         section.as_str().as_ptr().cast(),
                         section.as_str().len() as c_uint,
                     );
-                    assert!(alloc.relocations().is_empty());
+                    assert!(alloc.provenance().is_empty());
 
-                    // The `inspect` method is okay here because we checked relocations, and
+                    // The `inspect` method is okay here because we checked for provenance, and
                     // because we are doing this access to inspect the final interpreter state (not
                     // as part of the interpreter execution).
                     let bytes =
diff --git a/compiler/rustc_const_eval/src/const_eval/error.rs b/compiler/rustc_const_eval/src/const_eval/error.rs
index bba4b1815b4..09d53331b5b 100644
--- a/compiler/rustc_const_eval/src/const_eval/error.rs
+++ b/compiler/rustc_const_eval/src/const_eval/error.rs
@@ -10,6 +10,7 @@ use rustc_span::{Span, Symbol};
 use super::InterpCx;
 use crate::interpret::{
     struct_error, ErrorHandled, FrameInfo, InterpError, InterpErrorInfo, Machine, MachineStopType,
+    UnsupportedOpInfo,
 };
 
 /// The CTFE machine has some custom error kinds.
@@ -149,6 +150,18 @@ impl<'tcx> ConstEvalErr<'tcx> {
             if let Some(span_msg) = span_msg {
                 err.span_label(self.span, span_msg);
             }
+            // Add some more context for select error types.
+            match self.error {
+                InterpError::Unsupported(
+                    UnsupportedOpInfo::ReadPointerAsBytes
+                    | UnsupportedOpInfo::PartialPointerOverwrite(_)
+                    | UnsupportedOpInfo::PartialPointerCopy(_),
+                ) => {
+                    err.help("this code performed an operation that depends on the underlying bytes representing a pointer");
+                    err.help("the absolute address of a pointer is not known at compile-time, so such operations are not supported");
+                }
+                _ => {}
+            }
             // Add spans for the stacktrace. Don't print a single-line backtrace though.
             if self.stacktrace.len() > 1 {
                 // Helper closure to print duplicated lines.
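
The two `help` notes added above are emitted for operations that need the concrete bytes of a pointer, which CTFE cannot provide. A minimal sketch of const code in that class (illustrative only, not taken from this PR's tests; the exact wording of the main error depends on the toolchain):

    const ADDR: usize = {
        let x = 0u8;
        let p = &x as *const u8;
        // The absolute address of `p` is not known at compile time, so asking for
        // its integer value fails CTFE and is reported with the help notes above.
        unsafe { std::mem::transmute::<*const u8, usize>(p) }
    };

    fn main() {
        println!("{ADDR}");
    }
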
diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
index 4601914c25f..b46f71fc78a 100644
--- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
+++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -2,8 +2,8 @@ use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr};
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
     intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
-    Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
-    StackPopCleanup,
+    Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
+    RefTracking, StackPopCleanup,
 };
 
 use rustc_hir::def::DefKind;
@@ -387,7 +387,9 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
                     ecx.tcx,
                     "it is undefined behavior to use this value",
                     |diag| {
-                        diag.note(NOTE_ON_UNDEFINED_BEHAVIOR_ERROR);
+                        if matches!(err.error, InterpError::UndefinedBehavior(_)) {
+                            diag.note(NOTE_ON_UNDEFINED_BEHAVIOR_ERROR);
+                        }
                         diag.note(&format!(
                             "the raw bytes of the constant ({}",
                             display_allocation(
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 376b8872c90..66ab3f15716 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -134,7 +134,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
         alloc.mutability = Mutability::Not;
     };
     // link the alloc id to the actual allocation
-    leftover_allocations.extend(alloc.relocations().iter().map(|&(_, alloc_id)| alloc_id));
+    leftover_allocations.extend(alloc.provenance().iter().map(|&(_, alloc_id)| alloc_id));
     let alloc = tcx.intern_const_alloc(alloc);
     tcx.set_alloc_id_memory(alloc_id, alloc);
     None
@@ -191,10 +191,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
                     return Ok(true);
                 };
 
-                // If there are no relocations in this allocation, it does not contain references
+                // If there is no provenance in this allocation, it does not contain references
                 // that point to another allocation, and we can avoid the interning walk.
                 if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
-                    if !alloc.has_relocations() {
+                    if !alloc.has_provenance() {
                         return Ok(false);
                     }
                 } else {
@@ -233,8 +233,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
     }
 
     fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
-        // Handle Reference types, as these are the only relocations supported by const eval.
-        // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
+        // Handle Reference types, as these are the only types with provenance supported by const eval.
+        // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
         let tcx = self.ecx.tcx;
         let ty = mplace.layout.ty;
         if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
@@ -410,7 +410,7 @@ pub fn intern_const_alloc_recursive<
             // references and a `leftover_allocations` set (where we only have a todo-list here).
             // So we hand-roll the interning logic here again.
             match intern_kind {
-                // Statics may contain mutable allocations even behind relocations.
+                // Statics may point to mutable allocations.
                 // Even for immutable statics it would be ok to have mutable allocations behind
                 // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
                 InternKind::Static(_) => {}
@@ -441,7 +441,7 @@ pub fn intern_const_alloc_recursive<
             }
             let alloc = tcx.intern_const_alloc(alloc);
             tcx.set_alloc_id_memory(alloc_id, alloc);
-            for &(_, alloc_id) in alloc.inner().relocations().iter() {
+            for &(_, alloc_id) in alloc.inner().provenance().iter() {
                 if leftover_allocations.insert(alloc_id) {
                     todo.push(alloc_id);
                 }
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
index 6f3bd3bf4c5..a8ec8447f64 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
@@ -687,10 +687,23 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         let layout = self.layout_of(lhs.layout.ty.builtin_deref(true).unwrap().ty)?;
         assert!(!layout.is_unsized());
 
-        let lhs = self.read_pointer(lhs)?;
-        let rhs = self.read_pointer(rhs)?;
-        let lhs_bytes = self.read_bytes_ptr(lhs, layout.size)?;
-        let rhs_bytes = self.read_bytes_ptr(rhs, layout.size)?;
+        let get_bytes = |this: &InterpCx<'mir, 'tcx, M>,
+                         op: &OpTy<'tcx, <M as Machine<'mir, 'tcx>>::Provenance>,
+                         size|
+         -> InterpResult<'tcx, &[u8]> {
+            let ptr = this.read_pointer(op)?;
+            let Some(alloc_ref) = self.get_ptr_alloc(ptr, size, Align::ONE)? else {
+                // zero-sized access
+                return Ok(&[]);
+            };
+            if alloc_ref.has_provenance() {
+                throw_ub_format!("`raw_eq` on bytes with provenance");
+            }
+            alloc_ref.get_bytes_strip_provenance()
+        };
+
+        let lhs_bytes = get_bytes(self, lhs, layout.size)?;
+        let rhs_bytes = get_bytes(self, rhs, layout.size)?;
         Ok(Scalar::from_bool(lhs_bytes == rhs_bytes))
     }
 }
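
The rewritten `raw_eq` path above fetches the operand bytes through `get_ptr_alloc` and refuses to compare bytes that carry provenance. A sketch of what that distinction means for users of the intrinsic (the `const_intrinsic_raw_eq` gate name is an assumption; check the toolchain's actual const-stability attribute):

    #![feature(core_intrinsics, const_intrinsic_raw_eq)]

    use std::intrinsics::raw_eq;

    // Plain integer bytes: fine, this just compares the raw representations.
    const SAME: bool = unsafe { raw_eq(&7u32, &7u32) };

    // The bytes of a reference carry provenance, so after this change CTFE rejects
    // the comparison with "`raw_eq` on bytes with provenance" instead of the old
    // generic read-pointer-as-bytes error:
    //
    //     const WITH_PROVENANCE: bool = unsafe { raw_eq(&&7u32, &&7u32) };

    fn main() {
        assert!(SAME);
    }
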
diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs
index 6ca98371497..5aabb14fba8 100644
--- a/compiler/rustc_const_eval/src/interpret/machine.rs
+++ b/compiler/rustc_const_eval/src/interpret/machine.rs
@@ -315,7 +315,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
     /// cache the result. (This relies on `AllocMap::get_or` being able to add the
     /// owned allocation to the map even when the map is shared.)
     ///
-    /// This must only fail if `alloc` contains relocations.
+    /// This must only fail if `alloc` contains provenance.
     fn adjust_allocation<'b>(
         ecx: &InterpCx<'mir, 'tcx, Self>,
         id: AllocId,
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index c4e93770292..69dbc9592fa 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -214,7 +214,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         self.allocate_raw_ptr(alloc, kind).unwrap()
     }
 
-    /// This can fail only of `alloc` contains relocations.
+    /// This can fail only of `alloc` contains provenance.
     pub fn allocate_raw_ptr(
         &mut self,
         alloc: Allocation,
@@ -794,10 +794,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             todo.extend(static_roots);
             while let Some(id) = todo.pop() {
                 if reachable.insert(id) {
-                    // This is a new allocation, add its relocations to `todo`.
+                    // This is a new allocation, add the allocation it points to to `todo`.
                     if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                         todo.extend(
-                            alloc.relocations().values().filter_map(|prov| prov.get_alloc_id()),
+                            alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()),
                         );
                     }
                 }
@@ -833,7 +833,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> std::fmt::Debug for DumpAllocs<'a,
             allocs_to_print: &mut VecDeque<AllocId>,
             alloc: &Allocation<Prov, Extra>,
         ) -> std::fmt::Result {
-            for alloc_id in alloc.relocations().values().filter_map(|prov| prov.get_alloc_id()) {
+            for alloc_id in alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()) {
                 allocs_to_print.push_back(alloc_id);
             }
             write!(fmt, "{}", display_allocation(tcx, alloc))
@@ -953,24 +953,25 @@ impl<'tcx, 'a, Prov: Provenance, Extra> AllocRef<'a, 'tcx, Prov, Extra> {
     }
 
     /// `range` is relative to this allocation reference, not the base of the allocation.
-    pub fn check_bytes(&self, range: AllocRange) -> InterpResult<'tcx> {
+    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
         Ok(self
             .alloc
-            .check_bytes(&self.tcx, self.range.subrange(range))
+            .get_bytes_strip_provenance(&self.tcx, self.range)
             .map_err(|e| e.to_interp_error(self.alloc_id))?)
     }
 
-    /// Returns whether the allocation has relocations for the entire range of the `AllocRef`.
-    pub(crate) fn has_relocations(&self) -> bool {
-        self.alloc.has_relocations(&self.tcx, self.range)
+    /// Returns whether the allocation has provenance anywhere in the range of the `AllocRef`.
+    pub(crate) fn has_provenance(&self) -> bool {
+        self.alloc.range_has_provenance(&self.tcx, self.range)
     }
 }
 
 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
-    /// Reads the given number of bytes from memory. Returns them as a slice.
+    /// Reads the given number of bytes from memory, and strips their provenance if possible.
+    /// Returns them as a slice.
     ///
     /// Performs appropriate bounds checks.
-    pub fn read_bytes_ptr(
+    pub fn read_bytes_ptr_strip_provenance(
         &self,
         ptr: Pointer<Option<M::Provenance>>,
         size: Size,
@@ -983,7 +984,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         // (We are staying inside the bounds here so all is good.)
         Ok(alloc_ref
             .alloc
-            .get_bytes(&alloc_ref.tcx, alloc_ref.range)
+            .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
             .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?)
     }
 
@@ -1078,17 +1079,20 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             return Ok(());
         };
 
-        // This checks relocation edges on the src, which needs to happen before
-        // `prepare_relocation_copy`.
-        let src_bytes = src_alloc
-            .get_bytes_with_uninit_and_ptr(&tcx, src_range)
-            .map_err(|e| e.to_interp_error(src_alloc_id))?
-            .as_ptr(); // raw ptr, so we can also get a ptr to the destination allocation
-        // first copy the relocations to a temporary buffer, because
-        // `get_bytes_mut` will clear the relocations, which is correct,
-        // since we don't want to keep any relocations at the target.
-        let relocations =
-            src_alloc.prepare_relocation_copy(self, src_range, dest_offset, num_copies);
+        // Checks provenance edges on the src, which needs to happen before
+        // `prepare_provenance_copy`.
+        if src_alloc.range_has_provenance(&tcx, alloc_range(src_range.start, Size::ZERO)) {
+            throw_unsup!(PartialPointerCopy(Pointer::new(src_alloc_id, src_range.start)));
+        }
+        if src_alloc.range_has_provenance(&tcx, alloc_range(src_range.end(), Size::ZERO)) {
+            throw_unsup!(PartialPointerCopy(Pointer::new(src_alloc_id, src_range.end())));
+        }
+        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr(); // raw ptr, so we can also get a ptr to the destination allocation
+        // first copy the provenance to a temporary buffer, because
+        // `get_bytes_mut` will clear the provenance, which is correct,
+        // since we don't want to keep any provenance at the target.
+        let provenance =
+            src_alloc.prepare_provenance_copy(self, src_range, dest_offset, num_copies);
         // Prepare a copy of the initialization mask.
         let compressed = src_alloc.compress_uninit_range(src_range);
 
@@ -1117,7 +1121,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             dest_alloc
                 .write_uninit(&tcx, dest_range)
                 .map_err(|e| e.to_interp_error(dest_alloc_id))?;
-            // We can forget about the relocations, this is all not initialized anyway.
+            // We can forget about the provenance, this is all not initialized anyway.
             return Ok(());
         }
 
@@ -1161,8 +1165,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             alloc_range(dest_offset, size), // just a single copy (i.e., not full `dest_range`)
             num_copies,
         );
-        // copy the relocations to the destination
-        dest_alloc.mark_relocation_range(relocations);
+        // copy the provenance to the destination
+        dest_alloc.mark_provenance_range(provenance);
 
         Ok(())
     }
diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs
index 91a97fe4d4d..35c2cf8102d 100644
--- a/compiler/rustc_const_eval/src/interpret/operand.rs
+++ b/compiler/rustc_const_eval/src/interpret/operand.rs
@@ -415,7 +415,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     /// Turn the wide MPlace into a string (must already be dereferenced!)
     pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str> {
         let len = mplace.len(self)?;
-        let bytes = self.read_bytes_ptr(mplace.ptr, Size::from_bytes(len))?;
+        let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len))?;
         let str = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?;
         Ok(str)
     }
diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index d56323448ce..a03b0dfb603 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -2,8 +2,6 @@
 //! into a place.
 //! All high-level functions to write to memory work on places as destinations.
 
-use std::hash::Hash;
-
 use rustc_ast::Mutability;
 use rustc_middle::mir;
 use rustc_middle::ty;
@@ -290,7 +288,7 @@ impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
 // FIXME: Working around https://github.com/rust-lang/rust/issues/54385
 impl<'mir, 'tcx: 'mir, Prov, M> InterpCx<'mir, 'tcx, M>
 where
-    Prov: Provenance + Eq + Hash + 'static,
+    Prov: Provenance + 'static,
     M: Machine<'mir, 'tcx, Provenance = Prov>,
 {
     /// Take a value, which represents a (thin or wide) reference, and make it a place.
diff --git a/compiler/rustc_const_eval/src/interpret/projection.rs b/compiler/rustc_const_eval/src/interpret/projection.rs
index 16ce5bc7175..67dc9011ea2 100644
--- a/compiler/rustc_const_eval/src/interpret/projection.rs
+++ b/compiler/rustc_const_eval/src/interpret/projection.rs
@@ -7,8 +7,6 @@
 //! but we still need to do bounds checking and adjust the layout. To not duplicate that with MPlaceTy, we actually
 //! implement the logic on OpTy, and MPlaceTy calls that.
 
-use std::hash::Hash;
-
 use rustc_middle::mir;
 use rustc_middle::ty;
 use rustc_middle::ty::layout::LayoutOf;
@@ -22,7 +20,7 @@ use super::{
 // FIXME: Working around https://github.com/rust-lang/rust/issues/54385
 impl<'mir, 'tcx: 'mir, Prov, M> InterpCx<'mir, 'tcx, M>
 where
-    Prov: Provenance + Eq + Hash + 'static,
+    Prov: Provenance + 'static,
     M: Machine<'mir, 'tcx, Provenance = Prov>,
 {
     //# Field access
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index 5f77c9b8892..0382e2d5805 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -20,9 +20,11 @@ use rustc_target::abi::{Abi, Scalar as ScalarAbi, Size, VariantIdx, Variants, Wr
 
 use std::hash::Hash;
 
+// for the validation errors
+use super::UndefinedBehaviorInfo::*;
 use super::{
-    alloc_range, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
-    Machine, MemPlaceMeta, OpTy, Scalar, ValueVisitor,
+    CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine,
+    MemPlaceMeta, OpTy, Scalar, ValueVisitor,
 };
 
 macro_rules! throw_validation_failure {
@@ -60,6 +62,7 @@ macro_rules! throw_validation_failure {
 /// });
 /// ```
 ///
+/// The patterns must be of type `UndefinedBehaviorInfo`.
 /// An additional expected parameter can also be added to the failure message:
 ///
 /// ```
@@ -87,7 +90,7 @@ macro_rules! try_validation {
             // allocation here as this can only slow down builds that fail anyway.
             Err(e) => match e.kind() {
                 $(
-                    $($p)|+ =>
+                    InterpError::UndefinedBehavior($($p)|+) =>
                        throw_validation_failure!(
                             $where,
                             { $( $what_fmt ),+ } $( expected { $( $expected_fmt ),+ } )?
@@ -313,8 +316,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
         Ok(try_validation!(
             self.ecx.read_immediate(op),
             self.path,
-            err_unsup!(ReadPointerAsBytes) => { "(potentially part of) a pointer" } expected { "{expected}" },
-            err_ub!(InvalidUninitBytes(None)) => { "uninitialized memory" } expected { "{expected}" }
+            InvalidUninitBytes(None) => { "uninitialized memory" } expected { "{expected}" }
         ))
     }
 
@@ -339,18 +341,14 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 let (_ty, _trait) = try_validation!(
                     self.ecx.get_ptr_vtable(vtable),
                     self.path,
-                    err_ub!(DanglingIntPointer(..)) |
-                    err_ub!(InvalidVTablePointer(..)) =>
+                    DanglingIntPointer(..) |
+                    InvalidVTablePointer(..) =>
                         { "{vtable}" } expected { "a vtable pointer" },
                 );
                 // FIXME: check if the type/trait match what ty::Dynamic says?
             }
             ty::Slice(..) | ty::Str => {
-                let _len = try_validation!(
-                    meta.unwrap_meta().to_machine_usize(self.ecx),
-                    self.path,
-                    err_unsup!(ReadPointerAsBytes) => { "non-integer slice length in wide pointer" },
-                );
+                let _len = meta.unwrap_meta().to_machine_usize(self.ecx)?;
                 // We do not check that `len * elem_size <= isize::MAX`:
                 // that is only required for references, and there it falls out of the
                 // "dereferenceable" check performed by Stacked Borrows.
@@ -380,7 +378,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
         let size_and_align = try_validation!(
             self.ecx.size_and_align_of_mplace(&place),
             self.path,
-            err_ub!(InvalidMeta(msg)) => { "invalid {} metadata: {}", kind, msg },
+            InvalidMeta(msg) => { "invalid {} metadata: {}", kind, msg },
         );
         let (size, align) = size_and_align
             // for the purpose of validity, consider foreign types to have
@@ -396,21 +394,21 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message
             ),
             self.path,
-            err_ub!(AlignmentCheckFailed { required, has }) =>
+            AlignmentCheckFailed { required, has } =>
                 {
                     "an unaligned {kind} (required {} byte alignment but found {})",
                     required.bytes(),
                     has.bytes()
                 },
-            err_ub!(DanglingIntPointer(0, _)) =>
+            DanglingIntPointer(0, _) =>
                 { "a null {kind}" },
-            err_ub!(DanglingIntPointer(i, _)) =>
+            DanglingIntPointer(i, _) =>
                 { "a dangling {kind} (address {i:#x} is unallocated)" },
-            err_ub!(PointerOutOfBounds { .. }) =>
+            PointerOutOfBounds { .. } =>
                 { "a dangling {kind} (going beyond the bounds of its allocation)" },
             // This cannot happen during const-eval (because interning already detects
             // dangling pointers), but it can happen in Miri.
-            err_ub!(PointerUseAfterFree(..)) =>
+            PointerUseAfterFree(..) =>
                 { "a dangling {kind} (use-after-free)" },
         );
         // Do not allow pointers to uninhabited types.
@@ -498,7 +496,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 try_validation!(
                     value.to_bool(),
                     self.path,
-                    err_ub!(InvalidBool(..)) =>
+                    InvalidBool(..) =>
                         { "{:x}", value } expected { "a boolean" },
                 );
                 Ok(true)
@@ -508,7 +506,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 try_validation!(
                     value.to_char(),
                     self.path,
-                    err_ub!(InvalidChar(..)) =>
+                    InvalidChar(..) =>
                         { "{:x}", value } expected { "a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)" },
                 );
                 Ok(true)
@@ -567,8 +565,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                     let _fn = try_validation!(
                         self.ecx.get_ptr_fn(ptr),
                         self.path,
-                        err_ub!(DanglingIntPointer(..)) |
-                        err_ub!(InvalidFunctionPointer(..)) =>
+                        DanglingIntPointer(..) |
+                        InvalidFunctionPointer(..) =>
                             { "{ptr}" } expected { "a function pointer" },
                     );
                     // FIXME: Check if the signature matches
@@ -683,12 +681,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
             Ok(try_validation!(
                 this.ecx.read_discriminant(op),
                 this.path,
-                err_ub!(InvalidTag(val)) =>
+                InvalidTag(val) =>
                     { "{:x}", val } expected { "a valid enum tag" },
-                err_ub!(InvalidUninitBytes(None)) =>
+                InvalidUninitBytes(None) =>
                     { "uninitialized bytes" } expected { "a valid enum tag" },
-                err_unsup!(ReadPointerAsBytes) =>
-                    { "a pointer" } expected { "a valid enum tag" },
             )
             .1)
         })
@@ -828,10 +824,9 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 let mplace = op.assert_mem_place(); // strings are unsized and hence never immediate
                 let len = mplace.len(self.ecx)?;
                 try_validation!(
-                    self.ecx.read_bytes_ptr(mplace.ptr, Size::from_bytes(len)),
+                    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len)),
                     self.path,
-                    err_ub!(InvalidUninitBytes(..)) => { "uninitialized data in `str`" },
-                    err_unsup!(ReadPointerAsBytes) => { "a pointer in `str`" },
+                    InvalidUninitBytes(..) => { "uninitialized data in `str`" },
                 );
             }
             ty::Array(tys, ..) | ty::Slice(tys)
@@ -879,9 +874,9 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 // We also accept uninit, for consistency with the slow path.
                 let alloc = self.ecx.get_ptr_alloc(mplace.ptr, size, mplace.align)?.expect("we already excluded size 0");
 
-                match alloc.check_bytes(alloc_range(Size::ZERO, size)) {
+                match alloc.get_bytes_strip_provenance() {
                     // In the happy case, we needn't check anything else.
-                    Ok(()) => {}
+                    Ok(_) => {}
                     // Some error happened, try to provide a more detailed description.
                     Err(err) => {
                         // For some errors we might be able to provide extra information.
@@ -899,9 +894,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
 
                                 throw_validation_failure!(self.path, { "uninitialized bytes" })
                             }
-                            err_unsup!(ReadPointerAsBytes) => {
-                                throw_validation_failure!(self.path, { "a pointer" } expected { "plain (non-pointer) bytes" })
-                            }
 
                             // Propagate upwards (that will also check for unexpected errors).
                             _ => return Err(err),
@@ -942,14 +934,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             Ok(()) => Ok(()),
             // Pass through validation failures.
             Err(err) if matches!(err.kind(), err_ub!(ValidationFailure { .. })) => Err(err),
-            // Also pass through InvalidProgram, those just indicate that we could not
-            // validate and each caller will know best what to do with them.
-            Err(err) if matches!(err.kind(), InterpError::InvalidProgram(_)) => Err(err),
-            // Avoid other errors as those do not show *where* in the value the issue lies.
-            Err(err) => {
+            // Complain about any other kind of UB error -- those are bad because we'd like to
+            // report them in a way that shows *where* in the value the issue lies.
+            Err(err) if matches!(err.kind(), InterpError::UndefinedBehavior(_)) => {
                 err.print_backtrace();
-                bug!("Unexpected error during validation: {}", err);
+                bug!("Unexpected Undefined Behavior error during validation: {}", err);
             }
+            // Pass through everything else.
+            Err(err) => Err(err),
         }
     }
 
diff --git a/compiler/rustc_feature/src/active.rs b/compiler/rustc_feature/src/active.rs
index a5091621f66..e09c3ccbc75 100644
--- a/compiler/rustc_feature/src/active.rs
+++ b/compiler/rustc_feature/src/active.rs
@@ -481,8 +481,6 @@ declare_features! (
     (incomplete, raw_dylib, "1.40.0", Some(58713), None),
     /// Allows `&raw const $place_expr` and `&raw mut $place_expr` expressions.
     (active, raw_ref_op, "1.41.0", Some(64490), None),
-    /// Allows using the `#[register_attr]` attribute.
-    (active, register_attr, "1.41.0", Some(66080), None),
     /// Allows using the `#[register_tool]` attribute.
     (active, register_tool, "1.41.0", Some(66079), None),
     /// Allows the `#[repr(i128)]` attribute for enums.
diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs
index d520efed9b8..0487270b52a 100644
--- a/compiler/rustc_feature/src/builtin_attrs.rs
+++ b/compiler/rustc_feature/src/builtin_attrs.rs
@@ -459,10 +459,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
     gated!(ffi_pure, Normal, template!(Word), WarnFollowing, experimental!(ffi_pure)),
     gated!(ffi_const, Normal, template!(Word), WarnFollowing, experimental!(ffi_const)),
     gated!(
-        register_attr, CrateLevel, template!(List: "attr1, attr2, ..."), DuplicatesOk,
-        experimental!(register_attr),
-    ),
-    gated!(
         register_tool, CrateLevel, template!(List: "tool1, tool2, ..."), DuplicatesOk,
         experimental!(register_tool),
     ),
diff --git a/compiler/rustc_feature/src/removed.rs b/compiler/rustc_feature/src/removed.rs
index 2ddaf920109..13f275bb6a0 100644
--- a/compiler/rustc_feature/src/removed.rs
+++ b/compiler/rustc_feature/src/removed.rs
@@ -163,6 +163,9 @@ declare_features! (
     (removed, quad_precision_float, "1.0.0", None, None, None),
     (removed, quote, "1.33.0", Some(29601), None, None),
     (removed, reflect, "1.0.0", Some(27749), None, None),
+    /// Allows using the `#[register_attr]` attribute.
+    (removed, register_attr, "CURRENT_RUSTC_VERSION", Some(66080), None,
+     Some("removed in favor of `#![register_tool]`")),
     /// Allows using the macros:
     /// + `__diagnostic_used`
     /// + `__register_diagnostic`
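
With `register_attr` removed, the note above points to `#![register_tool]` as the remaining way to register a custom attribute namespace with the resolver. A minimal sketch (`my_tool` and `my_lint_attr` are hypothetical names, not part of any real tool):

    // No longer accepted:
    //   #![feature(register_attr)]
    //   #![register_attr(my_attr)]

    #![feature(register_tool)]
    #![register_tool(my_tool)]

    // Attributes under the registered tool namespace are accepted by rustc and left
    // for the external tool to interpret.
    #[my_tool::my_lint_attr]
    fn tagged() {}

    fn main() {
        tagged();
    }
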
diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs
index be5b7eccbaf..2d2648a8f35 100644
--- a/compiler/rustc_hir/src/def.rs
+++ b/compiler/rustc_hir/src/def.rs
@@ -45,8 +45,6 @@ pub enum NonMacroAttrKind {
     /// Single-segment custom attribute registered by a derive macro
     /// but used before that derive macro was expanded (deprecated).
     DeriveHelperCompat,
-    /// Single-segment custom attribute registered with `#[register_attr]`.
-    Registered,
 }
 
 /// What kind of definition something is; e.g., `mod` vs `struct`.
@@ -564,15 +562,11 @@ impl NonMacroAttrKind {
             NonMacroAttrKind::DeriveHelper | NonMacroAttrKind::DeriveHelperCompat => {
                 "derive helper attribute"
             }
-            NonMacroAttrKind::Registered => "explicitly registered attribute",
         }
     }
 
     pub fn article(self) -> &'static str {
-        match self {
-            NonMacroAttrKind::Registered => "an",
-            _ => "a",
-        }
+        "a"
     }
 
     /// Users of some attributes cannot mark them as used, so they are considered always used.
@@ -581,7 +575,7 @@ impl NonMacroAttrKind {
             NonMacroAttrKind::Tool
             | NonMacroAttrKind::DeriveHelper
             | NonMacroAttrKind::DeriveHelperCompat => true,
-            NonMacroAttrKind::Builtin(..) | NonMacroAttrKind::Registered => false,
+            NonMacroAttrKind::Builtin(..) => false,
         }
     }
 }
diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
index 465508e1205..ecf75411e5f 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
@@ -2725,7 +2725,10 @@ impl<'tcx> TypeRelation<'tcx> for SameTypeModuloInfer<'_, 'tcx> {
         a: ty::Region<'tcx>,
         b: ty::Region<'tcx>,
     ) -> RelateResult<'tcx, ty::Region<'tcx>> {
-        if (a.is_var() && b.is_free_or_static()) || (b.is_var() && a.is_free_or_static()) || a == b
+        if (a.is_var() && b.is_free_or_static())
+            || (b.is_var() && a.is_free_or_static())
+            || (a.is_var() && b.is_var())
+            || a == b
         {
             Ok(a)
         } else {
diff --git a/compiler/rustc_infer/src/infer/nll_relate/mod.rs b/compiler/rustc_infer/src/infer/nll_relate/mod.rs
index bab4f3e9e36..e7e93116a66 100644
--- a/compiler/rustc_infer/src/infer/nll_relate/mod.rs
+++ b/compiler/rustc_infer/src/infer/nll_relate/mod.rs
@@ -396,6 +396,32 @@ where
 
         generalizer.relate(value, value)
     }
+
+    fn relate_opaques(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+        let (a, b) = if self.a_is_expected() { (a, b) } else { (b, a) };
+        let mut generalize = |ty, ty_is_expected| {
+            let var = self.infcx.next_ty_var_id_in_universe(
+                TypeVariableOrigin {
+                    kind: TypeVariableOriginKind::MiscVariable,
+                    span: self.delegate.span(),
+                },
+                ty::UniverseIndex::ROOT,
+            );
+            if ty_is_expected {
+                self.relate_ty_var((ty, var))
+            } else {
+                self.relate_ty_var((var, ty))
+            }
+        };
+        let (a, b) = match (a.kind(), b.kind()) {
+            (&ty::Opaque(..), _) => (a, generalize(b, false)?),
+            (_, &ty::Opaque(..)) => (generalize(a, true)?, b),
+            _ => unreachable!(),
+        };
+        self.delegate.register_opaque_type(a, b, true)?;
+        trace!(a = ?a.kind(), b = ?b.kind(), "opaque type instantiated");
+        Ok(a)
+    }
 }
 
 /// When we instantiate an inference variable with a value in
@@ -572,32 +598,16 @@ where
             (&ty::Infer(ty::TyVar(vid)), _) => self.relate_ty_var((vid, b)),
 
             (&ty::Opaque(a_def_id, _), &ty::Opaque(b_def_id, _)) if a_def_id == b_def_id => {
-                self.infcx.super_combine_tys(self, a, b)
+                infcx.commit_if_ok(|_| infcx.super_combine_tys(self, a, b)).or_else(|err| {
+                    self.tcx().sess.delay_span_bug(
+                        self.delegate.span(),
+                        "failure to relate an opaque to itself should result in an error later on",
+                    );
+                    if a_def_id.is_local() { self.relate_opaques(a, b) } else { Err(err) }
+                })
             }
             (&ty::Opaque(did, ..), _) | (_, &ty::Opaque(did, ..)) if did.is_local() => {
-                let (a, b) = if self.a_is_expected() { (a, b) } else { (b, a) };
-                let mut generalize = |ty, ty_is_expected| {
-                    let var = infcx.next_ty_var_id_in_universe(
-                        TypeVariableOrigin {
-                            kind: TypeVariableOriginKind::MiscVariable,
-                            span: self.delegate.span(),
-                        },
-                        ty::UniverseIndex::ROOT,
-                    );
-                    if ty_is_expected {
-                        self.relate_ty_var((ty, var))
-                    } else {
-                        self.relate_ty_var((var, ty))
-                    }
-                };
-                let (a, b) = match (a.kind(), b.kind()) {
-                    (&ty::Opaque(..), _) => (a, generalize(b, false)?),
-                    (_, &ty::Opaque(..)) => (generalize(a, true)?, b),
-                    _ => unreachable!(),
-                };
-                self.delegate.register_opaque_type(a, b, true)?;
-                trace!(a = ?a.kind(), b = ?b.kind(), "opaque type instantiated");
-                Ok(a)
+                self.relate_opaques(a, b)
             }
 
             (&ty::Projection(projection_ty), _)
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index 3f618106525..37ec04b07f8 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -34,11 +34,11 @@ pub struct Allocation<Prov = AllocId, Extra = ()> {
     /// The actual bytes of the allocation.
     /// Note that the bytes of a pointer represent the offset of the pointer.
     bytes: Box<[u8]>,
-    /// Maps from byte addresses to extra data for each pointer.
+    /// Maps from byte addresses to extra provenance data for each pointer.
     /// Only the first byte of a pointer is inserted into the map; i.e.,
     /// every entry in this map applies to `pointer_size` consecutive bytes starting
     /// at the given offset.
-    relocations: Relocations<Prov>,
+    provenance: ProvenanceMap<Prov>,
     /// Denotes which part of this allocation is initialized.
     init_mask: InitMask,
     /// The alignment of the allocation to detect unaligned reads.
@@ -84,7 +84,7 @@ impl hash::Hash for Allocation {
         }
 
         // Hash the other fields as usual.
-        self.relocations.hash(state);
+        self.provenance.hash(state);
         self.init_mask.hash(state);
         self.align.hash(state);
         self.mutability.hash(state);
@@ -130,6 +130,8 @@ pub enum AllocError {
     ReadPointerAsBytes,
     /// Partially overwriting a pointer.
     PartialPointerOverwrite(Size),
+    /// Partially copying a pointer.
+    PartialPointerCopy(Size),
     /// Using uninitialized data where it is not allowed.
     InvalidUninitBytes(Option<UninitBytesAccess>),
 }
@@ -152,6 +154,9 @@ impl AllocError {
             PartialPointerOverwrite(offset) => InterpError::Unsupported(
                 UnsupportedOpInfo::PartialPointerOverwrite(Pointer::new(alloc_id, offset)),
             ),
+            PartialPointerCopy(offset) => InterpError::Unsupported(
+                UnsupportedOpInfo::PartialPointerCopy(Pointer::new(alloc_id, offset)),
+            ),
             InvalidUninitBytes(info) => InterpError::UndefinedBehavior(
                 UndefinedBehaviorInfo::InvalidUninitBytes(info.map(|b| (alloc_id, b))),
             ),
@@ -211,7 +216,7 @@ impl<Prov> Allocation<Prov> {
         let size = Size::from_bytes(bytes.len());
         Self {
             bytes,
-            relocations: Relocations::new(),
+            provenance: ProvenanceMap::new(),
             init_mask: InitMask::new(size, true),
             align,
             mutability,
@@ -246,7 +251,7 @@ impl<Prov> Allocation<Prov> {
         let bytes = unsafe { bytes.assume_init() };
         Ok(Allocation {
             bytes,
-            relocations: Relocations::new(),
+            provenance: ProvenanceMap::new(),
             init_mask: InitMask::new(size, false),
             align,
             mutability: Mutability::Mut,
@@ -266,22 +271,22 @@ impl Allocation {
     ) -> Result<Allocation<Prov, Extra>, Err> {
         // Compute new pointer provenance, which also adjusts the bytes.
         let mut bytes = self.bytes;
-        let mut new_relocations = Vec::with_capacity(self.relocations.0.len());
+        let mut new_provenance = Vec::with_capacity(self.provenance.0.len());
         let ptr_size = cx.data_layout().pointer_size.bytes_usize();
         let endian = cx.data_layout().endian;
-        for &(offset, alloc_id) in self.relocations.iter() {
+        for &(offset, alloc_id) in self.provenance.iter() {
             let idx = offset.bytes_usize();
             let ptr_bytes = &mut bytes[idx..idx + ptr_size];
             let bits = read_target_uint(endian, ptr_bytes).unwrap();
             let (ptr_prov, ptr_offset) =
                 adjust_ptr(Pointer::new(alloc_id, Size::from_bytes(bits)))?.into_parts();
             write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
-            new_relocations.push((offset, ptr_prov));
+            new_provenance.push((offset, ptr_prov));
         }
         // Create allocation.
         Ok(Allocation {
             bytes,
-            relocations: Relocations::from_presorted(new_relocations),
+            provenance: ProvenanceMap::from_presorted(new_provenance),
             init_mask: self.init_mask,
             align: self.align,
             mutability: self.mutability,
@@ -300,8 +305,8 @@ impl<Prov, Extra> Allocation<Prov, Extra> {
         Size::from_bytes(self.len())
     }
 
-    /// Looks at a slice which may describe uninitialized bytes or describe a relocation. This differs
-    /// from `get_bytes_with_uninit_and_ptr` in that it does no relocation checks (even on the
+    /// Looks at a slice which may contain uninitialized bytes or provenance. This differs
+    /// from `get_bytes_with_uninit_and_ptr` in that it does no provenance checks (even on the
     /// edges) at all.
     /// This must not be used for reads affecting the interpreter execution.
     pub fn inspect_with_uninit_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
@@ -313,74 +318,47 @@ impl<Prov, Extra> Allocation<Prov, Extra> {
         &self.init_mask
     }
 
-    /// Returns the relocation list.
-    pub fn relocations(&self) -> &Relocations<Prov> {
-        &self.relocations
+    /// Returns the provenance map.
+    pub fn provenance(&self) -> &ProvenanceMap<Prov> {
+        &self.provenance
     }
 }
 
 /// Byte accessors.
 impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
     /// This is the entirely abstraction-violating way to just grab the raw bytes without
-    /// caring about relocations. It just deduplicates some code between `read_scalar`
-    /// and `get_bytes_internal`.
-    fn get_bytes_even_more_internal(&self, range: AllocRange) -> &[u8] {
-        &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
-    }
-
-    /// The last argument controls whether we error out when there are uninitialized or pointer
-    /// bytes. However, we *always* error when there are relocations overlapping the edges of the
-    /// range.
-    ///
-    /// You should never call this, call `get_bytes` or `get_bytes_with_uninit_and_ptr` instead,
+    /// caring about provenance or initialization.
     ///
     /// This function also guarantees that the resulting pointer will remain stable
     /// even when new allocations are pushed to the `HashMap`. `mem_copy_repeatedly` relies
     /// on that.
-    ///
-    /// It is the caller's responsibility to check bounds and alignment beforehand.
-    fn get_bytes_internal(
-        &self,
-        cx: &impl HasDataLayout,
-        range: AllocRange,
-        check_init_and_ptr: bool,
-    ) -> AllocResult<&[u8]> {
-        if check_init_and_ptr {
-            self.check_init(range)?;
-            self.check_relocations(cx, range)?;
-        } else {
-            // We still don't want relocations on the *edges*.
-            self.check_relocation_edges(cx, range)?;
-        }
-
-        Ok(self.get_bytes_even_more_internal(range))
+    #[inline]
+    pub fn get_bytes_unchecked(&self, range: AllocRange) -> &[u8] {
+        &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
     }
 
-    /// Checks that these bytes are initialized and not pointer bytes, and then return them
-    /// as a slice.
+    /// Checks that these bytes are initialized, then strips provenance (if possible) and returns
+    /// them.
     ///
     /// It is the caller's responsibility to check bounds and alignment beforehand.
     /// Most likely, you want to use the `PlaceTy` and `OperandTy`-based methods
     /// on `InterpCx` instead.
     #[inline]
-    pub fn get_bytes(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult<&[u8]> {
-        self.get_bytes_internal(cx, range, true)
-    }
-
-    /// It is the caller's responsibility to handle uninitialized and pointer bytes.
-    /// However, this still checks that there are no relocations on the *edges*.
-    ///
-    /// It is the caller's responsibility to check bounds and alignment beforehand.
-    #[inline]
-    pub fn get_bytes_with_uninit_and_ptr(
+    pub fn get_bytes_strip_provenance(
         &self,
         cx: &impl HasDataLayout,
         range: AllocRange,
     ) -> AllocResult<&[u8]> {
-        self.get_bytes_internal(cx, range, false)
+        self.check_init(range)?;
+        if !Prov::OFFSET_IS_ADDR {
+            if self.range_has_provenance(cx, range) {
+                return Err(AllocError::ReadPointerAsBytes);
+            }
+        }
+        Ok(self.get_bytes_unchecked(range))
     }
 
-    /// Just calling this already marks everything as defined and removes relocations,
+    /// Just calling this already marks everything as defined and removes provenance,
     /// so be sure to actually put data there!
     ///
     /// It is the caller's responsibility to check bounds and alignment beforehand.
@@ -392,7 +370,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         range: AllocRange,
     ) -> AllocResult<&mut [u8]> {
         self.mark_init(range, true);
-        self.clear_relocations(cx, range)?;
+        self.clear_provenance(cx, range)?;
 
         Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()])
     }
@@ -404,7 +382,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         range: AllocRange,
     ) -> AllocResult<*mut [u8]> {
         self.mark_init(range, true);
-        self.clear_relocations(cx, range)?;
+        self.clear_provenance(cx, range)?;
 
         assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
         let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize());
@@ -415,13 +393,6 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
 
 /// Reading and writing.
 impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
-    /// Validates that this memory range is initiailized and contains no relocations.
-    pub fn check_bytes(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
-        // This implicitly does all the checking we are asking for.
-        self.get_bytes(cx, range)?;
-        Ok(())
-    }
-
     /// Reads a *non-ZST* scalar.
     ///
     /// If `read_provenance` is `true`, this will also read provenance; otherwise (if the machine
@@ -438,43 +409,53 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         range: AllocRange,
         read_provenance: bool,
     ) -> AllocResult<Scalar<Prov>> {
-        if read_provenance {
-            assert_eq!(range.size, cx.data_layout().pointer_size);
-        }
-
         // First and foremost, if anything is uninit, bail.
         if self.is_init(range).is_err() {
             return Err(AllocError::InvalidUninitBytes(None));
         }
 
-        // If we are doing a pointer read, and there is a relocation exactly where we
-        // are reading, then we can put data and relocation back together and return that.
-        if read_provenance && let Some(&prov) = self.relocations.get(&range.start) {
-            // We already checked init and relocations, so we can use this function.
-            let bytes = self.get_bytes_even_more_internal(range);
-            let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();
-            let ptr = Pointer::new(prov, Size::from_bytes(bits));
-            return Ok(Scalar::from_pointer(ptr, cx));
-        }
+        // Get the integer part of the result. We HAVE TO check provenance before returning this!
+        let bytes = self.get_bytes_unchecked(range);
+        let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();
 
-        // If we are *not* reading a pointer, and we can just ignore relocations,
-        // then do exactly that.
-        if !read_provenance && Prov::OFFSET_IS_ADDR {
-            // We just strip provenance.
-            let bytes = self.get_bytes_even_more_internal(range);
-            let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();
-            return Ok(Scalar::from_uint(bits, range.size));
+        if read_provenance {
+            assert_eq!(range.size, cx.data_layout().pointer_size);
+
+            // When reading data with provenance, the easy case is finding provenance exactly where we
+            // are reading; then we can put data and provenance back together and return that.
+            if let Some(&prov) = self.provenance.get(&range.start) {
+                // Now we can return the bits, with their appropriate provenance.
+                let ptr = Pointer::new(prov, Size::from_bytes(bits));
+                return Ok(Scalar::from_pointer(ptr, cx));
+            }
+
+            // If we can work on pointers byte-wise, join the byte-wise provenances.
+            if Prov::OFFSET_IS_ADDR {
+                let mut prov = self.offset_get_provenance(cx, range.start);
+                for offset in 1..range.size.bytes() {
+                    let this_prov =
+                        self.offset_get_provenance(cx, range.start + Size::from_bytes(offset));
+                    prov = Prov::join(prov, this_prov);
+                }
+                // Now use this provenance.
+                let ptr = Pointer::new(prov, Size::from_bytes(bits));
+                return Ok(Scalar::from_maybe_pointer(ptr, cx));
+            }
+        } else {
+            // We are *not* reading a pointer.
+            // If we can just ignore provenance, do exactly that.
+            if Prov::OFFSET_IS_ADDR {
+                // We just strip provenance.
+                return Ok(Scalar::from_uint(bits, range.size));
+            }
         }
 
-        // It's complicated. Better make sure there is no provenance anywhere.
-        // FIXME: If !OFFSET_IS_ADDR, this is the best we can do. But if OFFSET_IS_ADDR, then
-        // `read_pointer` is true and we ideally would distinguish the following two cases:
-        // - The entire `range` is covered by 2 relocations for the same provenance.
-        //   Then we should return a pointer with that provenance.
-        // - The range has inhomogeneous provenance. Then we should return just the
-        //   underlying bits.
-        let bytes = self.get_bytes(cx, range)?;
-        let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();
+        // Fallback path for when we cannot treat provenance bytewise or ignore it.
+        assert!(!Prov::OFFSET_IS_ADDR);
+        if self.range_has_provenance(cx, range) {
+            return Err(AllocError::ReadPointerAsBytes);
+        }
+        // There is no provenance, we can just return the bits.
         Ok(Scalar::from_uint(bits, range.size))
     }
 
@@ -508,9 +489,9 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         let dst = self.get_bytes_mut(cx, range)?;
         write_target_uint(endian, dst, bytes).unwrap();
 
-        // See if we have to also write a relocation.
+        // See if we have to also store some provenance.
         if let Some(provenance) = provenance {
-            self.relocations.0.insert(range.start, provenance);
+            self.provenance.0.insert(range.start, provenance);
         }
 
         Ok(())
@@ -519,64 +500,65 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
     /// Write "uninit" to the given memory range.
     pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
         self.mark_init(range, false);
-        self.clear_relocations(cx, range)?;
+        self.clear_provenance(cx, range)?;
         return Ok(());
     }
 }
 
-/// Relocations.
+/// Provenance.
 impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
-    /// Returns all relocations overlapping with the given pointer-offset pair.
-    fn get_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Prov)] {
+    /// Returns all provenance overlapping with the given pointer-offset pair.
+    fn range_get_provenance(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Prov)] {
         // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
         // the beginning of this range.
         let start = range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
-        self.relocations.range(Size::from_bytes(start)..range.end())
+        self.provenance.range(Size::from_bytes(start)..range.end())
     }
 
-    /// Returns whether this allocation has relocations overlapping with the given range.
-    ///
-    /// Note: this function exists to allow `get_relocations` to be private, in order to somewhat
-    /// limit access to relocations outside of the `Allocation` abstraction.
-    ///
-    pub fn has_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> bool {
-        !self.get_relocations(cx, range).is_empty()
+    /// Get the provenance of a single byte.
+    fn offset_get_provenance(&self, cx: &impl HasDataLayout, offset: Size) -> Option<Prov> {
+        let prov = self.range_get_provenance(cx, alloc_range(offset, Size::from_bytes(1)));
+        assert!(prov.len() <= 1);
+        prov.first().map(|(_offset, prov)| *prov)
     }
 
-    /// Checks that there are no relocations overlapping with the given range.
-    #[inline(always)]
-    fn check_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
-        if self.has_relocations(cx, range) { Err(AllocError::ReadPointerAsBytes) } else { Ok(()) }
+    /// Returns whether this allocation has provenance overlapping with the given range.
+    ///
+    /// Note: this function exists to allow `range_get_provenance` to be private, in order to somewhat
+    /// limit access to provenance outside of the `Allocation` abstraction.
+    ///
+    pub fn range_has_provenance(&self, cx: &impl HasDataLayout, range: AllocRange) -> bool {
+        !self.range_get_provenance(cx, range).is_empty()
     }
 
-    /// Removes all relocations inside the given range.
-    /// If there are relocations overlapping with the edges, they
+    /// Removes all provenance inside the given range.
+    /// If there are provenance entries overlapping with the edges, they
     /// are removed as well *and* the bytes they cover are marked as
     /// uninitialized. This is a somewhat odd "spooky action at a distance",
     /// but it allows strictly more code to run than if we would just error
     /// immediately in that case.
-    fn clear_relocations(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult
+    fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult
     where
         Prov: Provenance,
     {
-        // Find the start and end of the given range and its outermost relocations.
+        // Find the start and end of the given range and its outermost provenance.
         let (first, last) = {
-            // Find all relocations overlapping the given range.
-            let relocations = self.get_relocations(cx, range);
-            if relocations.is_empty() {
+            // Find all provenance overlapping the given range.
+            let provenance = self.range_get_provenance(cx, range);
+            if provenance.is_empty() {
                 return Ok(());
             }
 
             (
-                relocations.first().unwrap().0,
-                relocations.last().unwrap().0 + cx.data_layout().pointer_size,
+                provenance.first().unwrap().0,
+                provenance.last().unwrap().0 + cx.data_layout().pointer_size,
             )
         };
         let start = range.start;
         let end = range.end();
 
-        // We need to handle clearing the relocations from parts of a pointer.
-        // FIXME: Miri should preserve partial relocations; see
+        // We need to handle clearing the provenance from parts of a pointer.
+        // FIXME: Miri should preserve partial provenance; see
         // https://github.com/rust-lang/miri/issues/2181.
         if first < start {
             if Prov::ERR_ON_PARTIAL_PTR_OVERWRITE {
@@ -599,41 +581,32 @@ impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
             self.init_mask.set_range(end, last, false);
         }
 
-        // Forget all the relocations.
-        // Since relocations do not overlap, we know that removing until `last` (exclusive) is fine,
-        // i.e., this will not remove any other relocations just after the ones we care about.
-        self.relocations.0.remove_range(first..last);
-
-        Ok(())
-    }
+        // Forget all the provenance.
+        // Since provenance entries do not overlap, we know that removing until `last` (exclusive) is fine,
+        // i.e., this will not remove any other provenance just after the ones we care about.
+        self.provenance.0.remove_range(first..last);
 
-    /// Errors if there are relocations overlapping with the edges of the
-    /// given memory range.
-    #[inline]
-    fn check_relocation_edges(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
-        self.check_relocations(cx, alloc_range(range.start, Size::ZERO))?;
-        self.check_relocations(cx, alloc_range(range.end(), Size::ZERO))?;
         Ok(())
     }
 }
 
-/// "Relocations" stores the provenance information of pointers stored in memory.
+/// Stores the provenance information of pointers stored in memory.
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
-pub struct Relocations<Prov = AllocId>(SortedMap<Size, Prov>);
+pub struct ProvenanceMap<Prov = AllocId>(SortedMap<Size, Prov>);
 
-impl<Prov> Relocations<Prov> {
+impl<Prov> ProvenanceMap<Prov> {
     pub fn new() -> Self {
-        Relocations(SortedMap::new())
+        ProvenanceMap(SortedMap::new())
     }
 
-    // The caller must guarantee that the given relocations are already sorted
+    // The caller must guarantee that the given provenance entries are already sorted
     // by address and contain no duplicates.
     pub fn from_presorted(r: Vec<(Size, Prov)>) -> Self {
-        Relocations(SortedMap::from_presorted_elements(r))
+        ProvenanceMap(SortedMap::from_presorted_elements(r))
     }
 }
 
-impl<Prov> Deref for Relocations<Prov> {
+impl<Prov> Deref for ProvenanceMap<Prov> {
     type Target = SortedMap<Size, Prov>;
 
     fn deref(&self) -> &Self::Target {
@@ -641,36 +614,36 @@ impl<Prov> Deref for Relocations<Prov> {
     }
 }
 
-/// A partial, owned list of relocations to transfer into another allocation.
+/// A partial, owned list of provenance to transfer into another allocation.
 ///
 /// Offsets are already adjusted to the destination allocation.
-pub struct AllocationRelocations<Prov> {
-    dest_relocations: Vec<(Size, Prov)>,
+pub struct AllocationProvenance<Prov> {
+    dest_provenance: Vec<(Size, Prov)>,
 }
 
 impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
-    pub fn prepare_relocation_copy(
+    pub fn prepare_provenance_copy(
         &self,
         cx: &impl HasDataLayout,
         src: AllocRange,
         dest: Size,
         count: u64,
-    ) -> AllocationRelocations<Prov> {
-        let relocations = self.get_relocations(cx, src);
-        if relocations.is_empty() {
-            return AllocationRelocations { dest_relocations: Vec::new() };
+    ) -> AllocationProvenance<Prov> {
+        let provenance = self.range_get_provenance(cx, src);
+        if provenance.is_empty() {
+            return AllocationProvenance { dest_provenance: Vec::new() };
         }
 
         let size = src.size;
-        let mut new_relocations = Vec::with_capacity(relocations.len() * (count as usize));
+        let mut new_provenance = Vec::with_capacity(provenance.len() * (count as usize));
 
         // If `count` is large, this is rather wasteful -- we are allocating a big array here, which
         // is mostly filled with redundant information since it's just N copies of the same `Prov`s
-        // at slightly adjusted offsets. The reason we do this is so that in `mark_relocation_range`
+        // at slightly adjusted offsets. The reason we do this is so that in `mark_provenance_range`
         // we can use `insert_presorted`. That wouldn't work with an `Iterator` that just produces
-        // the right sequence of relocations for all N copies.
+        // the right sequence of provenance for all N copies.
         for i in 0..count {
-            new_relocations.extend(relocations.iter().map(|&(offset, reloc)| {
+            new_provenance.extend(provenance.iter().map(|&(offset, reloc)| {
                 // compute offset for current repetition
                 let dest_offset = dest + size * i; // `Size` operations
                 (
@@ -681,17 +654,17 @@ impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
             }));
         }
 
-        AllocationRelocations { dest_relocations: new_relocations }
+        AllocationProvenance { dest_provenance: new_provenance }
     }
 
-    /// Applies a relocation copy.
-    /// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected
-    /// to be clear of relocations.
+    /// Applies a provenance copy.
+    /// The affected range, as defined in the parameters to `prepare_provenance_copy`, is expected
+    /// to be clear of provenance.
     ///
     /// This is dangerous to use as it can violate internal `Allocation` invariants!
     /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
-    pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations<Prov>) {
-        self.relocations.0.insert_presorted(relocations.dest_relocations);
+    pub fn mark_provenance_range(&mut self, provenance: AllocationProvenance<Prov>) {
+        self.provenance.0.insert_presorted(provenance.dest_provenance);
     }
 }
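
The `read_scalar` rework above hinges on the order of the checks: exact-match provenance first, then a byte-wise join when `OFFSET_IS_ADDR`, then the strict fallback. Below is a minimal, self-contained sketch of that decision order, using a plain `BTreeMap` in place of the real `ProvenanceMap` and ignoring initialization tracking and the `pointer_size - 1` look-back; the names and return types are illustrative, not rustc API.

    use std::collections::BTreeMap;

    // Toy provenance: just an allocation id.
    type Prov = u32;

    // Sketch of the new `read_scalar` decision order over a simplified,
    // byte-indexed provenance map.
    fn read_scalar_model(
        provenance: &BTreeMap<usize, Prov>,
        range: std::ops::Range<usize>,
        read_provenance: bool,
        offset_is_addr: bool,
    ) -> Result<(u128, Option<Prov>), &'static str> {
        let bits = 0u128; // stand-in for `read_target_uint` over the raw bytes

        if read_provenance {
            // Easy case: provenance recorded exactly at the start of the read.
            if let Some(&prov) = provenance.get(&range.start) {
                return Ok((bits, Some(prov)));
            }
            // With absolute addresses we may join per-byte provenance instead.
            if offset_is_addr {
                let mut joined = provenance.get(&range.start).copied();
                for offset in range.clone().skip(1) {
                    let this = provenance.get(&offset).copied();
                    joined = if joined == this { joined } else { None };
                }
                return Ok((bits, joined));
            }
        } else if offset_is_addr {
            // Not reading a pointer: simply strip any provenance.
            return Ok((bits, None));
        }

        // Fallback (relative offsets only): any provenance in the way is an error.
        if provenance.range(range).next().is_some() {
            return Err("ReadPointerAsBytes");
        }
        Ok((bits, None))
    }

    fn main() {
        let mut map = BTreeMap::new();
        map.insert(0usize, 1u32);
        assert!(read_scalar_model(&map, 0..8, true, false).is_ok());
        assert!(read_scalar_model(&map, 0..8, false, false).is_err());
    }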
 
diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs
index cecb55578d3..e4039cc7c68 100644
--- a/compiler/rustc_middle/src/mir/interpret/error.rs
+++ b/compiler/rustc_middle/src/mir/interpret/error.rs
@@ -401,14 +401,18 @@ impl fmt::Display for UndefinedBehaviorInfo {
 pub enum UnsupportedOpInfo {
     /// Free-form case. Only for errors that are never caught!
     Unsupported(String),
-    /// Encountered a pointer where we needed raw bytes.
-    ReadPointerAsBytes,
     /// Overwriting parts of a pointer; the resulting state cannot be represented in our
     /// `Allocation` data structure. See <https://github.com/rust-lang/miri/issues/2181>.
     PartialPointerOverwrite(Pointer<AllocId>),
+    /// Attempting to `copy` parts of a pointer to somewhere else; the resulting state cannot be
+    /// represented in our `Allocation` data structure. See
+    /// <https://github.com/rust-lang/miri/issues/2181>.
+    PartialPointerCopy(Pointer<AllocId>),
     //
     // The variants below are only reachable from CTFE/const prop, miri will never emit them.
     //
+    /// Encountered a pointer where we needed raw bytes.
+    ReadPointerAsBytes,
     /// Accessing thread local statics
     ThreadLocalStatic(DefId),
     /// Accessing an unsupported extern static.
@@ -420,10 +424,13 @@ impl fmt::Display for UnsupportedOpInfo {
         use UnsupportedOpInfo::*;
         match self {
             Unsupported(ref msg) => write!(f, "{msg}"),
-            ReadPointerAsBytes => write!(f, "unable to turn pointer into raw bytes"),
             PartialPointerOverwrite(ptr) => {
                 write!(f, "unable to overwrite parts of a pointer in memory at {ptr:?}")
             }
+            PartialPointerCopy(ptr) => {
+                write!(f, "unable to copy parts of a pointer from memory at {ptr:?}")
+            }
+            ReadPointerAsBytes => write!(f, "unable to turn pointer into raw bytes"),
             ThreadLocalStatic(did) => write!(f, "cannot access thread local static ({did:?})"),
             ReadExternStatic(did) => write!(f, "cannot read from extern static ({did:?})"),
         }
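
With this reordering, `ReadPointerAsBytes` is documented as reachable only from CTFE/const-prop. A hypothetical const item that runs into it, intentionally rejected at compile time; the exact E0080 wording depends on the compiler version:

    const REF: &i32 = &42;

    // Const eval has no concrete addresses, so reading the pointer's bytes as an
    // integer fails during evaluation with "unable to turn pointer into raw bytes".
    const ADDR: usize = unsafe { std::mem::transmute::<&i32, usize>(REF) };

    fn main() {
        println!("{ADDR}");
    }
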
diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs
index 93fe7e63710..0fc1217d571 100644
--- a/compiler/rustc_middle/src/mir/interpret/mod.rs
+++ b/compiler/rustc_middle/src/mir/interpret/mod.rs
@@ -128,7 +128,7 @@ pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
 
 pub use self::allocation::{
     alloc_range, AllocRange, Allocation, ConstAllocation, InitChunk, InitChunkIter, InitMask,
-    Relocations,
+    ProvenanceMap,
 };
 
 pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
diff --git a/compiler/rustc_middle/src/mir/interpret/pointer.rs b/compiler/rustc_middle/src/mir/interpret/pointer.rs
index 384954cbbd5..95e52e391d8 100644
--- a/compiler/rustc_middle/src/mir/interpret/pointer.rs
+++ b/compiler/rustc_middle/src/mir/interpret/pointer.rs
@@ -107,8 +107,12 @@ impl<T: HasDataLayout> PointerArithmetic for T {}
 /// pointer), but `derive` adds some unnecessary bounds.
 pub trait Provenance: Copy + fmt::Debug {
     /// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
-    /// If `true, ptr-to-int casts work by simply discarding the provenance.
-    /// If `false`, ptr-to-int casts are not supported. The offset *must* be relative in that case.
+    /// - If `false`, the offset *must* be relative. This means the bytes representing a pointer are
+    ///   different from what the Abstract Machine prescribes, so the interpreter must prevent any
+    ///   operation that would inspect the underlying bytes of a pointer, such as ptr-to-int
+    ///   transmutation. A `ReadPointerAsBytes` error will be raised in such situations.
+    /// - If `true`, the interpreter will permit operations to inspect the underlying bytes of a
+    ///   pointer, and implement ptr-to-int transmutation by stripping provenance.
     const OFFSET_IS_ADDR: bool;
 
     /// We also use this trait to control whether to abort execution when a pointer is being partially overwritten
@@ -125,6 +129,9 @@ pub trait Provenance: Copy + fmt::Debug {
     /// Otherwise this function is best-effort (but must agree with `Machine::ptr_get_alloc`).
     /// (Identifying the offset in that allocation, however, is harder -- use `Memory::ptr_get_alloc` for that.)
     fn get_alloc_id(self) -> Option<AllocId>;
+
+    /// Defines the 'join' of provenance: what happens when doing a pointer load and different bytes have different provenance.
+    fn join(left: Option<Self>, right: Option<Self>) -> Option<Self>;
 }
 
 impl Provenance for AllocId {
@@ -152,6 +159,10 @@ impl Provenance for AllocId {
     fn get_alloc_id(self) -> Option<AllocId> {
         Some(self)
     }
+
+    fn join(_left: Option<Self>, _right: Option<Self>) -> Option<Self> {
+        panic!("merging provenance is not supported when `OFFSET_IS_ADDR` is false")
+    }
 }
 
 /// Represents a pointer in the Miri engine.
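
The `join` hook added here only does real work for machines where `OFFSET_IS_ADDR` is true; the `AllocId` impl above simply panics. One plausible shape for such a machine, sketched with a standalone toy tag type rather than a real `Provenance` impl: keep the provenance only when every byte agrees.

    // Toy provenance for a machine with absolute addresses (OFFSET_IS_ADDR = true).
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Tag(u64);

    // Keep provenance only when both sides carry the same tag; otherwise the
    // joined pointer has no provenance.
    fn join(left: Option<Tag>, right: Option<Tag>) -> Option<Tag> {
        if left == right { left } else { None }
    }

    fn main() {
        assert_eq!(join(Some(Tag(1)), Some(Tag(1))), Some(Tag(1)));
        assert_eq!(join(Some(Tag(1)), Some(Tag(2))), None);
        assert_eq!(join(Some(Tag(1)), None), None);
    }
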
diff --git a/compiler/rustc_middle/src/mir/interpret/value.rs b/compiler/rustc_middle/src/mir/interpret/value.rs
index ba56c5267df..d4fad7f1ecd 100644
--- a/compiler/rustc_middle/src/mir/interpret/value.rs
+++ b/compiler/rustc_middle/src/mir/interpret/value.rs
@@ -130,9 +130,7 @@ pub enum Scalar<Prov = AllocId> {
     /// The raw bytes of a simple value.
     Int(ScalarInt),
 
-    /// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
-    /// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
-    /// relocation and its associated offset together as a `Pointer` here.
+    /// A pointer.
     ///
     /// We also store the size of the pointer, such that a `Scalar` always knows how big it is.
     /// The size is always the pointer size of the current target, but this is not information
@@ -509,7 +507,7 @@ pub fn get_slice_bytes<'tcx>(cx: &impl HasDataLayout, val: ConstValue<'tcx>) ->
     if let ConstValue::Slice { data, start, end } = val {
         let len = end - start;
         data.inner()
-            .get_bytes(
+            .get_bytes_strip_provenance(
                 cx,
                 AllocRange { start: Size::from_bytes(start), size: Size::from_bytes(len) },
             )
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index f7a1e9b2864..e94e1e8a10d 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -2687,8 +2687,8 @@ fn pretty_print_const_value<'tcx>(
                 match inner.kind() {
                     ty::Slice(t) => {
                         if *t == u8_type {
-                            // The `inspect` here is okay since we checked the bounds, and there are
-                            // no relocations (we have an active slice reference here). We don't use
+                            // The `inspect` here is okay since we checked the bounds, and `u8` carries
+                            // no provenance (we have an active slice reference here). We don't use
                             // this result to affect interpreter execution.
                             let byte_str = data
                                 .inner()
@@ -2698,8 +2698,8 @@ fn pretty_print_const_value<'tcx>(
                         }
                     }
                     ty::Str => {
-                        // The `inspect` here is okay since we checked the bounds, and there are no
-                        // relocations (we have an active `str` reference here). We don't use this
+                        // The `inspect` here is okay since we checked the bounds, and `str` carries
+                        // no provenance (we have an active `str` reference here). We don't use this
                         // result to affect interpreter execution.
                         let slice = data
                             .inner()
@@ -2714,7 +2714,7 @@ fn pretty_print_const_value<'tcx>(
                 let n = n.kind().try_to_bits(tcx.data_layout.pointer_size).unwrap();
                 // cast is ok because we already checked for pointer size (32 or 64 bit) above
                 let range = AllocRange { start: offset, size: Size::from_bytes(n) };
-                let byte_str = alloc.inner().get_bytes(&tcx, range).unwrap();
+                let byte_str = alloc.inner().get_bytes_strip_provenance(&tcx, range).unwrap();
                 fmt.write_str("*")?;
                 pretty_print_byte_str(fmt, byte_str)?;
                 return Ok(());
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs
index da6af89b09b..88c16189f1d 100644
--- a/compiler/rustc_middle/src/mir/pretty.rs
+++ b/compiler/rustc_middle/src/mir/pretty.rs
@@ -676,7 +676,7 @@ pub fn write_allocations<'tcx>(
     fn alloc_ids_from_alloc(
         alloc: ConstAllocation<'_>,
     ) -> impl DoubleEndedIterator<Item = AllocId> + '_ {
-        alloc.inner().relocations().values().map(|id| *id)
+        alloc.inner().provenance().values().map(|id| *id)
     }
 
     fn alloc_ids_from_const_val(val: ConstValue<'_>) -> impl Iterator<Item = AllocId> + '_ {
@@ -778,7 +778,7 @@ pub fn write_allocations<'tcx>(
 /// If the allocation is small enough to fit into a single line, no start address is given.
 /// After the hex dump, an ascii dump follows, replacing all unprintable characters (control
 /// characters or characters whose value is larger than 127) with a `.`
-/// This also prints relocations adequately.
+/// This also prints provenance adequately.
 pub fn display_allocation<'a, 'tcx, Prov, Extra>(
     tcx: TyCtxt<'tcx>,
     alloc: &'a Allocation<Prov, Extra>,
@@ -873,34 +873,34 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
         if i != line_start {
             write!(w, " ")?;
         }
-        if let Some(&prov) = alloc.relocations().get(&i) {
-            // Memory with a relocation must be defined
+        if let Some(&prov) = alloc.provenance().get(&i) {
+            // Memory with provenance must be defined
             assert!(alloc.init_mask().is_range_initialized(i, i + ptr_size).is_ok());
             let j = i.bytes_usize();
             let offset = alloc
                 .inspect_with_uninit_and_ptr_outside_interpreter(j..j + ptr_size.bytes_usize());
             let offset = read_target_uint(tcx.data_layout.endian, offset).unwrap();
             let offset = Size::from_bytes(offset);
-            let relocation_width = |bytes| bytes * 3;
+            let provenance_width = |bytes| bytes * 3;
             let ptr = Pointer::new(prov, offset);
             let mut target = format!("{:?}", ptr);
-            if target.len() > relocation_width(ptr_size.bytes_usize() - 1) {
+            if target.len() > provenance_width(ptr_size.bytes_usize() - 1) {
                 // This is too long, try to save some space.
                 target = format!("{:#?}", ptr);
             }
             if ((i - line_start) + ptr_size).bytes_usize() > BYTES_PER_LINE {
-                // This branch handles the situation where a relocation starts in the current line
+                // This branch handles the situation where a provenance entry starts in the current line
                 // but ends in the next one.
                 let remainder = Size::from_bytes(BYTES_PER_LINE) - (i - line_start);
                 let overflow = ptr_size - remainder;
-                let remainder_width = relocation_width(remainder.bytes_usize()) - 2;
-                let overflow_width = relocation_width(overflow.bytes_usize() - 1) + 1;
+                let remainder_width = provenance_width(remainder.bytes_usize()) - 2;
+                let overflow_width = provenance_width(overflow.bytes_usize() - 1) + 1;
                 ascii.push('╾');
                 for _ in 0..remainder.bytes() - 1 {
                     ascii.push('─');
                 }
                 if overflow_width > remainder_width && overflow_width >= target.len() {
-                    // The case where the relocation fits into the part in the next line
+                    // The case where the provenance fits into the part in the next line
                     write!(w, "╾{0:─^1$}", "", remainder_width)?;
                     line_start =
                         write_allocation_newline(w, line_start, &ascii, pos_width, prefix)?;
@@ -921,11 +921,11 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
                 i += ptr_size;
                 continue;
             } else {
-                // This branch handles a relocation that starts and ends in the current line.
-                let relocation_width = relocation_width(ptr_size.bytes_usize() - 1);
-                oversized_ptr(&mut target, relocation_width);
+                // This branch handles a provenance entry that starts and ends in the current line.
+                let provenance_width = provenance_width(ptr_size.bytes_usize() - 1);
+                oversized_ptr(&mut target, provenance_width);
                 ascii.push('╾');
-                write!(w, "╾{0:─^1$}╼", target, relocation_width)?;
+                write!(w, "╾{0:─^1$}╼", target, provenance_width)?;
                 for _ in 0..ptr_size.bytes() - 2 {
                     ascii.push('─');
                 }
@@ -935,7 +935,7 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
         } else if alloc.init_mask().is_range_initialized(i, i + Size::from_bytes(1)).is_ok() {
             let j = i.bytes_usize();
 
-            // Checked definedness (and thus range) and relocations. This access also doesn't
+            // Checked definedness (and thus range) and provenance. This access also doesn't
             // influence interpreter execution but is only for debugging.
             let c = alloc.inspect_with_uninit_and_ptr_outside_interpreter(j..j + 1)[0];
             write!(w, "{:02x}", c)?;
diff --git a/compiler/rustc_middle/src/ty/impls_ty.rs b/compiler/rustc_middle/src/ty/impls_ty.rs
index cd00b26b8de..d1c0d62ac6e 100644
--- a/compiler/rustc_middle/src/ty/impls_ty.rs
+++ b/compiler/rustc_middle/src/ty/impls_ty.rs
@@ -113,7 +113,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::AllocId {
 }
 
 // `Relocations` with default type parameters is a sorted map.
-impl<'a, Prov> HashStable<StableHashingContext<'a>> for mir::interpret::Relocations<Prov>
+impl<'a, Prov> HashStable<StableHashingContext<'a>> for mir::interpret::ProvenanceMap<Prov>
 where
     Prov: HashStable<StableHashingContext<'a>>,
 {
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs
index cc55b7e8611..329478f27b7 100644
--- a/compiler/rustc_middle/src/ty/print/pretty.rs
+++ b/compiler/rustc_middle/src/ty/print/pretty.rs
@@ -1275,7 +1275,7 @@ pub trait PrettyPrinter<'tcx>:
                                     let range =
                                         AllocRange { start: offset, size: Size::from_bytes(len) };
                                     if let Ok(byte_str) =
-                                        alloc.inner().get_bytes(&self.tcx(), range)
+                                        alloc.inner().get_bytes_strip_provenance(&self.tcx(), range)
                                     {
                                         p!(pretty_print_byte_str(byte_str))
                                     } else {
@@ -1536,6 +1536,34 @@ pub trait PrettyPrinter<'tcx>:
         }
         Ok(self)
     }
+
+    fn pretty_closure_as_impl(
+        mut self,
+        closure: ty::ClosureSubsts<'tcx>,
+    ) -> Result<Self::Const, Self::Error> {
+        let sig = closure.sig();
+        let kind = closure.kind_ty().to_opt_closure_kind().unwrap_or(ty::ClosureKind::Fn);
+
+        write!(self, "impl ")?;
+        self.wrap_binder(&sig, |sig, mut cx| {
+            define_scoped_cx!(cx);
+
+            p!(print(kind), "(");
+            for (i, arg) in sig.inputs()[0].tuple_fields().iter().enumerate() {
+                if i > 0 {
+                    p!(", ");
+                }
+                p!(print(arg));
+            }
+            p!(")");
+
+            if !sig.output().is_unit() {
+                p!(" -> ", print(sig.output()));
+            }
+
+            Ok(cx)
+        })
+    }
 }
 
 // HACK(eddyb) boxed to avoid moving around a large struct by-value.
@@ -2450,6 +2478,11 @@ impl<'tcx> ty::PolyTraitPredicate<'tcx> {
     }
 }
 
+#[derive(Debug, Copy, Clone, TypeFoldable, TypeVisitable, Lift)]
+pub struct PrintClosureAsImpl<'tcx> {
+    pub closure: ty::ClosureSubsts<'tcx>,
+}
+
 forward_display_to_print! {
     ty::Region<'tcx>,
     Ty<'tcx>,
@@ -2542,6 +2575,10 @@ define_print_and_forward_display! {
         p!(print(self.0.trait_ref.print_only_trait_path()));
     }
 
+    PrintClosureAsImpl<'tcx> {
+        p!(pretty_closure_as_impl(self.closure))
+    }
+
     ty::ParamTy {
         p!(write("{}", self.name))
     }
diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs
index 0070575f213..80354a3f8a2 100644
--- a/compiler/rustc_middle/src/ty/sty.rs
+++ b/compiler/rustc_middle/src/ty/sty.rs
@@ -325,6 +325,10 @@ impl<'tcx> ClosureSubsts<'tcx> {
             _ => bug!("closure_sig_as_fn_ptr_ty is not a fn-ptr: {:?}", ty.kind()),
         }
     }
+
+    pub fn print_as_impl_trait(self) -> ty::print::PrintClosureAsImpl<'tcx> {
+        ty::print::PrintClosureAsImpl { closure: self }
+    }
 }
 
 /// Similar to `ClosureSubsts`; see the above documentation for more.
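
`print_as_impl_trait` feeds the typeck suggestion further down in this rollup: when a function body evaluates to a closure and the return type annotation is missing, the closure can now be suggested as an `impl Fn...` type instead of being unnameable. An illustrative example of the kind of signature such a suggestion produces (the diagnostic text itself is not reproduced here):

    // Leaving off the return type on `adder` previously fell back to the generic
    // missing-return-type hint; with this change the compiler can propose the
    // `impl Fn(i32) -> i32` written below.
    fn adder(x: i32) -> impl Fn(i32) -> i32 {
        move |y| x + y
    }

    fn main() {
        assert_eq!(adder(2)(3), 5);
    }
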
diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs
index 82ef16a7f72..5f5540495e9 100644
--- a/compiler/rustc_monomorphize/src/collector.rs
+++ b/compiler/rustc_monomorphize/src/collector.rs
@@ -461,7 +461,7 @@ fn collect_items_rec<'tcx>(
             recursion_depth_reset = None;
 
             if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
-                for &id in alloc.inner().relocations().values() {
+                for &id in alloc.inner().provenance().values() {
                     collect_miri(tcx, id, &mut neighbors);
                 }
             }
@@ -1424,7 +1424,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIte
         }
         GlobalAlloc::Memory(alloc) => {
             trace!("collecting {:?} with {:#?}", alloc_id, alloc);
-            for &inner in alloc.inner().relocations().values() {
+            for &inner in alloc.inner().provenance().values() {
                 rustc_data_structures::stack::ensure_sufficient_stack(|| {
                     collect_miri(tcx, inner, output);
                 });
@@ -1463,7 +1463,7 @@ fn collect_const_value<'tcx>(
     match value {
         ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_miri(tcx, ptr.provenance, output),
         ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => {
-            for &id in alloc.inner().relocations().values() {
+            for &id in alloc.inner().provenance().values() {
                 collect_miri(tcx, id, output);
             }
         }
diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs
index 25013036d87..2d15b1b0a1b 100644
--- a/compiler/rustc_resolve/src/diagnostics.rs
+++ b/compiler/rustc_resolve/src/diagnostics.rs
@@ -1172,16 +1172,6 @@ impl<'a> Resolver<'a> {
                 Scope::Module(module, _) => {
                     this.add_module_candidates(module, &mut suggestions, filter_fn);
                 }
-                Scope::RegisteredAttrs => {
-                    let res = Res::NonMacroAttr(NonMacroAttrKind::Registered);
-                    if filter_fn(res) {
-                        suggestions.extend(
-                            this.registered_attrs
-                                .iter()
-                                .map(|ident| TypoSuggestion::typo_from_res(ident.name, res)),
-                        );
-                    }
-                }
                 Scope::MacroUsePrelude => {
                     suggestions.extend(this.macro_use_prelude.iter().filter_map(
                         |(name, binding)| {
diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs
index 23c0ca108d3..2afba94d793 100644
--- a/compiler/rustc_resolve/src/ident.rs
+++ b/compiler/rustc_resolve/src/ident.rs
@@ -127,7 +127,6 @@ impl<'a> Resolver<'a> {
                 }
                 Scope::CrateRoot => true,
                 Scope::Module(..) => true,
-                Scope::RegisteredAttrs => use_prelude,
                 Scope::MacroUsePrelude => use_prelude || rust_2015,
                 Scope::BuiltinAttrs => true,
                 Scope::ExternPrelude => use_prelude || is_absolute_path,
@@ -187,12 +186,11 @@ impl<'a> Resolver<'a> {
                             match ns {
                                 TypeNS => Scope::ExternPrelude,
                                 ValueNS => Scope::StdLibPrelude,
-                                MacroNS => Scope::RegisteredAttrs,
+                                MacroNS => Scope::MacroUsePrelude,
                             }
                         }
                     }
                 }
-                Scope::RegisteredAttrs => Scope::MacroUsePrelude,
                 Scope::MacroUsePrelude => Scope::StdLibPrelude,
                 Scope::BuiltinAttrs => break, // nowhere else to search
                 Scope::ExternPrelude if is_absolute_path => break,
@@ -556,14 +554,6 @@ impl<'a> Resolver<'a> {
                             Err((Determinacy::Determined, _)) => Err(Determinacy::Determined),
                         }
                     }
-                    Scope::RegisteredAttrs => match this.registered_attrs.get(&ident).cloned() {
-                        Some(ident) => ok(
-                            Res::NonMacroAttr(NonMacroAttrKind::Registered),
-                            ident.span,
-                            this.arenas,
-                        ),
-                        None => Err(Determinacy::Determined),
-                    },
                     Scope::MacroUsePrelude => {
                         match this.macro_use_prelude.get(&ident.name).cloned() {
                             Some(binding) => Ok((binding, Flags::MISC_FROM_PRELUDE)),
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index d91a58b13ff..66090c96d1e 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -108,7 +108,6 @@ enum Scope<'a> {
     // The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK`
     // lint if it should be reported.
     Module(Module<'a>, Option<NodeId>),
-    RegisteredAttrs,
     MacroUsePrelude,
     BuiltinAttrs,
     ExternPrelude,
@@ -976,7 +975,6 @@ pub struct Resolver<'a> {
     /// A small map keeping true kinds of built-in macros that appear to be fn-like on
     /// the surface (`macro` items in libcore), but are actually attributes or derives.
     builtin_macro_kinds: FxHashMap<LocalDefId, MacroKind>,
-    registered_attrs: FxHashSet<Ident>,
     registered_tools: RegisteredTools,
     macro_use_prelude: FxHashMap<Symbol, &'a NameBinding<'a>>,
     macro_map: FxHashMap<DefId, MacroData>,
@@ -1253,8 +1251,7 @@ impl<'a> Resolver<'a> {
             }
         }
 
-        let (registered_attrs, registered_tools) =
-            macros::registered_attrs_and_tools(session, &krate.attrs);
+        let registered_tools = macros::registered_tools(session, &krate.attrs);
 
         let features = session.features_untracked();
 
@@ -1319,7 +1316,6 @@ impl<'a> Resolver<'a> {
             macro_names: FxHashSet::default(),
             builtin_macros: Default::default(),
             builtin_macro_kinds: Default::default(),
-            registered_attrs,
             registered_tools,
             macro_use_prelude: FxHashMap::default(),
             macro_map: FxHashMap::default(),
diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs
index 070fb9c721b..0c428aa6cc0 100644
--- a/compiler/rustc_resolve/src/macros.rs
+++ b/compiler/rustc_resolve/src/macros.rs
@@ -112,47 +112,32 @@ fn fast_print_path(path: &ast::Path) -> Symbol {
     }
 }
 
-/// The code common between processing `#![register_tool]` and `#![register_attr]`.
-fn registered_idents(
-    sess: &Session,
-    attrs: &[ast::Attribute],
-    attr_name: Symbol,
-    descr: &str,
-) -> FxHashSet<Ident> {
-    let mut registered = FxHashSet::default();
-    for attr in sess.filter_by_name(attrs, attr_name) {
+pub(crate) fn registered_tools(sess: &Session, attrs: &[ast::Attribute]) -> FxHashSet<Ident> {
+    let mut registered_tools = FxHashSet::default();
+    for attr in sess.filter_by_name(attrs, sym::register_tool) {
         for nested_meta in attr.meta_item_list().unwrap_or_default() {
             match nested_meta.ident() {
                 Some(ident) => {
-                    if let Some(old_ident) = registered.replace(ident) {
-                        let msg = format!("{} `{}` was already registered", descr, ident);
+                    if let Some(old_ident) = registered_tools.replace(ident) {
+                        let msg = format!("{} `{}` was already registered", "tool", ident);
                         sess.struct_span_err(ident.span, &msg)
                             .span_label(old_ident.span, "already registered here")
                             .emit();
                     }
                 }
                 None => {
-                    let msg = format!("`{}` only accepts identifiers", attr_name);
+                    let msg = format!("`{}` only accepts identifiers", sym::register_tool);
                     let span = nested_meta.span();
                     sess.struct_span_err(span, &msg).span_label(span, "not an identifier").emit();
                 }
             }
         }
     }
-    registered
-}
-
-pub(crate) fn registered_attrs_and_tools(
-    sess: &Session,
-    attrs: &[ast::Attribute],
-) -> (FxHashSet<Ident>, FxHashSet<Ident>) {
-    let registered_attrs = registered_idents(sess, attrs, sym::register_attr, "attribute");
-    let mut registered_tools = registered_idents(sess, attrs, sym::register_tool, "tool");
     // We implicitly add `rustfmt` and `clippy` to known tools,
     // but it's not an error to register them explicitly.
     let predefined_tools = [sym::clippy, sym::rustfmt];
     registered_tools.extend(predefined_tools.iter().cloned().map(Ident::with_dummy_span));
-    (registered_attrs, registered_tools)
+    registered_tools
 }
 
 // Some feature gates for inner attributes are reported as lints for backward compatibility.
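
With `registered_attrs` gone, this function now only parses `#![register_tool]`. A minimal nightly-only sketch of what it still accepts, assuming a toolchain where the (still unstable) `register_tool` feature is available:

    #![feature(register_tool)]
    #![register_tool(mytool)]

    // Attributes under the registered tool namespace resolve without error;
    // `#![register_attr]` is no longer available for bare attribute names.
    #[mytool::keep]
    fn tagged() {}

    fn main() {
        tagged();
    }
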
diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
index a93f9ec0397..54f01577c5e 100644
--- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
+++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
@@ -690,13 +690,17 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
                 real_trait_pred = parent_trait_pred;
             }
 
-            // Skipping binder here, remapping below
-            let real_ty = real_trait_pred.self_ty().skip_binder();
-            if self.can_eq(obligation.param_env, real_ty, arg_ty).is_err() {
+            let real_ty = real_trait_pred.self_ty();
+            // We `erase_late_bound_regions` here because `make_subregion` does not handle
+            // `ReLateBound`, and we don't particularly care about the regions.
+            if self
+                .can_eq(obligation.param_env, self.tcx.erase_late_bound_regions(real_ty), arg_ty)
+                .is_err()
+            {
                 continue;
             }
 
-            if let ty::Ref(region, base_ty, mutbl) = *real_ty.kind() {
+            if let ty::Ref(region, base_ty, mutbl) = *real_ty.skip_binder().kind() {
                 let mut autoderef = Autoderef::new(
                     self,
                     obligation.param_env,
diff --git a/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs b/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs
index 57771e0969b..64d261285c5 100644
--- a/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs
+++ b/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs
@@ -506,30 +506,30 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             self.resolve_numeric_literals_with_default(self.resolve_vars_if_possible(found));
         // Only suggest changing the return type for methods that
         // haven't set a return type at all (and aren't `fn main()` or an impl).
-        match (
-            &fn_decl.output,
-            found.is_suggestable(self.tcx, false),
-            can_suggest,
-            expected.is_unit(),
-        ) {
-            (&hir::FnRetTy::DefaultReturn(span), true, true, true) => {
-                err.subdiagnostic(AddReturnTypeSuggestion::Add { span, found });
-                true
-            }
-            (&hir::FnRetTy::DefaultReturn(span), false, true, true) => {
-                // FIXME: if `found` could be `impl Iterator` or `impl Fn*`, we should suggest
-                // that.
-                err.subdiagnostic(AddReturnTypeSuggestion::MissingHere { span });
-                true
-            }
-            (&hir::FnRetTy::DefaultReturn(span), _, false, true) => {
+        match &fn_decl.output {
+            &hir::FnRetTy::DefaultReturn(span) if expected.is_unit() && !can_suggest => {
                 // `fn main()` must return `()`, do not suggest changing return type
                 err.subdiagnostic(ExpectedReturnTypeLabel::Unit { span });
-                true
+                return true;
             }
-            // expectation was caused by something else, not the default return
-            (&hir::FnRetTy::DefaultReturn(_), _, _, false) => false,
-            (&hir::FnRetTy::Return(ref ty), _, _, _) => {
+            &hir::FnRetTy::DefaultReturn(span) if expected.is_unit() => {
+                if found.is_suggestable(self.tcx, false) {
+                    err.subdiagnostic(AddReturnTypeSuggestion::Add { span, found: found.to_string() });
+                    return true;
+                } else if let ty::Closure(_, substs) = found.kind()
+                    // FIXME(compiler-errors): Get better at printing binders...
+                    && let closure = substs.as_closure()
+                    && closure.sig().is_suggestable(self.tcx, false)
+                {
+                    err.subdiagnostic(AddReturnTypeSuggestion::Add { span, found: closure.print_as_impl_trait().to_string() });
+                    return true;
+                } else {
+                    // FIXME: if `found` could be `impl Iterator` we should suggest that.
+                    err.subdiagnostic(AddReturnTypeSuggestion::MissingHere { span });
+                    return true
+                }
+            }
+            &hir::FnRetTy::Return(ref ty) => {
                 // Only point to return type if the expected type is the return type, as if they
                 // are not, the expectation must have been caused by something else.
                 debug!("suggest_missing_return_type: return type {:?} node {:?}", ty, ty.kind);
@@ -546,9 +546,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     self.try_suggest_return_impl_trait(err, expected, ty, fn_id);
                     return true;
                 }
-                false
             }
+            _ => {}
         }
+        false
     }
 
     /// check whether the return type is a generic type with a trait bound
diff --git a/compiler/rustc_typeck/src/check/mod.rs b/compiler/rustc_typeck/src/check/mod.rs
index fb675212e3f..f8d839b6483 100644
--- a/compiler/rustc_typeck/src/check/mod.rs
+++ b/compiler/rustc_typeck/src/check/mod.rs
@@ -542,13 +542,13 @@ fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: LocalDefId) {
     // For the wasm32 target statics with `#[link_section]` are placed into custom
     // sections of the final output file, but this isn't link custom sections of
     // other executable formats. Namely we can only embed a list of bytes,
-    // nothing with pointers to anything else or relocations. If any relocation
-    // show up, reject them here.
+    // nothing with provenance (pointers to anything else). If any provenance
+    // shows up, reject it here.
     // `#[link_section]` may contain arbitrary, or even undefined bytes, but it is
     // the consumer's responsibility to ensure all bytes that have been read
     // have defined values.
     if let Ok(alloc) = tcx.eval_static_initializer(id.to_def_id())
-        && alloc.inner().relocations().len() != 0
+        && alloc.inner().provenance().len() != 0
     {
         let msg = "statics with a custom `#[link_section]` must be a \
                         simple list of bytes on the wasm target with no \
diff --git a/compiler/rustc_typeck/src/errors.rs b/compiler/rustc_typeck/src/errors.rs
index 2214fc2ced8..14c0558cdde 100644
--- a/compiler/rustc_typeck/src/errors.rs
+++ b/compiler/rustc_typeck/src/errors.rs
@@ -195,7 +195,7 @@ pub struct AddressOfTemporaryTaken {
 }
 
 #[derive(SessionSubdiagnostic)]
-pub enum AddReturnTypeSuggestion<'tcx> {
+pub enum AddReturnTypeSuggestion {
     #[suggestion(
         typeck::add_return_type_add,
         code = "-> {found} ",
@@ -204,7 +204,7 @@ pub enum AddReturnTypeSuggestion<'tcx> {
     Add {
         #[primary_span]
         span: Span,
-        found: Ty<'tcx>,
+        found: String,
     },
     #[suggestion(
         typeck::add_return_type_missing_here,