Diffstat (limited to 'compiler')
-rw-r--r--  compiler/rustc_ast_passes/src/ast_validation.rs | 9
-rw-r--r--  compiler/rustc_borrowck/src/diagnostics/move_errors.rs | 24
-rw-r--r--  compiler/rustc_codegen_cranelift/patches/0022-sysroot-Disable-not-compiling-tests.patch | 20
-rw-r--r--  compiler/rustc_codegen_llvm/src/llvm_util.rs | 10
-rw-r--r--  compiler/rustc_codegen_ssa/src/target_features.rs | 5
-rw-r--r--  compiler/rustc_const_eval/src/interpret/eval_context.rs | 22
-rw-r--r--  compiler/rustc_const_eval/src/interpret/memory.rs | 20
-rw-r--r--  compiler/rustc_const_eval/src/interpret/validity.rs | 5
-rw-r--r--  compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs | 16
-rw-r--r--  compiler/rustc_expand/src/lib.rs | 1
-rw-r--r--  compiler/rustc_expand/src/mbe.rs | 88
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_check.rs | 16
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs | 689
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs | 86
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs | 30
-rw-r--r--  compiler/rustc_expand/src/mbe/transcribe.rs | 36
-rw-r--r--  compiler/rustc_feature/src/active.rs | 2
-rw-r--r--  compiler/rustc_hir/src/lang_items.rs | 1
-rw-r--r--  compiler/rustc_lint/src/traits.rs | 4
-rw-r--r--  compiler/rustc_middle/src/lib.rs | 1
-rw-r--r--  compiler/rustc_middle/src/traits/mod.rs | 16
-rw-r--r--  compiler/rustc_middle/src/traits/select.rs | 4
-rw-r--r--  compiler/rustc_middle/src/traits/structural_impls.rs | 6
-rw-r--r--  compiler/rustc_middle/src/ty/mod.rs | 1
-rw-r--r--  compiler/rustc_middle/src/ty/print/pretty.rs | 36
-rw-r--r--  compiler/rustc_mir_transform/src/const_prop.rs | 8
-rw-r--r--  compiler/rustc_mir_transform/src/early_otherwise_branch.rs | 33
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs | 11
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs | 4
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs | 34
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs | 12
-rw-r--r--  compiler/rustc_passes/src/check_attr.rs | 24
-rw-r--r--  compiler/rustc_passes/src/check_const.rs | 4
-rw-r--r--  compiler/rustc_passes/src/diagnostic_items.rs | 7
-rw-r--r--  compiler/rustc_passes/src/entry.rs | 52
-rw-r--r--  compiler/rustc_passes/src/intrinsicck.rs | 52
-rw-r--r--  compiler/rustc_resolve/src/late/lifetimes.rs | 25
-rw-r--r--  compiler/rustc_span/src/symbol.rs | 1
-rw-r--r--  compiler/rustc_target/src/asm/aarch64.rs | 2
-rw-r--r--  compiler/rustc_trait_selection/src/traits/misc.rs | 4
-rw-r--r--  compiler/rustc_trait_selection/src/traits/project.rs | 4
-rw-r--r--  compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs | 32
-rw-r--r--  compiler/rustc_trait_selection/src/traits/select/confirmation.rs | 64
-rw-r--r--  compiler/rustc_trait_selection/src/traits/select/mod.rs | 12
-rw-r--r--  compiler/rustc_ty_utils/src/instance.rs | 2
-rw-r--r--  compiler/rustc_typeck/src/check/op.rs | 21
-rw-r--r--  compiler/rustc_typeck/src/check/pat.rs | 6
-rw-r--r--  compiler/rustc_typeck/src/coherence/builtin.rs | 36
48 files changed, 830 insertions(+), 768 deletions(-)
diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs
index f5e6b15fcbf..8ba6a914c4a 100644
--- a/compiler/rustc_ast_passes/src/ast_validation.rs
+++ b/compiler/rustc_ast_passes/src/ast_validation.rs
@@ -471,10 +471,17 @@ impl<'a> AstValidator<'a> {
     }
 
     fn error_item_without_body(&self, sp: Span, ctx: &str, msg: &str, sugg: &str) {
+        let source_map = self.session.source_map();
+        let end = source_map.end_point(sp);
+        let replace_span = if source_map.span_to_snippet(end).map(|s| s == ";").unwrap_or(false) {
+            end
+        } else {
+            sp.shrink_to_hi()
+        };
         self.err_handler()
             .struct_span_err(sp, msg)
             .span_suggestion(
-                self.session.source_map().end_point(sp),
+                replace_span,
                 &format!("provide a definition for the {}", ctx),
                 sugg.to_string(),
                 Applicability::HasPlaceholders,
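
Note: the suggestion span now targets the trailing `;` when the item ends with one, and otherwise an empty span at the very end of the item. A minimal standalone sketch of that selection logic, using plain byte ranges instead of rustc's `Span`/`SourceMap` types (names here are illustrative):

    // Illustrative only: pick the byte range the suggestion should replace.
    fn replacement_range(item_src: &str) -> std::ops::Range<usize> {
        if item_src.ends_with(';') {
            // Overwrite the trailing semicolon with the suggested body.
            item_src.len() - 1..item_src.len()
        } else {
            // No semicolon: insert at the very end of the item.
            item_src.len()..item_src.len()
        }
    }

    fn main() {
        assert_eq!(replacement_range("fn f();"), 6..7);
        assert_eq!(replacement_range("fn f()"), 6..6);
    }
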
diff --git a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs
index 48a70abc0b6..a58c788eb57 100644
--- a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs
@@ -218,18 +218,29 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
 
     fn report(&mut self, error: GroupedMoveError<'tcx>) {
         let (mut err, err_span) = {
-            let (span, use_spans, original_path, kind): (
+            let (span, use_spans, original_path, kind, has_complex_bindings): (
                 Span,
                 Option<UseSpans<'tcx>>,
                 Place<'tcx>,
                 &IllegalMoveOriginKind<'_>,
+                bool,
             ) = match error {
-                GroupedMoveError::MovesFromPlace { span, original_path, ref kind, .. }
-                | GroupedMoveError::MovesFromValue { span, original_path, ref kind, .. } => {
-                    (span, None, original_path, kind)
+                GroupedMoveError::MovesFromPlace {
+                    span,
+                    original_path,
+                    ref kind,
+                    ref binds_to,
+                    ..
                 }
+                | GroupedMoveError::MovesFromValue {
+                    span,
+                    original_path,
+                    ref kind,
+                    ref binds_to,
+                    ..
+                } => (span, None, original_path, kind, !binds_to.is_empty()),
                 GroupedMoveError::OtherIllegalMove { use_spans, original_path, ref kind } => {
-                    (use_spans.args_or_use(), Some(use_spans), original_path, kind)
+                    (use_spans.args_or_use(), Some(use_spans), original_path, kind, false)
                 }
             };
             debug!(
@@ -248,6 +259,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
                             target_place,
                             span,
                             use_spans,
+                            has_complex_bindings,
                         ),
                     &IllegalMoveOriginKind::InteriorOfTypeWithDestructor { container_ty: ty } => {
                         self.cannot_move_out_of_interior_of_drop(span, ty)
@@ -290,6 +302,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
         deref_target_place: Place<'tcx>,
         span: Span,
         use_spans: Option<UseSpans<'tcx>>,
+        has_complex_bindings: bool,
     ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
         // Inspect the type of the content behind the
         // borrow to provide feedback about why this
@@ -399,6 +412,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
         let diag_name = self.infcx.tcx.get_diagnostic_name(def_id);
         if matches!(diag_name, Some(sym::Option | sym::Result))
             && use_spans.map_or(true, |v| !v.for_closure())
+            && !has_complex_bindings
         {
             err.span_suggestion_verbose(
                 span.shrink_to_hi(),
diff --git a/compiler/rustc_codegen_cranelift/patches/0022-sysroot-Disable-not-compiling-tests.patch b/compiler/rustc_codegen_cranelift/patches/0022-sysroot-Disable-not-compiling-tests.patch
index 108a97bd7c6..8d9ee3f25c4 100644
--- a/compiler/rustc_codegen_cranelift/patches/0022-sysroot-Disable-not-compiling-tests.patch
+++ b/compiler/rustc_codegen_cranelift/patches/0022-sysroot-Disable-not-compiling-tests.patch
@@ -30,25 +30,5 @@ index 0000000..46fd999
 +
 +[dependencies]
 +rand = "0.7"
-diff --git a/library/core/tests/ptr.rs b/library/core/tests/ptr.rs
-index 1a6be3a..42dbd59 100644
---- a/library/core/tests/ptr.rs
-+++ b/library/core/tests/ptr.rs
-@@ -250,6 +250,7 @@ fn test_unsized_nonnull() {
-     };
- }
- 
-+/*
- #[test]
- #[allow(warnings)]
- // Have a symbol for the test below. It doesn’t need to be an actual variadic function, match the
-@@ -277,6 +277,7 @@ pub fn test_variadic_fnptr() {
-     let mut s = SipHasher::new();
-     assert_eq!(p.hash(&mut s), q.hash(&mut s));
- }
-+*/
- 
- #[test]
- fn write_unaligned_drop() {
 --
 2.21.0 (Apple Git-122)
diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs
index 3ce594b945a..abcdb81c0e2 100644
--- a/compiler/rustc_codegen_llvm/src/llvm_util.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs
@@ -187,7 +187,6 @@ pub fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]
         ("x86", "avx512vaes") => smallvec!["vaes"],
         ("x86", "avx512gfni") => smallvec!["gfni"],
         ("x86", "avx512vpclmulqdq") => smallvec!["vpclmulqdq"],
-        ("aarch64", "fp") => smallvec!["fp-armv8"],
         ("aarch64", "rcpc2") => smallvec!["rcpc-immo"],
         ("aarch64", "dpb") => smallvec!["ccpp"],
         ("aarch64", "dpb2") => smallvec!["ccdp"],
@@ -230,6 +229,8 @@ pub fn check_tied_features(
     None
 }
 
+// Used to generate cfg variables and apply features
+// Must express features in the way Rust understands them
 pub fn target_features(sess: &Session) -> Vec<Symbol> {
     let target_machine = create_informational_target_machine(sess);
     let mut features: Vec<Symbol> =
@@ -239,13 +240,14 @@ pub fn target_features(sess: &Session) -> Vec<Symbol> {
                 if sess.is_nightly_build() || gate.is_none() { Some(feature) } else { None }
             })
             .filter(|feature| {
+                // check that all features in a given smallvec are enabled
                 for llvm_feature in to_llvm_features(sess, feature) {
                     let cstr = SmallCStr::new(llvm_feature);
-                    if unsafe { llvm::LLVMRustHasFeature(target_machine, cstr.as_ptr()) } {
-                        return true;
+                    if !unsafe { llvm::LLVMRustHasFeature(target_machine, cstr.as_ptr()) } {
+                        return false;
                     }
                 }
-                false
+                true
             })
             .map(|feature| Symbol::intern(feature))
             .collect();
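
Note: the filter above is inverted on purpose: a Rust-level feature that maps to several LLVM features now counts as enabled only if the backend reports all of them, where previously a single hit was enough. A small standalone sketch of the all-of check (the mapping and helper names are hypothetical, not rustc's tables):

    // Illustrative helper: a Rust-level feature is enabled only if the backend
    // reports every LLVM feature it maps to.
    fn feature_enabled(mapped: &[&str], backend_has: impl Fn(&str) -> bool) -> bool {
        mapped.iter().copied().all(|llvm_feature| backend_has(llvm_feature))
    }

    fn main() {
        // Hypothetical mapping of one Rust feature to two backend features.
        let mapped = ["feat-a", "feat-b"];
        assert!(feature_enabled(&mapped, |f| f == "feat-a" || f == "feat-b"));
        assert!(!feature_enabled(&mapped, |f| f == "feat-a")); // one missing => not enabled
    }
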
diff --git a/compiler/rustc_codegen_ssa/src/target_features.rs b/compiler/rustc_codegen_ssa/src/target_features.rs
index dd214d96166..ba1e1862227 100644
--- a/compiler/rustc_codegen_ssa/src/target_features.rs
+++ b/compiler/rustc_codegen_ssa/src/target_features.rs
@@ -43,10 +43,8 @@ const ARM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
 ];
 
 const AARCH64_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
-    // FEAT_AdvSimd
+    // FEAT_AdvSimd & FEAT_FP
     ("neon", None),
-    // FEAT_FP
-    ("fp", None),
     // FEAT_FP16
     ("fp16", None),
     // FEAT_SVE
@@ -143,7 +141,6 @@ const AARCH64_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
 ];
 
 const AARCH64_TIED_FEATURES: &[&[&str]] = &[
-    &["fp", "neon"],   // Silicon always has both, so avoid needless complications
     &["paca", "pacg"], // Together these represent `pauth` in LLVM
 ];
 
diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs
index a8a57e6990f..d78c7a9fad9 100644
--- a/compiler/rustc_const_eval/src/interpret/eval_context.rs
+++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -177,11 +177,10 @@ pub struct LocalState<'tcx, Tag: Provenance = AllocId> {
 pub enum LocalValue<Tag: Provenance = AllocId> {
     /// This local is not currently alive, and cannot be used at all.
     Dead,
-    /// This local is alive but not yet initialized. It can be written to
-    /// but not read from or its address taken. Locals get initialized on
-    /// first write because for unsized locals, we do not know their size
-    /// before that.
-    Uninitialized,
+    /// This local is alive but not yet allocated. It cannot be read from or have its address taken,
+    /// and will be allocated on the first write. This is to support unsized locals, where we cannot
+    /// know their size in advance.
+    Unallocated,
     /// A normal, live local.
     /// Mostly for convenience, we re-use the `Operand` type here.
     /// This is an optimization over just always having a pointer here;
@@ -198,7 +197,7 @@ impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> {
     pub fn access(&self) -> InterpResult<'tcx, Operand<Tag>> {
         match self.value {
             LocalValue::Dead => throw_ub!(DeadLocal),
-            LocalValue::Uninitialized => {
+            LocalValue::Unallocated => {
                 bug!("The type checker should prevent reading from a never-written local")
             }
             LocalValue::Live(val) => Ok(val),
@@ -216,8 +215,7 @@ impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> {
         match self.value {
             LocalValue::Dead => throw_ub!(DeadLocal),
             LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
-            ref mut
-            local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Uninitialized) => {
+            ref mut local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Unallocated) => {
                 Ok(Ok(local))
             }
         }
@@ -752,8 +750,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             })?;
         }
 
-        // Locals are initially uninitialized.
-        let dummy = LocalState { value: LocalValue::Uninitialized, layout: Cell::new(None) };
+        // Locals are initially unallocated.
+        let dummy = LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) };
         let mut locals = IndexVec::from_elem(dummy, &body.local_decls);
 
         // Now mark those locals as dead that we do not want to initialize
@@ -921,7 +919,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
         trace!("{:?} is now live", local);
 
-        let local_val = LocalValue::Uninitialized;
+        let local_val = LocalValue::Unallocated;
         // StorageLive expects the local to be dead, and marks it live.
         let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
         if !matches!(old, LocalValue::Dead) {
@@ -1025,7 +1023,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
 
                 match self.ecx.stack()[frame].locals[local].value {
                     LocalValue::Dead => write!(fmt, " is dead")?,
-                    LocalValue::Uninitialized => write!(fmt, " is uninitialized")?,
+                    LocalValue::Unallocated => write!(fmt, " is unallocated")?,
                     LocalValue::Live(Operand::Indirect(mplace)) => {
                         write!(
                             fmt,
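
Note: the rename reflects a three-state lifecycle for interpreter locals: dead, alive but unallocated (the first write allocates), and live. A toy model of those transitions, with illustrative names rather than the interpreter's real types:

    // Toy model: a read requires a prior write, and the first write is what
    // performs the allocation; this is what makes unsized locals workable.
    #[allow(dead_code)]
    enum Local<T> {
        Dead,
        Unallocated,
        Live(T),
    }

    impl<T> Local<T> {
        fn read(&self) -> Result<&T, &'static str> {
            match self {
                Local::Dead => Err("local is dead"),
                Local::Unallocated => Err("local was never written"),
                Local::Live(v) => Ok(v),
            }
        }

        fn write(&mut self, v: T) -> Result<(), &'static str> {
            match self {
                Local::Dead => Err("local is dead"),
                // Unallocated or Live: allocate on first write, overwrite after.
                _ => {
                    *self = Local::Live(v);
                    Ok(())
                }
            }
        }
    }

    fn main() {
        let mut local: Local<u8> = Local::Unallocated;
        assert!(local.read().is_err());
        local.write(42).unwrap();
        assert_eq!(*local.read().unwrap(), 42);
    }
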
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index 6397fcaaf8d..871b7578abd 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -427,22 +427,12 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
             }
         }
 
-        // Extract from the pointer an `Option<AllocId>` and an offset, which is relative to the
-        // allocation or (if that is `None`) an absolute address.
-        let ptr_or_addr = if size.bytes() == 0 {
-            // Let's see what we can do, but don't throw errors if there's nothing there.
-            self.ptr_try_get_alloc(ptr)
-        } else {
-            // A "real" access, we insist on getting an `AllocId`.
-            Ok(self.ptr_get_alloc(ptr)?)
-        };
-        Ok(match ptr_or_addr {
+        Ok(match self.ptr_try_get_alloc(ptr) {
             Err(addr) => {
-                // No memory is actually being accessed.
-                debug_assert!(size.bytes() == 0);
-                // Must be non-null.
-                if addr == 0 {
-                    throw_ub!(DanglingIntPointer(0, msg))
+                // We couldn't get a proper allocation. This is only okay if the access size is 0,
+                // and the address is not null.
+                if size.bytes() > 0 || addr == 0 {
+                    throw_ub!(DanglingIntPointer(addr, msg));
                 }
                 // Must be aligned.
                 if let Some(align) = align {
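
Note: the rewritten branch folds the old zero-size special case into one rule: an address with no backing allocation is acceptable only for a zero-sized access at a non-null address. A standalone sketch of that predicate (illustrative signature, not the interpreter's API):

    // Illustrative check: integer addresses without an allocation are only
    // acceptable for zero-sized, non-null accesses.
    fn check_int_address(addr: u64, access_size: u64) -> Result<(), String> {
        if access_size > 0 || addr == 0 {
            return Err(format!("dangling integer pointer {:#x}", addr));
        }
        Ok(())
    }

    fn main() {
        assert!(check_int_address(0x10, 0).is_ok()); // zero-sized at non-null: ok
        assert!(check_int_address(0x00, 0).is_err()); // null is still rejected
        assert!(check_int_address(0x10, 4).is_err()); // real accesses need an allocation
    }
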
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index 8fad5e63baf..8bdafa87623 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -432,9 +432,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
         if let Some(ref mut ref_tracking) = self.ref_tracking {
             // Proceed recursively even for ZST, no reason to skip them!
             // `!` is a ZST and we want to validate it.
-            // Skip validation entirely for some external statics
             if let Ok((alloc_id, _offset, _ptr)) = self.ecx.memory.ptr_try_get_alloc(place.ptr) {
-                // not a ZST
+                // Special handling for pointers to statics (irrespective of their type).
                 let alloc_kind = self.ecx.tcx.get_global_alloc(alloc_id);
                 if let Some(GlobalAlloc::Static(did)) = alloc_kind {
                     assert!(!self.ecx.tcx.is_thread_local_static(did));
@@ -469,7 +468,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 // We need to clone the path anyway, make sure it gets created
                 // with enough space for the additional `Deref`.
                 let mut new_path = Vec::with_capacity(path.len() + 1);
-                new_path.clone_from(path);
+                new_path.extend(path);
                 new_path.push(PathElem::Deref);
                 new_path
             });
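
Note: the path clone now reserves room for the extra `Deref` element up front and fills the vector with `extend`. A tiny standalone sketch of the reserve-then-extend pattern, with string placeholders standing in for the real `PathElem` values:

    // Illustrative only: build the extended path without reallocating.
    fn extended_path(path: &[&'static str]) -> Vec<&'static str> {
        // Reserve space for the existing elements plus the new "Deref" entry.
        let mut new_path = Vec::with_capacity(path.len() + 1);
        new_path.extend(path);
        new_path.push("Deref");
        new_path
    }

    fn main() {
        assert_eq!(extended_path(&["field", "0"]), ["field", "0", "Deref"]);
    }
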
diff --git a/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs b/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs
index 4ea21b4d06d..1e02129855e 100644
--- a/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs
+++ b/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs
@@ -3,6 +3,7 @@
 //! See the `Qualif` trait for more info.
 
 use rustc_errors::ErrorGuaranteed;
+use rustc_hir::LangItem;
 use rustc_infer::infer::TyCtxtInferExt;
 use rustc_infer::traits::TraitEngine;
 use rustc_middle::mir::*;
@@ -152,18 +153,14 @@ impl Qualif for NeedsNonConstDrop {
             return false;
         }
 
-        let Some(drop_trait) = cx.tcx.lang_items().drop_trait() else {
-            // there is no way to define a type that needs non-const drop
-            // without having the lang item present.
-            return false;
-        };
+        let destruct = cx.tcx.require_lang_item(LangItem::Destruct, None);
 
         let obligation = Obligation::new(
             ObligationCause::dummy(),
             cx.param_env,
             ty::Binder::dummy(ty::TraitPredicate {
                 trait_ref: ty::TraitRef {
-                    def_id: drop_trait,
+                    def_id: destruct,
                     substs: cx.tcx.mk_substs_trait(ty, &[]),
                 },
                 constness: ty::BoundConstness::ConstIfConst,
@@ -174,15 +171,16 @@ impl Qualif for NeedsNonConstDrop {
         cx.tcx.infer_ctxt().enter(|infcx| {
             let mut selcx = SelectionContext::new(&infcx);
             let Some(impl_src) = selcx.select(&obligation).ok().flatten() else {
-                // If we couldn't select a const drop candidate, then it's bad
+                // If we couldn't select a const destruct candidate, then it's bad
                 return true;
             };
 
             if !matches!(
                 impl_src,
-                ImplSource::ConstDrop(_) | ImplSource::Param(_, ty::BoundConstness::ConstIfConst)
+                ImplSource::ConstDestruct(_)
+                    | ImplSource::Param(_, ty::BoundConstness::ConstIfConst)
             ) {
-                // If our const drop candidate is not ConstDrop or implied by the param env,
+                // If our const destruct candidate is not ConstDestruct or implied by the param env,
                 // then it's bad
                 return true;
             }
diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs
index 8a9efe01368..14b3f720f83 100644
--- a/compiler/rustc_expand/src/lib.rs
+++ b/compiler/rustc_expand/src/lib.rs
@@ -1,5 +1,6 @@
 #![feature(associated_type_bounds)]
 #![feature(associated_type_defaults)]
+#![feature(box_syntax)]
 #![feature(crate_visibility_modifier)]
 #![feature(decl_macro)]
 #![feature(if_let_guard)]
diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs
index 3d4c77aba73..0f7a8a9ed75 100644
--- a/compiler/rustc_expand/src/mbe.rs
+++ b/compiler/rustc_expand/src/mbe.rs
@@ -17,23 +17,48 @@ use rustc_data_structures::sync::Lrc;
 use rustc_span::symbol::Ident;
 use rustc_span::Span;
 
-/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
-/// that the delimiter itself might be `NoDelim`.
+/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiter itself
+/// might be `NoDelim`.
 #[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
 struct Delimited {
     delim: token::DelimToken,
-    tts: Vec<TokenTree>,
+    /// Note: This contains the opening and closing delimiter tokens (e.g. `(` and `)`). Note that
+    /// these could be `NoDelim`. These token kinds must match `delim`, and the methods below
+    /// debug_assert this.
+    all_tts: Vec<TokenTree>,
 }
 
 impl Delimited {
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-    fn open_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::OpenDelim(self.delim), span.open)
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter. Panics if
+    /// the delimiter is `NoDelim`.
+    fn open_tt(&self) -> &TokenTree {
+        let tt = self.all_tts.first().unwrap();
+        debug_assert!(matches!(
+            tt,
+            &TokenTree::Token(token::Token { kind: token::OpenDelim(d), .. }) if d == self.delim
+        ));
+        tt
+    }
+
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. Panics if
+    /// the delimiter is `NoDelim`.
+    fn close_tt(&self) -> &TokenTree {
+        let tt = self.all_tts.last().unwrap();
+        debug_assert!(matches!(
+            tt,
+            &TokenTree::Token(token::Token { kind: token::CloseDelim(d), .. }) if d == self.delim
+        ));
+        tt
     }
 
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-    fn close_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::CloseDelim(self.delim), span.close)
+    /// Returns the tts excluding the outer delimiters.
+    ///
+    /// FIXME: #67062 has details about why this is sub-optimal.
+    fn inner_tts(&self) -> &[TokenTree] {
+        // These functions are called for the assertions within them.
+        let _open_tt = self.open_tt();
+        let _close_tt = self.close_tt();
+        &self.all_tts[1..self.all_tts.len() - 1]
     }
 }
 
@@ -73,35 +98,24 @@ enum KleeneOp {
     ZeroOrOne,
 }
 
-/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, `$(...)`,
-/// and `${...}` are "first-class" token trees. Useful for parsing macros.
+/// Similar to `tokenstream::TokenTree`, except that `Sequence`, `MetaVar`, `MetaVarDecl`, and
+/// `MetaVarExpr` are "first-class" token trees. Useful for parsing macros.
 #[derive(Debug, Clone, PartialEq, Encodable, Decodable)]
 enum TokenTree {
     Token(Token),
+    /// A delimited sequence, e.g. `($e:expr)` (LHS) or `{ $e }` (RHS).
     Delimited(DelimSpan, Lrc<Delimited>),
-    /// A kleene-style repetition sequence
+    /// A kleene-style repetition sequence, e.g. `$($e:expr)*` (LHS) or `$($e),*` (RHS).
     Sequence(DelimSpan, Lrc<SequenceRepetition>),
-    /// e.g., `$var`
+    /// e.g., `$var`.
     MetaVar(Span, Ident),
-    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
+    /// e.g., `$var:expr`. Only appears on the LHS.
     MetaVarDecl(Span, Ident /* name to bind */, Option<NonterminalKind>),
-    /// A meta-variable expression inside `${...}`
+    /// A meta-variable expression inside `${...}`.
     MetaVarExpr(DelimSpan, MetaVarExpr),
 }
 
 impl TokenTree {
-    /// Return the number of tokens in the tree.
-    fn len(&self) -> usize {
-        match *self {
-            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-                token::NoDelim => delimed.tts.len(),
-                _ => delimed.tts.len() + 2,
-            },
-            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
-            _ => 0,
-        }
-    }
-
     /// Returns `true` if the given token tree is delimited.
     fn is_delimited(&self) -> bool {
         matches!(*self, TokenTree::Delimited(..))
@@ -115,26 +129,6 @@ impl TokenTree {
         }
     }
 
-    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
-    fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
-                delimed.tts[index].clone()
-            }
-            (&TokenTree::Delimited(span, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt(span);
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span);
-                }
-                delimed.tts[index - 1].clone()
-            }
-            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
-            _ => panic!("Cannot expand a token tree"),
-        }
-    }
-
     /// Retrieves the `TokenTree`'s span.
     fn span(&self) -> Span {
         match *self {
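
Note: the reworked `Delimited` keeps the delimiter tokens inline, so `all_tts` is laid out as `[open, inner.., close]` and the accessors recover each part by position. A simplified generic sketch of that layout (not the real `TokenTree` type):

    // Simplified layout: the first and last elements are the open and close
    // delimiters; everything in between is the delimited contents.
    struct Delimited<T> {
        all_tts: Vec<T>,
    }

    impl<T> Delimited<T> {
        fn open_tt(&self) -> &T {
            self.all_tts.first().expect("has an open delimiter")
        }
        fn close_tt(&self) -> &T {
            self.all_tts.last().expect("has a close delimiter")
        }
        fn inner_tts(&self) -> &[T] {
            &self.all_tts[1..self.all_tts.len() - 1]
        }
    }

    fn main() {
        let d = Delimited { all_tts: vec!['(', 'a', 'b', ')'] };
        assert_eq!(*d.open_tt(), '(');
        assert_eq!(*d.close_tt(), ')');
        assert_eq!(d.inner_tts(), &['a', 'b']);
    }
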
diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs
index f18cf95a2bf..b55a40c5b2c 100644
--- a/compiler/rustc_expand/src/mbe/macro_check.rs
+++ b/compiler/rustc_expand/src/mbe/macro_check.rs
@@ -281,7 +281,7 @@ fn check_binders(
         // `MetaVarExpr` can not appear in the LHS of a macro arm
         TokenTree::MetaVarExpr(..) => {}
         TokenTree::Delimited(_, ref del) => {
-            for tt in &del.tts {
+            for tt in del.inner_tts() {
                 check_binders(sess, node_id, tt, macros, binders, ops, valid);
             }
         }
@@ -344,7 +344,7 @@ fn check_occurrences(
             check_ops_is_prefix(sess, node_id, macros, binders, ops, dl.entire(), name);
         }
         TokenTree::Delimited(_, ref del) => {
-            check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
+            check_nested_occurrences(sess, node_id, del.inner_tts(), macros, binders, ops, valid);
         }
         TokenTree::Sequence(_, ref seq) => {
             let ops = ops.push(seq.kleene);
@@ -431,14 +431,20 @@ fn check_nested_occurrences(
             {
                 let macro_rules = state == NestedMacroState::MacroRulesNotName;
                 state = NestedMacroState::Empty;
-                let rest =
-                    check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, valid);
+                let rest = check_nested_macro(
+                    sess,
+                    node_id,
+                    macro_rules,
+                    del.inner_tts(),
+                    &nested_macros,
+                    valid,
+                );
                 // If we did not check the whole macro definition, then check the rest as if outside
                 // the macro definition.
                 check_nested_occurrences(
                     sess,
                     node_id,
-                    &del.tts[rest..],
+                    &del.inner_tts()[rest..],
                     macros,
                     binders,
                     ops,
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index 8cc81f1eca8..d8071bf159a 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -72,9 +72,8 @@
 
 crate use NamedMatch::*;
 crate use ParseResult::*;
-use TokenTreeOrTokenTreeSlice::*;
 
-use crate::mbe::{self, DelimSpan, SequenceRepetition, TokenTree};
+use crate::mbe::{self, SequenceRepetition, TokenTree};
 
 use rustc_ast::token::{self, DocComment, Nonterminal, Token};
 use rustc_parse::parser::Parser;
@@ -89,36 +88,6 @@ use rustc_span::symbol::Ident;
 use std::borrow::Cow;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::mem;
-use std::ops::{Deref, DerefMut};
-
-// To avoid costly uniqueness checks, we require that `MatchSeq` always has a nonempty body.
-
-/// Either a slice of token trees or a single one. This is used as the representation of the
-/// token trees that make up a matcher.
-#[derive(Clone)]
-enum TokenTreeOrTokenTreeSlice<'tt> {
-    Tt(TokenTree),
-    TtSlice(&'tt [TokenTree]),
-}
-
-impl<'tt> TokenTreeOrTokenTreeSlice<'tt> {
-    /// Returns the number of constituent top-level token trees of `self` (top-level in that it
-    /// will not recursively descend into subtrees).
-    fn len(&self) -> usize {
-        match *self {
-            TtSlice(ref v) => v.len(),
-            Tt(ref tt) => tt.len(),
-        }
-    }
-
-    /// The `index`-th token tree of `self`.
-    fn get_tt(&self, index: usize) -> TokenTree {
-        match *self {
-            TtSlice(ref v) => v[index].clone(),
-            Tt(ref tt) => tt.get_tt(index),
-        }
-    }
-}
 
 /// An unzipping of `TokenTree`s... see the `stack` field of `MatcherPos`.
 ///
@@ -127,7 +96,7 @@ impl<'tt> TokenTreeOrTokenTreeSlice<'tt> {
 #[derive(Clone)]
 struct MatcherTtFrame<'tt> {
     /// The "parent" matcher that we are descending into.
-    elts: TokenTreeOrTokenTreeSlice<'tt>,
+    elts: &'tt [TokenTree],
     /// The position of the "dot" in `elts` at the time we descended.
     idx: usize,
 }
@@ -136,26 +105,10 @@ type NamedMatchVec = SmallVec<[NamedMatch; 4]>;
 
 /// Represents a single "position" (aka "matcher position", aka "item"), as
 /// described in the module documentation.
-///
-/// Here:
-///
-/// - `'root` represents the lifetime of the stack slot that holds the root
-///   `MatcherPos`. As described in `MatcherPosHandle`, the root `MatcherPos`
-///   structure is stored on the stack, but subsequent instances are put into
-///   the heap.
-/// - `'tt` represents the lifetime of the token trees that this matcher
-///   position refers to.
-///
-/// It is important to distinguish these two lifetimes because we have a
-/// `SmallVec<TokenTreeOrTokenTreeSlice<'tt>>` below, and the destructor of
-/// that is considered to possibly access the data from its elements (it lacks
-/// a `#[may_dangle]` attribute). As a result, the compiler needs to know that
-/// all the elements in that `SmallVec` strictly outlive the root stack slot
-/// lifetime. By separating `'tt` from `'root`, we can show that.
 #[derive(Clone)]
-struct MatcherPos<'root, 'tt> {
+struct MatcherPos<'tt> {
     /// The token or slice of tokens that make up the matcher. `elts` is short for "elements".
-    top_elts: TokenTreeOrTokenTreeSlice<'tt>,
+    top_elts: &'tt [TokenTree],
 
     /// The position of the "dot" in this matcher
     idx: usize,
@@ -185,7 +138,7 @@ struct MatcherPos<'root, 'tt> {
     match_hi: usize,
 
     /// This field is only used if we are matching a repetition.
-    repetition: Option<MatcherPosRepetition<'root, 'tt>>,
+    repetition: Option<MatcherPosRepetition<'tt>>,
 
     /// Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from
     /// a delimited token tree (e.g., something wrapped in `(` `)`) or to get the contents of a doc
@@ -200,9 +153,9 @@ struct MatcherPos<'root, 'tt> {
 
 // This type is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(MatcherPos<'_, '_>, 240);
+rustc_data_structures::static_assert_size!(MatcherPos<'_>, 136);
 
-impl<'root, 'tt> MatcherPos<'root, 'tt> {
+impl<'tt> MatcherPos<'tt> {
     /// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
     fn create_matches(len: usize) -> Box<[Lrc<NamedMatchVec>]> {
         if len == 0 {
@@ -220,7 +173,7 @@ impl<'root, 'tt> MatcherPos<'root, 'tt> {
         let match_idx_hi = count_names(ms);
         MatcherPos {
             // Start with the top level matcher given to us.
-            top_elts: TtSlice(ms),
+            top_elts: ms,
 
             // The "dot" is before the first token of the matcher.
             idx: 0,
@@ -241,13 +194,9 @@ impl<'root, 'tt> MatcherPos<'root, 'tt> {
         }
     }
 
-    fn repetition(
-        up: MatcherPosHandle<'root, 'tt>,
-        sp: DelimSpan,
-        seq: Lrc<SequenceRepetition>,
-    ) -> Self {
+    fn repetition(up: Box<MatcherPos<'tt>>, seq: &'tt SequenceRepetition) -> Self {
         MatcherPos {
-            stack: smallvec![],
+            top_elts: &seq.tts,
             idx: 0,
             matches: Self::create_matches(up.matches.len()),
             match_lo: up.match_cur,
@@ -258,7 +207,7 @@ impl<'root, 'tt> MatcherPos<'root, 'tt> {
                 sep: seq.separator.clone(),
                 seq_op: seq.kleene.op,
             }),
-            top_elts: Tt(TokenTree::Sequence(sp, seq)),
+            stack: smallvec![],
         }
     }
 
@@ -270,7 +219,7 @@ impl<'root, 'tt> MatcherPos<'root, 'tt> {
 }
 
 #[derive(Clone)]
-struct MatcherPosRepetition<'root, 'tt> {
+struct MatcherPosRepetition<'tt> {
     /// The KleeneOp of this sequence.
     seq_op: mbe::KleeneOp,
 
@@ -279,55 +228,12 @@ struct MatcherPosRepetition<'root, 'tt> {
 
     /// The "parent" matcher position. That is, the matcher position just before we enter the
     /// sequence.
-    up: MatcherPosHandle<'root, 'tt>,
-}
-
-// Lots of MatcherPos instances are created at runtime. Allocating them on the
-// heap is slow. Furthermore, using SmallVec<MatcherPos> to allocate them all
-// on the stack is also slow, because MatcherPos is quite a large type and
-// instances get moved around a lot between vectors, which requires lots of
-// slow memcpy calls.
-//
-// Therefore, the initial MatcherPos is always allocated on the stack,
-// subsequent ones (of which there aren't that many) are allocated on the heap,
-// and this type is used to encapsulate both cases.
-enum MatcherPosHandle<'root, 'tt> {
-    Ref(&'root mut MatcherPos<'root, 'tt>),
-    Box(Box<MatcherPos<'root, 'tt>>),
-}
-
-impl<'root, 'tt> Clone for MatcherPosHandle<'root, 'tt> {
-    // This always produces a new Box.
-    fn clone(&self) -> Self {
-        MatcherPosHandle::Box(match *self {
-            MatcherPosHandle::Ref(ref r) => Box::new((**r).clone()),
-            MatcherPosHandle::Box(ref b) => b.clone(),
-        })
-    }
-}
-
-impl<'root, 'tt> Deref for MatcherPosHandle<'root, 'tt> {
-    type Target = MatcherPos<'root, 'tt>;
-    fn deref(&self) -> &Self::Target {
-        match *self {
-            MatcherPosHandle::Ref(ref r) => r,
-            MatcherPosHandle::Box(ref b) => b,
-        }
-    }
-}
-
-impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
-    fn deref_mut(&mut self) -> &mut MatcherPos<'root, 'tt> {
-        match *self {
-            MatcherPosHandle::Ref(ref mut r) => r,
-            MatcherPosHandle::Box(ref mut b) => b,
-        }
-    }
+    up: Box<MatcherPos<'tt>>,
 }
 
-enum EofItems<'root, 'tt> {
+enum EofItems<'tt> {
     None,
-    One(MatcherPosHandle<'root, 'tt>),
+    One(Box<MatcherPos<'tt>>),
     Multiple,
 }
 
@@ -352,8 +258,8 @@ crate type NamedParseResult = ParseResult<FxHashMap<MacroRulesNormalizedIdent, N
 pub(super) fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count
-            + match *elt {
-                TokenTree::Delimited(_, ref delim) => count_names(&delim.tts),
+            + match elt {
+                TokenTree::Delimited(_, delim) => count_names(delim.inner_tts()),
                 TokenTree::MetaVar(..) => 0,
                 TokenTree::MetaVarDecl(..) => 1,
                 // Panicking here would abort execution because `parse_tree` makes use of this
@@ -362,7 +268,7 @@ pub(super) fn count_names(ms: &[TokenTree]) -> usize {
                 // `0` is still returned to inform that no meta-variable was found. `Meta-variables
                 // != Meta-variable expressions`
                 TokenTree::MetaVarExpr(..) => 0,
-                TokenTree::Sequence(_, ref seq) => seq.num_captures,
+                TokenTree::Sequence(_, seq) => seq.num_captures,
                 TokenTree::Token(..) => 0,
             }
     })
@@ -446,7 +352,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
                 }
             }
             TokenTree::Delimited(_, ref delim) => {
-                for next_m in &delim.tts {
+                for next_m in delim.inner_tts() {
                     n_rec(sess, next_m, res.by_ref(), ret_val)?;
                 }
             }
@@ -492,319 +398,316 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
     }
 }
 
-/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
-/// produce more items in `next_items` and `bb_items`.
-///
-/// For more info about the how this happens, see the module-level doc comments and the inline
-/// comments of this function.
-///
-/// # Parameters
-///
-/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
-///   successful execution of this function.
-/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
-///   the function `parse`.
-/// - `bb_items`: the set of items that are waiting for the black-box parser.
-/// - `token`: the current token of the parser.
-///
-/// # Returns
-///
-/// `Some(result)` if everything is finished, `None` otherwise. Note that matches are kept track of
-/// through the items generated.
-fn parse_tt_inner<'root, 'tt>(
-    sess: &ParseSess,
-    ms: &[TokenTree],
-    cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    next_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    token: &Token,
-) -> Option<NamedParseResult> {
-    // Matcher positions that would be valid if the macro invocation was over now. Only modified if
-    // `token == Eof`.
-    let mut eof_items = EofItems::None;
-
-    while let Some(mut item) = cur_items.pop() {
-        // When unzipped trees end, remove them. This corresponds to backtracking out of a
-        // delimited submatcher into which we already descended. When backtracking out again, we
-        // need to advance the "dot" past the delimiters in the outer matcher.
-        while item.idx >= item.top_elts.len() {
-            match item.stack.pop() {
-                Some(MatcherTtFrame { elts, idx }) => {
-                    item.top_elts = elts;
-                    item.idx = idx + 1;
+// Note: the item vectors could be created and dropped within `parse_tt`, but to avoid excess
+// allocations we have a single vector for each kind that is cleared and reused repeatedly.
+pub struct TtParser<'tt> {
+    macro_name: Ident,
+
+    /// The set of current items to be processed. This should be empty by the end of a successful
+    /// execution of `parse_tt_inner`.
+    cur_items: Vec<Box<MatcherPos<'tt>>>,
+
+    /// The set of newly generated items. These are used to replenish `cur_items` in the function
+    /// `parse_tt`.
+    next_items: Vec<Box<MatcherPos<'tt>>>,
+
+    /// The set of items that are waiting for the black-box parser.
+    bb_items: Vec<Box<MatcherPos<'tt>>>,
+}
+
+impl<'tt> TtParser<'tt> {
+    pub(super) fn new(macro_name: Ident) -> TtParser<'tt> {
+        TtParser { macro_name, cur_items: vec![], next_items: vec![], bb_items: vec![] }
+    }
+
+    /// Process the matcher positions of `cur_items` until it is empty. In the process, this will
+    /// produce more items in `next_items` and `bb_items`.
+    ///
+    /// For more info about how this happens, see the module-level doc comments and the inline
+    /// comments of this function.
+    ///
+    /// # Returns
+    ///
+    /// `Some(result)` if everything is finished, `None` otherwise. Note that matches are kept
+    /// track of through the items generated.
+    fn parse_tt_inner(
+        &mut self,
+        sess: &ParseSess,
+        ms: &[TokenTree],
+        token: &Token,
+    ) -> Option<NamedParseResult> {
+        // Matcher positions that would be valid if the macro invocation was over now. Only
+        // modified if `token == Eof`.
+        let mut eof_items = EofItems::None;
+
+        while let Some(mut item) = self.cur_items.pop() {
+            // When unzipped trees end, remove them. This corresponds to backtracking out of a
+            // delimited submatcher into which we already descended. When backtracking out again, we
+            // need to advance the "dot" past the delimiters in the outer matcher.
+            while item.idx >= item.top_elts.len() {
+                match item.stack.pop() {
+                    Some(MatcherTtFrame { elts, idx }) => {
+                        item.top_elts = elts;
+                        item.idx = idx + 1;
+                    }
+                    None => break,
                 }
-                None => break,
             }
-        }
 
-        // Get the current position of the "dot" (`idx`) in `item` and the number of token trees in
-        // the matcher (`len`).
-        let idx = item.idx;
-        let len = item.top_elts.len();
-
-        if idx < len {
-            // We are in the middle of a matcher. Compare the matcher's current tt against `token`.
-            match item.top_elts.get_tt(idx) {
-                TokenTree::Sequence(sp, seq) => {
-                    let op = seq.kleene.op;
-                    if op == mbe::KleeneOp::ZeroOrMore || op == mbe::KleeneOp::ZeroOrOne {
-                        // Allow for the possibility of zero matches of this sequence.
-                        let mut new_item = item.clone();
-                        new_item.match_cur += seq.num_captures;
-                        new_item.idx += 1;
-                        for idx in item.match_cur..item.match_cur + seq.num_captures {
-                            new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![])));
+            // Get the current position of the "dot" (`idx`) in `item` and the number of token
+            // trees in the matcher (`len`).
+            let idx = item.idx;
+            let len = item.top_elts.len();
+
+            if idx < len {
+                // We are in the middle of a matcher. Compare the matcher's current tt against
+                // `token`.
+                match &item.top_elts[idx] {
+                    TokenTree::Sequence(_sp, seq) => {
+                        let op = seq.kleene.op;
+                        if op == mbe::KleeneOp::ZeroOrMore || op == mbe::KleeneOp::ZeroOrOne {
+                            // Allow for the possibility of zero matches of this sequence.
+                            let mut new_item = item.clone();
+                            new_item.match_cur += seq.num_captures;
+                            new_item.idx += 1;
+                            for idx in item.match_cur..item.match_cur + seq.num_captures {
+                                new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![])));
+                            }
+                            self.cur_items.push(new_item);
                         }
-                        cur_items.push(new_item);
+
+                        // Allow for the possibility of one or more matches of this sequence.
+                        self.cur_items.push(box MatcherPos::repetition(item, &seq));
                     }
 
-                    // Allow for the possibility of one or more matches of this sequence.
-                    cur_items.push(MatcherPosHandle::Box(Box::new(MatcherPos::repetition(
-                        item, sp, seq,
-                    ))));
-                }
+                    &TokenTree::MetaVarDecl(span, _, None) => {
+                        // E.g. `$e` instead of `$e:expr`.
+                        if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
+                            return Some(Error(span, "missing fragment specifier".to_string()));
+                        }
+                    }
 
-                TokenTree::MetaVarDecl(span, _, None) => {
-                    // E.g. `$e` instead of `$e:expr`.
-                    if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
-                        return Some(Error(span, "missing fragment specifier".to_string()));
+                    &TokenTree::MetaVarDecl(_, _, Some(kind)) => {
+                        // Built-in nonterminals never start with these tokens, so we can eliminate
+                        // them from consideration.
+                        //
+                        // We use the span of the metavariable declaration to determine any
+                        // edition-specific matching behavior for non-terminals.
+                        if Parser::nonterminal_may_begin_with(kind, token) {
+                            self.bb_items.push(item);
+                        }
                     }
-                }
 
-                TokenTree::MetaVarDecl(_, _, Some(kind)) => {
-                    // Built-in nonterminals never start with these tokens, so we can eliminate
-                    // them from consideration.
-                    //
-                    // We use the span of the metavariable declaration to determine any
-                    // edition-specific matching behavior for non-terminals.
-                    if Parser::nonterminal_may_begin_with(kind, token) {
-                        bb_items.push(item);
+                    TokenTree::Delimited(_, delimited) => {
+                        // To descend into a delimited submatcher, we push the current matcher onto
+                        // a stack and push a new item containing the submatcher onto `cur_items`.
+                        //
+                        // At the beginning of the loop, if we reach the end of the delimited
+                        // submatcher, we pop the stack to backtrack out of the descent. Note that
+                        // we use `all_tts` to include the open and close delimiter tokens.
+                        let lower_elts = mem::replace(&mut item.top_elts, &delimited.all_tts);
+                        let idx = item.idx;
+                        item.stack.push(MatcherTtFrame { elts: lower_elts, idx });
+                        item.idx = 0;
+                        self.cur_items.push(item);
                     }
-                }
 
-                seq @ (TokenTree::Delimited(..)
-                | TokenTree::Token(Token { kind: DocComment(..), .. })) => {
-                    // To descend into a delimited submatcher or a doc comment, we push the current
-                    // matcher onto a stack and push a new item containing the submatcher onto
-                    // `cur_items`.
-                    //
-                    // At the beginning of the loop, if we reach the end of the delimited
-                    // submatcher, we pop the stack to backtrack out of the descent.
-                    let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
-                    let idx = item.idx;
-                    item.stack.push(MatcherTtFrame { elts: lower_elts, idx });
-                    item.idx = 0;
-                    cur_items.push(item);
+                    TokenTree::Token(t) => {
+                        // Doc comments cannot appear in a matcher.
+                        debug_assert!(!matches!(t, Token { kind: DocComment(..), .. }));
+
+                        // If the token matches, we can just advance the parser. Otherwise, this
+                        // match has failed, there is nothing to do, and hopefully another item in
+                        // `cur_items` will match.
+                        if token_name_eq(&t, token) {
+                            item.idx += 1;
+                            self.next_items.push(item);
+                        }
+                    }
+
+                    // These cannot appear in a matcher.
+                    TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
+                }
+            } else if let Some(repetition) = &item.repetition {
+                // We are past the end of a repetition.
+                debug_assert!(idx <= len + 1);
+
+                if idx == len {
+                    // Add all matches from the sequence to `up`, and move the "dot" past the
+                    // repetition in `up`. This allows for the case where the sequence matching is
+                    // finished.
+                    let mut new_pos = repetition.up.clone();
+                    for idx in item.match_lo..item.match_hi {
+                        let sub = item.matches[idx].clone();
+                        new_pos.push_match(idx, MatchedSeq(sub));
+                    }
+                    new_pos.match_cur = item.match_hi;
+                    new_pos.idx += 1;
+                    self.cur_items.push(new_pos);
                 }
 
-                TokenTree::Token(t) => {
-                    // If the token matches, we can just advance the parser. Otherwise, this match
-                    // hash failed, there is nothing to do, and hopefully another item in
-                    // `cur_items` will match.
-                    if token_name_eq(&t, token) {
+                if idx == len && repetition.sep.is_some() {
+                    if repetition.sep.as_ref().map_or(false, |sep| token_name_eq(token, sep)) {
+                        // The matcher has a separator, and it matches the current token. We can
+                        // advance past the separator token.
                         item.idx += 1;
-                        next_items.push(item);
+                        self.next_items.push(item);
                     }
+                } else if repetition.seq_op != mbe::KleeneOp::ZeroOrOne {
+                    // We don't need a separator. Move the "dot" back to the beginning of the
+                    // matcher and try to match again UNLESS we are only allowed to have _one_
+                    // repetition.
+                    item.match_cur = item.match_lo;
+                    item.idx = 0;
+                    self.cur_items.push(item);
                 }
-
-                // These cannot appear in a matcher.
-                TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
-            }
-        } else if let Some(repetition) = &item.repetition {
-            // We are past the end of a repetition.
-            debug_assert!(idx <= len + 1);
-            debug_assert!(matches!(item.top_elts, Tt(TokenTree::Sequence(..))));
-
-            if idx == len {
-                // Add all matches from the sequence to `up`, and move the "dot" past the
-                // repetition in `up`. This allows for the case where the sequence matching is
-                // finished.
-                let mut new_pos = repetition.up.clone();
-                for idx in item.match_lo..item.match_hi {
-                    let sub = item.matches[idx].clone();
-                    new_pos.push_match(idx, MatchedSeq(sub));
+            } else {
+                // We are past the end of the matcher, and not in a repetition. Look for end of
+                // input.
+                debug_assert_eq!(idx, len);
+                if *token == token::Eof {
+                    eof_items = match eof_items {
+                        EofItems::None => EofItems::One(item),
+                        EofItems::One(_) | EofItems::Multiple => EofItems::Multiple,
+                    }
                 }
-                new_pos.match_cur = item.match_hi;
-                new_pos.idx += 1;
-                cur_items.push(new_pos);
             }
+        }
 
-            if idx == len && repetition.sep.is_some() {
-                if repetition.sep.as_ref().map_or(false, |sep| token_name_eq(token, sep)) {
-                    // The matcher has a separator, and it matches the current token. We can
-                    // advance past the separator token.
-                    item.idx += 1;
-                    next_items.push(item);
+        // If we reached the end of input, check that there is EXACTLY ONE possible matcher.
+        // Otherwise, either the parse is ambiguous (which is an error) or there is a syntax error.
+        if *token == token::Eof {
+            Some(match eof_items {
+                EofItems::One(mut eof_item) => {
+                    let matches =
+                        eof_item.matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap());
+                    nameize(sess, ms, matches)
                 }
-            } else if repetition.seq_op != mbe::KleeneOp::ZeroOrOne {
-                // We don't need a separator. Move the "dot" back to the beginning of the
-                // matcher and try to match again UNLESS we are only allowed to have _one_
-                // repetition.
-                item.match_cur = item.match_lo;
-                item.idx = 0;
-                cur_items.push(item);
-            }
-        } else {
-            // We are past the end of the matcher, and not in a repetition. Look for end of input.
-            debug_assert_eq!(idx, len);
-            if *token == token::Eof {
-                eof_items = match eof_items {
-                    EofItems::None => EofItems::One(item),
-                    EofItems::One(_) | EofItems::Multiple => EofItems::Multiple,
+                EofItems::Multiple => {
+                    Error(token.span, "ambiguity: multiple successful parses".to_string())
                 }
-            }
+                EofItems::None => Failure(
+                    Token::new(
+                        token::Eof,
+                        if token.span.is_dummy() { token.span } else { token.span.shrink_to_hi() },
+                    ),
+                    "missing tokens in macro arguments",
+                ),
+            })
+        } else {
+            None
         }
     }
 
-    // If we reached the end of input, check that there is EXACTLY ONE possible matcher. Otherwise,
-    // either the parse is ambiguous (which is an error) or there is a syntax error.
-    if *token == token::Eof {
-        Some(match eof_items {
-            EofItems::One(mut eof_item) => {
-                let matches =
-                    eof_item.matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap());
-                nameize(sess, ms, matches)
-            }
-            EofItems::Multiple => {
-                Error(token.span, "ambiguity: multiple successful parses".to_string())
+    /// Use the given slice of token trees (`ms`) as a matcher. Match the token stream from the
+    /// given `parser` against it and return the match.
+    pub(super) fn parse_tt(
+        &mut self,
+        parser: &mut Cow<'_, Parser<'_>>,
+        ms: &'tt [TokenTree],
+    ) -> NamedParseResult {
+        // A queue of possible matcher positions. We initialize it with the matcher position in
+        // which the "dot" is before the first token of the first token tree in `ms`.
+        // `parse_tt_inner` then processes all of these possible matcher positions and produces
+        // possible next positions into `next_items`. After some post-processing, the contents of
+        // `next_items` replenish `cur_items` and we start over again.
+        self.cur_items.clear();
+        self.cur_items.push(box MatcherPos::new(ms));
+
+        loop {
+            self.next_items.clear();
+            self.bb_items.clear();
+
+            // Process `cur_items` until either we have finished the input or we need to get some
+            // parsing from the black-box parser done.
+            if let Some(result) = self.parse_tt_inner(parser.sess, ms, &parser.token) {
+                return result;
             }
-            EofItems::None => Failure(
-                Token::new(
-                    token::Eof,
-                    if token.span.is_dummy() { token.span } else { token.span.shrink_to_hi() },
-                ),
-                "missing tokens in macro arguments",
-            ),
-        })
-    } else {
-        None
-    }
-}
-
-/// Use the given slice of token trees (`ms`) as a matcher. Match the token stream from the given
-/// `parser` against it and return the match.
-pub(super) fn parse_tt(
-    parser: &mut Cow<'_, Parser<'_>>,
-    ms: &[TokenTree],
-    macro_name: Ident,
-) -> NamedParseResult {
-    // A queue of possible matcher positions. We initialize it with the matcher position in which
-    // the "dot" is before the first token of the first token tree in `ms`. `parse_tt_inner` then
-    // processes all of these possible matcher positions and produces possible next positions into
-    // `next_items`. After some post-processing, the contents of `next_items` replenish `cur_items`
-    // and we start over again.
-    //
-    // This MatcherPos instance is allocated on the stack. All others -- and there are frequently
-    // *no* others! -- are allocated on the heap.
-    let mut initial = MatcherPos::new(ms);
-    let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
-
-    loop {
-        let mut next_items = SmallVec::new();
-
-        // Matcher positions black-box parsed by `Parser`.
-        let mut bb_items = SmallVec::new();
-
-        // Process `cur_items` until either we have finished the input or we need to get some
-        // parsing from the black-box parser done.
-        if let Some(result) = parse_tt_inner(
-            parser.sess,
-            ms,
-            &mut cur_items,
-            &mut next_items,
-            &mut bb_items,
-            &parser.token,
-        ) {
-            return result;
-        }
 
-        // `parse_tt_inner` handled all cur_items, so it's empty.
-        assert!(cur_items.is_empty());
+            // `parse_tt_inner` handled all cur_items, so it's empty.
+            assert!(self.cur_items.is_empty());
+
+            // Error messages here could be improved with links to original rules.
+            match (self.next_items.len(), self.bb_items.len()) {
+                (0, 0) => {
+                    // There are no possible next positions AND we aren't waiting for the black-box
+                    // parser: syntax error.
+                    return Failure(
+                        parser.token.clone(),
+                        "no rules expected this token in macro call",
+                    );
+                }
 
-        // Error messages here could be improved with links to original rules.
-        match (next_items.len(), bb_items.len()) {
-            (0, 0) => {
-                // There are no possible next positions AND we aren't waiting for the black-box
-                // parser: syntax error.
-                return Failure(parser.token.clone(), "no rules expected this token in macro call");
-            }
+                (_, 0) => {
+                    // Dump all possible `next_items` into `cur_items` for the next iteration. Then
+                    // process the next token.
+                    self.cur_items.extend(self.next_items.drain(..));
+                    parser.to_mut().bump();
+                }
 
-            (_, 0) => {
-                // Dump all possible `next_items` into `cur_items` for the next iteration. Then
-                // process the next token.
-                cur_items.extend(next_items.drain(..));
-                parser.to_mut().bump();
-            }
+                (0, 1) => {
+                    // We need to call the black-box parser to get some nonterminal.
+                    let mut item = self.bb_items.pop().unwrap();
+                    if let TokenTree::MetaVarDecl(span, _, Some(kind)) = item.top_elts[item.idx] {
+                        let match_cur = item.match_cur;
+                        // We use the span of the metavariable declaration to determine any
+                        // edition-specific matching behavior for non-terminals.
+                        let nt = match parser.to_mut().parse_nonterminal(kind) {
+                            Err(mut err) => {
+                                err.span_label(
+                                    span,
+                                    format!(
+                                        "while parsing argument for this `{kind}` macro fragment"
+                                    ),
+                                )
+                                .emit();
+                                return ErrorReported;
+                            }
+                            Ok(nt) => nt,
+                        };
+                        item.push_match(match_cur, MatchedNonterminal(Lrc::new(nt)));
+                        item.idx += 1;
+                        item.match_cur += 1;
+                    } else {
+                        unreachable!()
+                    }
+                    self.cur_items.push(item);
+                }
 
-            (0, 1) => {
-                // We need to call the black-box parser to get some nonterminal.
-                let mut item = bb_items.pop().unwrap();
-                if let TokenTree::MetaVarDecl(span, _, Some(kind)) = item.top_elts.get_tt(item.idx)
-                {
-                    let match_cur = item.match_cur;
-                    // We use the span of the metavariable declaration to determine any
-                    // edition-specific matching behavior for non-terminals.
-                    let nt = match parser.to_mut().parse_nonterminal(kind) {
-                        Err(mut err) => {
-                            err.span_label(
-                                span,
-                                format!("while parsing argument for this `{kind}` macro fragment"),
-                            )
-                            .emit();
-                            return ErrorReported;
-                        }
-                        Ok(nt) => nt,
-                    };
-                    item.push_match(match_cur, MatchedNonterminal(Lrc::new(nt)));
-                    item.idx += 1;
-                    item.match_cur += 1;
-                } else {
-                    unreachable!()
+                (_, _) => {
+                    // Too many possibilities!
+                    return self.ambiguity_error(parser.token.span);
                 }
-                cur_items.push(item);
             }
 
-            (_, _) => {
-                // Too many possibilities!
-                return bb_items_ambiguity_error(
-                    macro_name,
-                    next_items,
-                    bb_items,
-                    parser.token.span,
-                );
-            }
+            assert!(!self.cur_items.is_empty());
         }
-
-        assert!(!cur_items.is_empty());
     }
-}
 
-fn bb_items_ambiguity_error<'root, 'tt>(
-    macro_name: Ident,
-    next_items: SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    bb_items: SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    token_span: rustc_span::Span,
-) -> NamedParseResult {
-    let nts = bb_items
-        .iter()
-        .map(|item| match item.top_elts.get_tt(item.idx) {
-            TokenTree::MetaVarDecl(_, bind, Some(kind)) => {
-                format!("{} ('{}')", kind, bind)
-            }
-            _ => panic!(),
-        })
-        .collect::<Vec<String>>()
-        .join(" or ");
-
-    Error(
-        token_span,
-        format!(
-            "local ambiguity when calling macro `{macro_name}`: multiple parsing options: {}",
-            match next_items.len() {
-                0 => format!("built-in NTs {}.", nts),
-                1 => format!("built-in NTs {} or 1 other option.", nts),
-                n => format!("built-in NTs {} or {} other options.", nts, n),
-            }
-        ),
-    )
+    fn ambiguity_error(&self, token_span: rustc_span::Span) -> NamedParseResult {
+        let nts = self
+            .bb_items
+            .iter()
+            .map(|item| match item.top_elts[item.idx] {
+                TokenTree::MetaVarDecl(_, bind, Some(kind)) => {
+                    format!("{} ('{}')", kind, bind)
+                }
+                _ => panic!(),
+            })
+            .collect::<Vec<String>>()
+            .join(" or ");
+
+        Error(
+            token_span,
+            format!(
+                "local ambiguity when calling macro `{}`: multiple parsing options: {}",
+                self.macro_name,
+                match self.next_items.len() {
+                    0 => format!("built-in NTs {}.", nts),
+                    1 => format!("built-in NTs {} or 1 other option.", nts),
+                    n => format!("built-in NTs {} or {} other options.", nts, n),
+                }
+            ),
+        )
+    }
 }
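
The macro_parser.rs changes above turn the free `parse_tt`/`parse_tt_inner` functions into methods on a `TtParser` that owns its `cur_items`, `next_items` and `bb_items` buffers, so matching reuses those allocations across calls instead of building fresh vectors each time. A minimal sketch of that reusable-buffer pattern, using hypothetical types rather than rustc's own:

    struct Matcher {
        // Scratch buffers kept alive across calls, in the spirit of `cur_items`/`next_items`.
        cur_items: Vec<usize>,
        next_items: Vec<usize>,
    }

    impl Matcher {
        fn new() -> Self {
            Matcher { cur_items: Vec::new(), next_items: Vec::new() }
        }

        // Each call clears the buffers rather than allocating new ones.
        fn count_positions(&mut self, input: &[u8]) -> usize {
            self.cur_items.clear();
            self.cur_items.push(0);
            for _byte in input {
                self.next_items.clear();
                self.next_items.extend(self.cur_items.iter().map(|pos| pos + 1));
                std::mem::swap(&mut self.cur_items, &mut self.next_items);
            }
            self.cur_items.len()
        }
    }

    fn main() {
        let mut m = Matcher::new();
        assert_eq!(m.count_positions(b"abc"), 1);
        assert_eq!(m.count_positions(b"xy"), 1); // the second call reuses the buffers
    }
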
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index b93edf8da7a..f13b97251d2 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -3,8 +3,7 @@ use crate::base::{SyntaxExtension, SyntaxExtensionKind};
 use crate::expand::{ensure_complete_parse, parse_ast_fragment, AstFragment, AstFragmentKind};
 use crate::mbe;
 use crate::mbe::macro_check;
-use crate::mbe::macro_parser::parse_tt;
-use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success};
+use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser};
 use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq};
 use crate::mbe::transcribe::transcribe;
 
@@ -204,15 +203,15 @@ fn trace_macros_note(cx_expansions: &mut FxHashMap<Span, Vec<String>>, sp: Span,
 }
 
 /// Given `lhses` and `rhses`, this is the new macro we create
-fn generic_extension<'cx>(
+fn generic_extension<'cx, 'tt>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
     def_span: Span,
     name: Ident,
     transparency: Transparency,
     arg: TokenStream,
-    lhses: &[mbe::TokenTree],
-    rhses: &[mbe::TokenTree],
+    lhses: &'tt [mbe::TokenTree],
+    rhses: &'tt [mbe::TokenTree],
     is_local: bool,
 ) -> Box<dyn MacResult + 'cx> {
     let sess = &cx.sess.parse_sess;
@@ -246,30 +245,30 @@ fn generic_extension<'cx>(
     // this situation.)
     let parser = parser_from_cx(sess, arg.clone());
 
-    for (i, lhs) in lhses.iter().enumerate() {
-        // try each arm's matchers
-        let lhs_tt = match *lhs {
-            mbe::TokenTree::Delimited(_, ref delim) => &delim.tts,
+    // A matcher is always delimited, but the delimiters are ignored.
+    let delimited_inner_tts = |tt: &'tt mbe::TokenTree| -> &'tt [mbe::TokenTree] {
+        match tt {
+            mbe::TokenTree::Delimited(_, delimited) => delimited.inner_tts(),
             _ => cx.span_bug(sp, "malformed macro lhs"),
-        };
+        }
+    };
 
+    // Try each arm's matchers.
+    let mut tt_parser = TtParser::new(name);
+    for (i, lhs) in lhses.iter().enumerate() {
         // Take a snapshot of the state of pre-expansion gating at this point.
         // This is used so that if a matcher is not `Success(..)`ful,
         // then the spans which became gated when parsing the unsuccessful matcher
         // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
         let mut gated_spans_snapshot = mem::take(&mut *sess.gated_spans.spans.borrow_mut());
 
-        match parse_tt(&mut Cow::Borrowed(&parser), lhs_tt, name) {
+        match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), delimited_inner_tts(lhs)) {
             Success(named_matches) => {
                 // The matcher was `Success(..)`ful.
                 // Merge the gated spans from parsing the matcher with the pre-existing ones.
                 sess.gated_spans.merge(gated_spans_snapshot);
 
-                let rhs = match rhses[i] {
-                    // ignore delimiters
-                    mbe::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
-                    _ => cx.span_bug(sp, "malformed macro rhs"),
-                };
+                let rhs = delimited_inner_tts(&rhses[i]).to_vec();
                 let arm_span = rhses[i].span();
 
                 let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
@@ -347,14 +346,10 @@ fn generic_extension<'cx>(
     // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
     if let Some((arg, comma_span)) = arg.add_comma() {
         for lhs in lhses {
-            // try each arm's matchers
-            let lhs_tt = match *lhs {
-                mbe::TokenTree::Delimited(_, ref delim) => &delim.tts,
-                _ => continue,
-            };
-            if let Success(_) =
-                parse_tt(&mut Cow::Borrowed(&parser_from_cx(sess, arg.clone())), lhs_tt, name)
-            {
+            if let Success(_) = tt_parser.parse_tt(
+                &mut Cow::Borrowed(&parser_from_cx(sess, arg.clone())),
+                delimited_inner_tts(lhs),
+            ) {
                 if comma_span.is_dummy() {
                     err.note("you might be missing a comma");
                 } else {
@@ -447,7 +442,8 @@ pub fn compile_declarative_macro(
     ];
 
     let parser = Parser::new(&sess.parse_sess, body, true, rustc_parse::MACRO_ARGUMENTS);
-    let argument_map = match parse_tt(&mut Cow::Borrowed(&parser), &argument_gram, def.ident) {
+    let mut tt_parser = TtParser::new(def.ident);
+    let argument_map = match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &argument_gram) {
         Success(m) => m,
         Failure(token, msg) => {
             let s = parse_failure_msg(&token);
@@ -476,16 +472,17 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        let tt = mbe::quoted::parse(
+                        let mut tts = vec![];
+                        mbe::quoted::parse(
                             tt.clone().into(),
                             true,
                             &sess.parse_sess,
                             def.id,
                             features,
                             edition,
-                        )
-                        .pop()
-                        .unwrap();
+                            &mut tts,
+                        );
+                        let tt = tts.pop().unwrap();
                         valid &= check_lhs_nt_follows(&sess.parse_sess, features, &def, &tt);
                         return tt;
                     }
@@ -502,16 +499,17 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        return mbe::quoted::parse(
+                        let mut tts = vec![];
+                        mbe::quoted::parse(
                             tt.clone().into(),
                             false,
                             &sess.parse_sess,
                             def.id,
                             features,
                             edition,
-                        )
-                        .pop()
-                        .unwrap();
+                            &mut tts,
+                        );
+                        return tts.pop().unwrap();
                     }
                 }
                 sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
@@ -563,8 +561,8 @@ fn check_lhs_nt_follows(
 ) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    if let mbe::TokenTree::Delimited(_, ref tts) = *lhs {
-        check_matcher(sess, features, def, &tts.tts)
+    if let mbe::TokenTree::Delimited(_, delimited) = lhs {
+        check_matcher(sess, features, def, delimited.inner_tts())
     } else {
         let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
         sess.span_diagnostic.span_err(lhs.span(), msg);
@@ -585,7 +583,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
             | TokenTree::MetaVarDecl(..)
             | TokenTree::MetaVarExpr(..) => (),
             TokenTree::Delimited(_, ref del) => {
-                if !check_lhs_no_empty_seq(sess, &del.tts) {
+                if !check_lhs_no_empty_seq(sess, del.inner_tts()) {
                     return false;
                 }
             }
@@ -680,9 +678,9 @@ impl FirstSets {
                     | TokenTree::MetaVarExpr(..) => {
                         first.replace_with(tt.clone());
                     }
-                    TokenTree::Delimited(span, ref delimited) => {
-                        build_recur(sets, &delimited.tts);
-                        first.replace_with(delimited.open_tt(span));
+                    TokenTree::Delimited(_span, ref delimited) => {
+                        build_recur(sets, delimited.inner_tts());
+                        first.replace_with(delimited.open_tt().clone());
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts);
@@ -746,8 +744,8 @@ impl FirstSets {
                     first.add_one(tt.clone());
                     return first;
                 }
-                TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                TokenTree::Delimited(_span, ref delimited) => {
+                    first.add_one(delimited.open_tt().clone());
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
@@ -933,9 +931,9 @@ fn check_matcher_core(
                     suffix_first = build_suffix_first();
                 }
             }
-            TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span));
-                check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
+            TokenTree::Delimited(_span, ref d) => {
+                let my_suffix = TokenSet::singleton(d.close_tt().clone());
+                check_matcher_core(sess, features, def, first_sets, d.inner_tts(), &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
 
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 12c5dac9e0b..b3ed6b8e4db 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -45,10 +45,8 @@ pub(super) fn parse(
     node_id: NodeId,
     features: &Features,
     edition: Edition,
-) -> Vec<TokenTree> {
-    // Will contain the final collection of `self::TokenTree`
-    let mut result = Vec::new();
-
+    result: &mut Vec<TokenTree>,
+) {
     // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
     // additional trees if need be.
     let mut trees = input.trees();
@@ -115,7 +113,6 @@ pub(super) fn parse(
             _ => result.push(tree),
         }
     }
-    result
 }
 
 /// Asks for the `macro_metavar_expr` feature if it is not already declared
@@ -208,7 +205,8 @@ fn parse_tree(
                     // If we didn't find a metavar expression above, then we must have a
                     // repetition sequence in the macro (e.g. `$(pat)*`).  Parse the
                     // contents of the sequence itself
-                    let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
+                    let mut sequence = vec![];
+                    parse(tts, parsing_patterns, sess, node_id, features, edition, &mut sequence);
                     // Get the Kleene operator and optional separator
                     let (separator, kleene) =
                         parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess);
@@ -225,8 +223,8 @@ fn parse_tree(
                     )
                 }
 
-                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
-                // metavariable that names the crate of the invocation.
+                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate`
+                // special metavariable that names the crate of the invocation.
                 Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                     let (ident, is_raw) = token.ident().unwrap();
                     let span = ident.span.with_lo(span.lo());
@@ -270,13 +268,15 @@ fn parse_tree(
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
-        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
-            span,
-            Lrc::new(Delimited {
-                delim,
-                tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
-            }),
-        ),
+        tokenstream::TokenTree::Delimited(span, delim, tts) => {
+            let mut all_tts = vec![];
+            // Add the explicit open and close delimiters, which
+            // `tokenstream::TokenTree::Delimited` lacks.
+            all_tts.push(TokenTree::token(token::OpenDelim(delim), span.open));
+            parse(tts, parsing_patterns, sess, node_id, features, edition, &mut all_tts);
+            all_tts.push(TokenTree::token(token::CloseDelim(delim), span.close));
+            TokenTree::Delimited(span, Lrc::new(Delimited { delim, all_tts }))
+        }
     }
 }
 
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index 5ec63739cf5..e097f9d9c02 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -10,7 +10,7 @@ use rustc_errors::{pluralize, PResult};
 use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
 use rustc_span::hygiene::{LocalExpnId, Transparency};
 use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
-use rustc_span::Span;
+use rustc_span::{Span, DUMMY_SP};
 
 use smallvec::{smallvec, SmallVec};
 use std::mem;
@@ -34,8 +34,14 @@ enum Frame {
 
 impl Frame {
     /// Construct a new frame around the delimited set of tokens.
-    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
-        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
+    fn new(mut tts: Vec<mbe::TokenTree>) -> Frame {
+        // Need to add empty delimiters.
+        let open_tt = mbe::TokenTree::token(token::OpenDelim(token::NoDelim), DUMMY_SP);
+        let close_tt = mbe::TokenTree::token(token::CloseDelim(token::NoDelim), DUMMY_SP);
+        tts.insert(0, open_tt);
+        tts.push(close_tt);
+
+        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, all_tts: tts });
         Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
     }
 }
@@ -46,12 +52,14 @@ impl Iterator for Frame {
     fn next(&mut self) -> Option<mbe::TokenTree> {
         match *self {
             Frame::Delimited { ref forest, ref mut idx, .. } => {
+                let res = forest.inner_tts().get(*idx).cloned();
                 *idx += 1;
-                forest.tts.get(*idx - 1).cloned()
+                res
             }
             Frame::Sequence { ref forest, ref mut idx, .. } => {
+                let res = forest.tts.get(*idx).cloned();
                 *idx += 1;
-                forest.tts.get(*idx - 1).cloned()
+                res
             }
         }
     }
@@ -257,7 +265,7 @@ pub(super) fn transcribe<'a>(
 
             // Replace meta-variable expressions with the result of their expansion.
             mbe::TokenTree::MetaVarExpr(sp, expr) => {
-                transcribe_metavar_expr(cx, expr, interp, &repeats, &mut result, &sp)?;
+                transcribe_metavar_expr(cx, expr, interp, &mut marker, &repeats, &mut result, &sp)?;
             }
 
             // If we are entering a new delimiter, we push its contents to the `stack` to be
@@ -376,8 +384,8 @@ fn lockstep_iter_size(
 ) -> LockstepIterSize {
     use mbe::TokenTree;
     match *tree {
-        TokenTree::Delimited(_, ref delimed) => {
-            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
+        TokenTree::Delimited(_, ref delimited) => {
+            delimited.inner_tts().iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                 size.with(lockstep_iter_size(tt, interpolations, repeats))
             })
         }
@@ -513,17 +521,23 @@ fn transcribe_metavar_expr<'a>(
     cx: &ExtCtxt<'a>,
     expr: MetaVarExpr,
     interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
+    marker: &mut Marker,
     repeats: &[(usize, usize)],
     result: &mut Vec<TreeAndSpacing>,
     sp: &DelimSpan,
 ) -> PResult<'a, ()> {
+    let mut visited_span = || {
+        let mut span = sp.entire();
+        marker.visit_span(&mut span);
+        span
+    };
     match expr {
         MetaVarExpr::Count(original_ident, depth_opt) => {
             let matched = matched_from_ident(cx, original_ident, interp)?;
             let count = count_repetitions(cx, depth_opt, matched, &repeats, sp)?;
             let tt = TokenTree::token(
                 TokenKind::lit(token::Integer, sym::integer(count), None),
-                sp.entire(),
+                visited_span(),
             );
             result.push(tt.into());
         }
@@ -536,7 +550,7 @@ fn transcribe_metavar_expr<'a>(
                 result.push(
                     TokenTree::token(
                         TokenKind::lit(token::Integer, sym::integer(*index), None),
-                        sp.entire(),
+                        visited_span(),
                     )
                     .into(),
                 );
@@ -548,7 +562,7 @@ fn transcribe_metavar_expr<'a>(
                 result.push(
                     TokenTree::token(
                         TokenKind::lit(token::Integer, sym::integer(*length), None),
-                        sp.entire(),
+                        visited_span(),
                     )
                     .into(),
                 );
diff --git a/compiler/rustc_feature/src/active.rs b/compiler/rustc_feature/src/active.rs
index 02cdaa3b958..feef7295254 100644
--- a/compiler/rustc_feature/src/active.rs
+++ b/compiler/rustc_feature/src/active.rs
@@ -287,7 +287,7 @@ declare_features! (
     /// Allows `extern "x86-interrupt" fn()`.
     (active, abi_x86_interrupt, "1.17.0", Some(40180), None),
     /// Allows additional const parameter types, such as `&'static str` or user defined types
-    (incomplete, adt_const_params, "1.56.0", Some(44580), None),
+    (incomplete, adt_const_params, "1.56.0", Some(95174), None),
     /// Allows defining an `#[alloc_error_handler]`.
     (active, alloc_error_handler, "1.29.0", Some(51540), None),
     /// Allows explicit discriminants on non-unit enum variants.
diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs
index b299e71c9c4..7c312e1b61d 100644
--- a/compiler/rustc_hir/src/lang_items.rs
+++ b/compiler/rustc_hir/src/lang_items.rs
@@ -216,6 +216,7 @@ language_item_table! {
     Freeze,                  sym::freeze,              freeze_trait,               Target::Trait,          GenericRequirement::Exact(0);
 
     Drop,                    sym::drop,                drop_trait,                 Target::Trait,          GenericRequirement::None;
+    Destruct,                sym::destruct,            destruct_trait,             Target::Trait,          GenericRequirement::None;
 
     CoerceUnsized,           sym::coerce_unsized,      coerce_unsized_trait,       Target::Trait,          GenericRequirement::Minimum(1);
     DispatchFromDyn,         sym::dispatch_from_dyn,   dispatch_from_dyn_trait,    Target::Trait,          GenericRequirement::Minimum(1);
diff --git a/compiler/rustc_lint/src/traits.rs b/compiler/rustc_lint/src/traits.rs
index 5b6997bf0ee..81d308ee347 100644
--- a/compiler/rustc_lint/src/traits.rs
+++ b/compiler/rustc_lint/src/traits.rs
@@ -93,10 +93,6 @@ impl<'tcx> LateLintPass<'tcx> for DropTraitConstraints {
             let Trait(trait_predicate) = predicate.kind().skip_binder() else {
                 continue
             };
-            if trait_predicate.is_const_if_const() {
-                // `~const Drop` definitely have meanings so avoid linting here.
-                continue;
-            }
             let def_id = trait_predicate.trait_ref.def_id;
             if cx.tcx.lang_items().drop_trait() == Some(def_id) {
                 // Explicitly allow `impl Drop`, a drop-guards-as-Voldemort-type pattern.
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index a575f27ad38..fa2dad5ce25 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -41,6 +41,7 @@
 #![feature(new_uninit)]
 #![feature(nll)]
 #![feature(once_cell)]
+#![feature(let_chains)]
 #![feature(let_else)]
 #![feature(min_specialization)]
 #![feature(trusted_len)]
diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs
index 45610fa77d3..36bbccf0a80 100644
--- a/compiler/rustc_middle/src/traits/mod.rs
+++ b/compiler/rustc_middle/src/traits/mod.rs
@@ -577,7 +577,7 @@ pub enum ImplSource<'tcx, N> {
     TraitAlias(ImplSourceTraitAliasData<'tcx, N>),
 
     /// ImplSource for a `const Drop` implementation.
-    ConstDrop(ImplSourceConstDropData<N>),
+    ConstDestruct(ImplSourceConstDestructData<N>),
 }
 
 impl<'tcx, N> ImplSource<'tcx, N> {
@@ -595,7 +595,7 @@ impl<'tcx, N> ImplSource<'tcx, N> {
             | ImplSource::Pointee(ImplSourcePointeeData) => Vec::new(),
             ImplSource::TraitAlias(d) => d.nested,
             ImplSource::TraitUpcasting(d) => d.nested,
-            ImplSource::ConstDrop(i) => i.nested,
+            ImplSource::ConstDestruct(i) => i.nested,
         }
     }
 
@@ -613,7 +613,7 @@ impl<'tcx, N> ImplSource<'tcx, N> {
             | ImplSource::Pointee(ImplSourcePointeeData) => &[],
             ImplSource::TraitAlias(d) => &d.nested,
             ImplSource::TraitUpcasting(d) => &d.nested,
-            ImplSource::ConstDrop(i) => &i.nested,
+            ImplSource::ConstDestruct(i) => &i.nested,
         }
     }
 
@@ -672,9 +672,11 @@ impl<'tcx, N> ImplSource<'tcx, N> {
                     nested: d.nested.into_iter().map(f).collect(),
                 })
             }
-            ImplSource::ConstDrop(i) => ImplSource::ConstDrop(ImplSourceConstDropData {
-                nested: i.nested.into_iter().map(f).collect(),
-            }),
+            ImplSource::ConstDestruct(i) => {
+                ImplSource::ConstDestruct(ImplSourceConstDestructData {
+                    nested: i.nested.into_iter().map(f).collect(),
+                })
+            }
         }
     }
 }
@@ -767,7 +769,7 @@ pub struct ImplSourceDiscriminantKindData;
 pub struct ImplSourcePointeeData;
 
 #[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
-pub struct ImplSourceConstDropData<N> {
+pub struct ImplSourceConstDestructData<N> {
     pub nested: Vec<N>,
 }
 
diff --git a/compiler/rustc_middle/src/traits/select.rs b/compiler/rustc_middle/src/traits/select.rs
index e18f04d92ee..56d42706f67 100644
--- a/compiler/rustc_middle/src/traits/select.rs
+++ b/compiler/rustc_middle/src/traits/select.rs
@@ -146,8 +146,8 @@ pub enum SelectionCandidate<'tcx> {
 
     BuiltinUnsizeCandidate,
 
-    /// Implementation of `const Drop`, optionally from a custom `impl const Drop`.
-    ConstDropCandidate(Option<DefId>),
+    /// Implementation of `const Destruct`, optionally from a custom `impl const Drop`.
+    ConstDestructCandidate(Option<DefId>),
 }
 
 /// The result of trait evaluation. The order is important
diff --git a/compiler/rustc_middle/src/traits/structural_impls.rs b/compiler/rustc_middle/src/traits/structural_impls.rs
index 6ce9f5eea34..ea706053231 100644
--- a/compiler/rustc_middle/src/traits/structural_impls.rs
+++ b/compiler/rustc_middle/src/traits/structural_impls.rs
@@ -33,7 +33,7 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSource<'tcx, N> {
 
             super::ImplSource::TraitUpcasting(ref d) => write!(f, "{:?}", d),
 
-            super::ImplSource::ConstDrop(ref d) => write!(f, "{:?}", d),
+            super::ImplSource::ConstDestruct(ref d) => write!(f, "{:?}", d),
         }
     }
 }
@@ -120,9 +120,9 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSourceTraitAliasData<'tcx,
     }
 }
 
-impl<N: fmt::Debug> fmt::Debug for traits::ImplSourceConstDropData<N> {
+impl<N: fmt::Debug> fmt::Debug for traits::ImplSourceConstDestructData<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "ImplSourceConstDropData(nested={:?})", self.nested)
+        write!(f, "ImplSourceConstDestructData(nested={:?})", self.nested)
     }
 }
 
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index 83ab761aa55..3124cc5dba8 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -765,6 +765,7 @@ impl<'tcx> TraitPredicate<'tcx> {
         if unlikely!(Some(self.trait_ref.def_id) == tcx.lang_items().drop_trait()) {
             // remap without changing constness of this predicate.
             // this is because `T: ~const Drop` has a different meaning to `T: Drop`
+            // FIXME(fee1-dead): remove this logic after beta bump
             param_env.remap_constness_with(self.constness)
         } else {
             *param_env = param_env.with_constness(self.constness.and(param_env.constness()))
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs
index 00cb9907d95..5cfd9a5edfb 100644
--- a/compiler/rustc_middle/src/ty/print/pretty.rs
+++ b/compiler/rustc_middle/src/ty/print/pretty.rs
@@ -896,44 +896,48 @@ pub trait PrettyPrinter<'tcx>:
             );
 
             if !generics.is_empty() || !assoc_items.is_empty() {
-                p!("<");
                 let mut first = true;
 
                 for ty in generics {
-                    if !first {
+                    if first {
+                        p!("<");
+                        first = false;
+                    } else {
                         p!(", ");
                     }
                     p!(print(trait_ref.rebind(*ty)));
-                    first = false;
                 }
 
                 for (assoc_item_def_id, term) in assoc_items {
-                    if !first {
+                    // Skip printing `<[generator@] as Generator<_>>::Return` from async blocks
+                    if let Some(ty) = term.skip_binder().ty() &&
+                       let ty::Projection(ty::ProjectionTy { item_def_id, .. }) = ty.kind() &&
+                       Some(*item_def_id) == self.tcx().lang_items().generator_return() {
+                        continue;
+                    }
+
+                    if first {
+                        p!("<");
+                        first = false;
+                    } else {
                         p!(", ");
                     }
+
                     p!(write("{} = ", self.tcx().associated_item(assoc_item_def_id).name));
 
                     match term.skip_binder() {
                         Term::Ty(ty) => {
-                            // Skip printing `<[generator@] as Generator<_>>::Return` from async blocks
-                            if matches!(
-                              ty.kind(), ty::Projection(ty::ProjectionTy { item_def_id, .. })
-                              if Some(*item_def_id) == self.tcx().lang_items().generator_return()
-                            ) {
-                                p!("[async output]")
-                            } else {
-                                p!(print(ty))
-                            }
+                            p!(print(ty))
                         }
                         Term::Const(c) => {
                             p!(print(c));
                         }
                     };
-
-                    first = false;
                 }
 
-                p!(">");
+                if !first {
+                    p!(">");
+                }
             }
 
             first = false;
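
The pretty.rs hunk defers printing `<` until the first generic argument or associated-type binding is actually emitted, and prints the closing `>` only if anything was printed, so skipping every item (such as the generator `Return` binding) no longer leaves a stray `<>`. A minimal sketch of that lazy-delimiter pattern, with hypothetical names:

    fn print_args(out: &mut String, args: &[Option<&str>]) {
        let mut first = true;
        for arg in args {
            let Some(arg) = arg else { continue }; // skipped items print nothing
            if first {
                out.push('<');
                first = false;
            } else {
                out.push_str(", ");
            }
            out.push_str(arg);
        }
        if !first {
            out.push('>');
        }
    }

    fn main() {
        let mut s = String::new();
        print_args(&mut s, &[Some("i32"), None, Some("u8")]);
        assert_eq!(s, "<i32, u8>");

        let mut t = String::new();
        print_args(&mut t, &[None]); // everything skipped: no empty `<>`
        assert_eq!(t, "");
    }
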
diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs
index 5ed33ab9fec..c4d15d4d187 100644
--- a/compiler/rustc_mir_transform/src/const_prop.rs
+++ b/compiler/rustc_mir_transform/src/const_prop.rs
@@ -244,8 +244,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
     ) -> InterpResult<'tcx, InterpOperand<Self::PointerTag>> {
         let l = &frame.locals[local];
 
-        if l.value == LocalValue::Uninitialized {
-            throw_machine_stop_str!("tried to access an uninitialized local")
+        if l.value == LocalValue::Unallocated {
+            throw_machine_stop_str!("tried to access an unallocated local")
         }
 
         l.access()
@@ -442,7 +442,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
     /// but not reading from them anymore.
     fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
         ecx.frame_mut().locals[local] =
-            LocalState { value: LocalValue::Uninitialized, layout: Cell::new(None) };
+            LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) };
     }
 
     fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> {
@@ -1147,7 +1147,7 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
                     let frame = self.ecx.frame_mut();
                     frame.locals[local].value =
                         if let StatementKind::StorageLive(_) = statement.kind {
-                            LocalValue::Uninitialized
+                            LocalValue::Unallocated
                         } else {
                             LocalValue::Dead
                         };
diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
index 2bf97e5d43c..d72e8d16105 100644
--- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
+++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
@@ -95,7 +95,7 @@ pub struct EarlyOtherwiseBranch;
 
 impl<'tcx> MirPass<'tcx> for EarlyOtherwiseBranch {
     fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
-        sess.mir_opt_level() >= 2
+        sess.mir_opt_level() >= 3 && sess.opts.debugging_opts.unsound_mir_opts
     }
 
     fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
@@ -226,6 +226,37 @@ impl<'tcx> MirPass<'tcx> for EarlyOtherwiseBranch {
 
 /// Returns true if computing the discriminant of `place` may be hoisted out of the branch
 fn may_hoist<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, place: Place<'tcx>) -> bool {
+    // FIXME(JakobDegen): This is unsound. Someone could write code like this:
+    // ```rust
+    // let Q = val;
+    // if discriminant(P) == otherwise {
+    //     let ptr = &mut Q as *mut _ as *mut u8;
+    //     unsafe { *ptr = 10; } // Any invalid value for the type
+    // }
+    //
+    // match P {
+    //    A => match Q {
+    //        A => {
+    //            // code
+    //        }
+    //        _ => {
+    //            // don't use Q
+    //        }
+    //    }
+    //    _ => {
+    //        // don't use Q
+    //    }
+    // };
+    // ```
+    //
+    // Hoisting the `discriminant(Q)` out of the `A` arm causes us to compute the discriminant of an
+    // invalid value, which is UB.
+    //
+    // In order to fix this, we would either need to show that the discriminant computation of
+    // `place` is computed in all branches, including the `otherwise` branch, or we would need
+    // another analysis pass to determine that the place is fully initialized. It might even be best
+    // to have the hoisting be performed in a different pass and just do the CFG changing in this
+    // pass.
     for (place, proj) in place.iter_projections() {
         match proj {
             // Dereferencing in the computation of `place` might cause issues from one of two
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 568682cc3e4..f0ec86ca64a 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -100,11 +100,12 @@ rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
 
 impl CreateTokenStream for LazyTokenStreamImpl {
     fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
-        // The token produced by the final call to `next` or `next_desugared`
-        // was not actually consumed by the callback. The combination
-        // of chaining the initial token and using `take` produces the desired
-        // result - we produce an empty `TokenStream` if no calls were made,
-        // and omit the final token otherwise.
+        // The token produced by the final call to `{,inlined_}next` or
+        // `{,inlined_}next_desugared` was not actually consumed by the
+        // callback. The combination of chaining the initial token and using
+        // `take` produces the desired result - we produce an empty
+        // `TokenStream` if no calls were made, and omit the final token
+        // otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 1b9eeab0298..d625080dee4 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -118,7 +118,7 @@ impl<'a> Parser<'a> {
                         Some(this.parse_ty_param(attrs)?)
                     } else if this.token.can_begin_type() {
                         // Trying to write an associated type bound? (#26271)
-                        let snapshot = this.clone();
+                        let snapshot = this.create_snapshot_for_diagnostic();
                         match this.parse_ty_where_predicate() {
                             Ok(where_predicate) => {
                                 this.struct_span_err(
@@ -133,7 +133,7 @@ impl<'a> Parser<'a> {
                             Err(err) => {
                                 err.cancel();
                                 // FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
-                                *this = snapshot;
+                                this.restore_snapshot(snapshot);
                                 return Ok((None, TrailingToken::None));
                             }
                         }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 4e229918b63..3a2f193d319 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -206,8 +206,9 @@ struct TokenCursor {
     frame: TokenCursorFrame,
     stack: Vec<TokenCursorFrame>,
     desugar_doc_comments: bool,
-    // Counts the number of calls to `next` or `next_desugared`,
-    // depending on whether `desugar_doc_comments` is set.
+    // Counts the number of calls to `{,inlined_}next` or
+    // `{,inlined_}next_desugared`, depending on whether
+    // `desugar_doc_comments` is set.
     num_next_calls: usize,
     // During parsing, we may sometimes need to 'unglue' a
     // glued token into two component tokens
@@ -256,6 +257,12 @@ impl TokenCursorFrame {
 
 impl TokenCursor {
     fn next(&mut self) -> (Token, Spacing) {
+        self.inlined_next()
+    }
+
+    /// This always-inlined version should only be used on hot code paths.
+    #[inline(always)]
+    fn inlined_next(&mut self) -> (Token, Spacing) {
         loop {
             let (tree, spacing) = if !self.frame.open_delim {
                 self.frame.open_delim = true;
@@ -285,7 +292,13 @@ impl TokenCursor {
     }
 
     fn next_desugared(&mut self) -> (Token, Spacing) {
-        let (data, attr_style, sp) = match self.next() {
+        self.inlined_next_desugared()
+    }
+
+    /// This always-inlined version should only be used on hot code paths.
+    #[inline(always)]
+    fn inlined_next_desugared(&mut self) -> (Token, Spacing) {
+        let (data, attr_style, sp) = match self.inlined_next() {
             (Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
                 (data, attr_style, span)
             }
@@ -463,12 +476,13 @@ impl<'a> Parser<'a> {
         parser
     }
 
+    #[inline]
     fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) {
         loop {
             let (mut next, spacing) = if self.desugar_doc_comments {
-                self.token_cursor.next_desugared()
+                self.token_cursor.inlined_next_desugared()
             } else {
-                self.token_cursor.next()
+                self.token_cursor.inlined_next()
             };
             self.token_cursor.num_next_calls += 1;
             // We've retrieved a token from the underlying
@@ -998,7 +1012,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Advance the parser by one token using provided token as the next one.
-    fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
+    fn bump_with(&mut self, next: (Token, Spacing)) {
+        self.inlined_bump_with(next)
+    }
+
+    /// This always-inlined version should only be used on hot code paths.
+    #[inline(always)]
+    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
         // Bumping after EOF is a bad sign, usually an infinite loop.
         if self.prev_token.kind == TokenKind::Eof {
             let msg = "attempted to bump the parser past EOF (may be stuck in a loop)";
@@ -1016,7 +1036,7 @@ impl<'a> Parser<'a> {
     /// Advance the parser by one token.
     pub fn bump(&mut self) {
         let next_token = self.next_tok(self.token.span);
-        self.bump_with(next_token);
+        self.inlined_bump_with(next_token);
     }
 
     /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
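
The parser/mod.rs hunk splits the token-cursor methods into an ordinary wrapper plus an `#[inline(always)]` variant that only hot paths such as `bump` call directly, so cold call sites keep a small outlined copy. A minimal sketch of that pattern with a hypothetical cursor:

    struct Cursor {
        pos: usize,
        data: Vec<u8>,
    }

    impl Cursor {
        // Ordinary entry point for cold callers.
        fn next(&mut self) -> Option<u8> {
            self.inlined_next()
        }

        /// This always-inlined version should only be used on hot code paths.
        #[inline(always)]
        fn inlined_next(&mut self) -> Option<u8> {
            let byte = self.data.get(self.pos).copied();
            self.pos += 1;
            byte
        }
    }

    fn main() {
        let mut c = Cursor { pos: 0, data: vec![1, 2] };
        assert_eq!(c.next(), Some(1));         // cold caller
        assert_eq!(c.inlined_next(), Some(2)); // hot path
        assert_eq!(c.next(), None);
    }
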
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 17c57867cf9..07ce879de8f 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -630,10 +630,14 @@ impl<'a> Parser<'a> {
                 Ok(ty) => GenericArg::Type(ty),
                 Err(err) => {
                     if is_const_fn {
-                        if let Ok(expr) = (*snapshot).parse_expr_res(Restrictions::CONST_EXPR, None)
-                        {
-                            self.restore_snapshot(snapshot);
-                            return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
+                        match (*snapshot).parse_expr_res(Restrictions::CONST_EXPR, None) {
+                            Ok(expr) => {
+                                self.restore_snapshot(snapshot);
+                                return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
+                            }
+                            Err(err) => {
+                                err.cancel();
+                            }
                         }
                     }
                     // Try to recover from possible `const` arg without braces.
diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs
index ebf6678d3ad..4f9e1d3fa3b 100644
--- a/compiler/rustc_passes/src/check_attr.rs
+++ b/compiler/rustc_passes/src/check_attr.rs
@@ -196,8 +196,7 @@ impl CheckAttrVisitor<'_> {
     fn inline_attr_str_error_with_macro_def(&self, hir_id: HirId, attr: &Attribute, sym: &str) {
         self.tcx.struct_span_lint_hir(UNUSED_ATTRIBUTES, hir_id, attr.span, |lint| {
             lint.build(&format!(
-                "`#[{}]` is ignored on struct fields, match arms and macro defs",
-                sym,
+                "`#[{sym}]` is ignored on struct fields, match arms and macro defs",
             ))
             .warn(
                 "this was previously accepted by the compiler but is \
@@ -214,7 +213,7 @@ impl CheckAttrVisitor<'_> {
 
     fn inline_attr_str_error_without_macro_def(&self, hir_id: HirId, attr: &Attribute, sym: &str) {
         self.tcx.struct_span_lint_hir(UNUSED_ATTRIBUTES, hir_id, attr.span, |lint| {
-            lint.build(&format!("`#[{}]` is ignored on struct fields and match arms", sym))
+            lint.build(&format!("`#[{sym}]` is ignored on struct fields and match arms"))
                 .warn(
                     "this was previously accepted by the compiler but is \
                  being phased out; it will become a hard error in \
@@ -721,7 +720,7 @@ impl CheckAttrVisitor<'_> {
                 .sess
                 .struct_span_err(
                     meta.name_value_literal_span().unwrap_or_else(|| meta.span()),
-                    &format!("`{}` is not a valid identifier", doc_keyword),
+                    &format!("`{doc_keyword}` is not a valid identifier"),
                 )
                 .emit();
             return false;
@@ -805,8 +804,7 @@ impl CheckAttrVisitor<'_> {
                 .struct_span_err(
                     meta.span(),
                     &format!(
-                        "`#![doc({} = \"...\")]` isn't allowed as a crate-level attribute",
-                        attr_name,
+                        "`#![doc({attr_name} = \"...\")]` isn't allowed as a crate-level attribute",
                     ),
                 )
                 .emit();
@@ -1035,8 +1033,7 @@ impl CheckAttrVisitor<'_> {
                                                 attr.meta().unwrap().span,
                                                 "use `doc = include_str!` instead",
                                                 format!(
-                                                    "#{}[doc = include_str!(\"{}\")]",
-                                                    inner, value
+                                                    "#{inner}[doc = include_str!(\"{value}\")]",
                                                 ),
                                                 applicability,
                                             );
@@ -1230,7 +1227,7 @@ impl CheckAttrVisitor<'_> {
                         if let Some(value) = attr.value_str() {
                             diag.span_help(
                                 attr.span,
-                                &format!(r#"try `#[link(name = "{}")]` instead"#, value),
+                                &format!(r#"try `#[link(name = "{value}")]` instead"#),
                             );
                         } else {
                             diag.span_help(attr.span, r#"try `#[link(name = "...")]` instead"#);
@@ -1518,15 +1515,14 @@ impl CheckAttrVisitor<'_> {
                 };
                 self.tcx.struct_span_lint_hir(UNUSED_ATTRIBUTES, hir_id, attr.span, |lint| {
                     lint.build(&format!(
-                        "`#[no_mangle]` has no effect on a foreign {}",
-                        foreign_item_kind
+                        "`#[no_mangle]` has no effect on a foreign {foreign_item_kind}"
                     ))
                     .warn(
                         "this was previously accepted by the compiler but is \
                             being phased out; it will become a hard error in \
                             a future release!",
                     )
-                    .span_label(span, format!("foreign {}", foreign_item_kind))
+                    .span_label(span, format!("foreign {foreign_item_kind}"))
                     .note("symbol names in extern blocks are not mangled")
                     .span_suggestion(
                         attr.span,
@@ -1692,9 +1688,9 @@ impl CheckAttrVisitor<'_> {
                 hint.span(),
                 E0517,
                 "{}",
-                &format!("attribute should be applied to {} {}", article, allowed_targets)
+                &format!("attribute should be applied to {article} {allowed_targets}")
             )
-            .span_label(span, &format!("not {} {}", article, allowed_targets))
+            .span_label(span, &format!("not {article} {allowed_targets}"))
             .emit();
         }
 
diff --git a/compiler/rustc_passes/src/check_const.rs b/compiler/rustc_passes/src/check_const.rs
index 0fdbdb7b08d..0e04a2cfb11 100644
--- a/compiler/rustc_passes/src/check_const.rs
+++ b/compiler/rustc_passes/src/check_const.rs
@@ -80,8 +80,7 @@ impl<'tcx> hir::itemlikevisit::ItemLikeVisitor<'tcx> for CheckConstTraitVisitor<
     /// of the trait being implemented; as those provided functions can be non-const.
     fn visit_item<'hir>(&mut self, item: &'hir hir::Item<'hir>) {
         let _: Option<_> = try {
-            if let hir::ItemKind::Impl(ref imp) = item.kind {
-                if let hir::Constness::Const = imp.constness {
+            if let hir::ItemKind::Impl(ref imp) = item.kind && let hir::Constness::Const = imp.constness {
                     let trait_def_id = imp.of_trait.as_ref()?.trait_def_id()?;
                     let ancestors = self
                         .tcx
@@ -132,7 +131,6 @@ impl<'tcx> hir::itemlikevisit::ItemLikeVisitor<'tcx> for CheckConstTraitVisitor<
                             .note(&format!("`{}` not implemented", to_implement.join("`, `")))
                             .emit();
                     }
-                }
             }
         };
     }
diff --git a/compiler/rustc_passes/src/diagnostic_items.rs b/compiler/rustc_passes/src/diagnostic_items.rs
index 30a0071f0f2..9cbb7917e9a 100644
--- a/compiler/rustc_passes/src/diagnostic_items.rs
+++ b/compiler/rustc_passes/src/diagnostic_items.rs
@@ -61,10 +61,9 @@ fn collect_item(tcx: TyCtxt<'_>, items: &mut DiagnosticItems, name: Symbol, item
     if let Some(original_def_id) = items.name_to_id.insert(name, item_def_id) {
         if original_def_id != item_def_id {
             let mut err = match tcx.hir().span_if_local(item_def_id) {
-                Some(span) => tcx.sess.struct_span_err(
-                    span,
-                    &format!("duplicate diagnostic item found: `{}`.", name),
-                ),
+                Some(span) => tcx
+                    .sess
+                    .struct_span_err(span, &format!("duplicate diagnostic item found: `{name}`.")),
                 None => tcx.sess.struct_err(&format!(
                     "duplicate diagnostic item in crate `{}`: `{}`.",
                     tcx.crate_name(item_def_id.krate),
diff --git a/compiler/rustc_passes/src/entry.rs b/compiler/rustc_passes/src/entry.rs
index f5040a373c2..5a1373ad1a2 100644
--- a/compiler/rustc_passes/src/entry.rs
+++ b/compiler/rustc_passes/src/entry.rs
@@ -148,33 +148,29 @@ fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) -> Option<(De
     } else if let Some((def_id, _)) = visitor.attr_main_fn {
         Some((def_id.to_def_id(), EntryFnType::Main))
     } else {
-        if let Some(main_def) = tcx.resolutions(()).main_def {
-            if let Some(def_id) = main_def.opt_fn_def_id() {
-                // non-local main imports are handled below
-                if let Some(def_id) = def_id.as_local() {
-                    if matches!(tcx.hir().find_by_def_id(def_id), Some(Node::ForeignItem(_))) {
-                        tcx.sess
-                            .struct_span_err(
-                                tcx.def_span(def_id),
-                                "the `main` function cannot be declared in an `extern` block",
-                            )
-                            .emit();
-                        return None;
-                    }
-                }
-
-                if main_def.is_import && !tcx.features().imported_main {
-                    let span = main_def.span;
-                    feature_err(
-                        &tcx.sess.parse_sess,
-                        sym::imported_main,
-                        span,
-                        "using an imported function as entry point `main` is experimental",
+        if let Some(main_def) = tcx.resolutions(()).main_def && let Some(def_id) = main_def.opt_fn_def_id() {
+            // non-local main imports are handled below
+            if let Some(def_id) = def_id.as_local() && matches!(tcx.hir().find_by_def_id(def_id), Some(Node::ForeignItem(_))) {
+                tcx.sess
+                    .struct_span_err(
+                        tcx.def_span(def_id),
+                        "the `main` function cannot be declared in an `extern` block",
                     )
                     .emit();
-                }
-                return Some((def_id, EntryFnType::Main));
+                return None;
             }
+
+            if main_def.is_import && !tcx.features().imported_main {
+                let span = main_def.span;
+                feature_err(
+                    &tcx.sess.parse_sess,
+                    sym::imported_main,
+                    span,
+                    "using an imported function as entry point `main` is experimental",
+                )
+                .emit();
+            }
+            return Some((def_id, EntryFnType::Main));
         }
         no_main_err(tcx, visitor);
         None
@@ -225,11 +221,9 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) {
         err.note(&note);
     }
 
-    if let Some(main_def) = tcx.resolutions(()).main_def {
-        if main_def.opt_fn_def_id().is_none() {
-            // There is something at `crate::main`, but it is not a function definition.
-            err.span_label(main_def.span, "non-function item at `crate::main` is found");
-        }
+    if let Some(main_def) = tcx.resolutions(()).main_def && main_def.opt_fn_def_id().is_none() {
+        // There is something at `crate::main`, but it is not a function definition.
+        err.span_label(main_def.span, "non-function item at `crate::main` is found");
     }
 
     if tcx.sess.teach(&err.get_code().unwrap()) {
diff --git a/compiler/rustc_passes/src/intrinsicck.rs b/compiler/rustc_passes/src/intrinsicck.rs
index 6316f3b8459..027eac16bad 100644
--- a/compiler/rustc_passes/src/intrinsicck.rs
+++ b/compiler/rustc_passes/src/intrinsicck.rs
@@ -79,27 +79,25 @@ impl<'tcx> ExprVisitor<'tcx> {
             // Special-case transmuting from `typeof(function)` and
             // `Option<typeof(function)>` to present a clearer error.
             let from = unpack_option_like(self.tcx, from);
-            if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (from.kind(), sk_to) {
-                if size_to == Pointer.size(&self.tcx) {
-                    struct_span_err!(self.tcx.sess, span, E0591, "can't transmute zero-sized type")
-                        .note(&format!("source type: {}", from))
-                        .note(&format!("target type: {}", to))
-                        .help("cast with `as` to a pointer instead")
-                        .emit();
-                    return;
-                }
+            if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (from.kind(), sk_to) && size_to == Pointer.size(&self.tcx) {
+                struct_span_err!(self.tcx.sess, span, E0591, "can't transmute zero-sized type")
+                    .note(&format!("source type: {from}"))
+                    .note(&format!("target type: {to}"))
+                    .help("cast with `as` to a pointer instead")
+                    .emit();
+                return;
             }
         }
 
         // Try to display a sensible error with as much information as possible.
         let skeleton_string = |ty: Ty<'tcx>, sk| match sk {
             Ok(SizeSkeleton::Known(size)) => format!("{} bits", size.bits()),
-            Ok(SizeSkeleton::Pointer { tail, .. }) => format!("pointer to `{}`", tail),
+            Ok(SizeSkeleton::Pointer { tail, .. }) => format!("pointer to `{tail}`"),
             Err(LayoutError::Unknown(bad)) => {
                 if bad == ty {
                     "this type does not have a fixed size".to_owned()
                 } else {
-                    format!("size can vary because of {}", bad)
+                    format!("size can vary because of {bad}")
                 }
             }
             Err(err) => err.to_string(),
@@ -113,7 +111,7 @@ impl<'tcx> ExprVisitor<'tcx> {
                                         or dependently-sized types"
         );
         if from == to {
-            err.note(&format!("`{}` does not have a fixed size", from));
+            err.note(&format!("`{from}` does not have a fixed size"));
         } else {
             err.note(&format!("source type: `{}` ({})", from, skeleton_string(from, sk_from)))
                 .note(&format!("target type: `{}` ({})", to, skeleton_string(to, sk_to)));
@@ -201,7 +199,7 @@ impl<'tcx> ExprVisitor<'tcx> {
             _ => None,
         };
         let Some(asm_ty) = asm_ty else {
-            let msg = &format!("cannot use value of type `{}` for inline assembly", ty);
+            let msg = &format!("cannot use value of type `{ty}` for inline assembly");
             let mut err = self.tcx.sess.struct_span_err(expr.span, msg);
             err.note(
                 "only integers, floats, SIMD vectors, pointers and function pointers \
@@ -216,7 +214,7 @@ impl<'tcx> ExprVisitor<'tcx> {
         if !ty.is_copy_modulo_regions(self.tcx.at(DUMMY_SP), self.param_env) {
             let msg = "arguments for inline assembly must be copyable";
             let mut err = self.tcx.sess.struct_span_err(expr.span, msg);
-            err.note(&format!("`{}` does not implement the Copy trait", ty));
+            err.note(&format!("`{ty}` does not implement the Copy trait"));
             err.emit();
         }
 
@@ -237,7 +235,7 @@ impl<'tcx> ExprVisitor<'tcx> {
                     in_expr.span,
                     &format!("type `{}`", self.typeck_results.expr_ty_adjusted(in_expr)),
                 );
-                err.span_label(expr.span, &format!("type `{}`", ty));
+                err.span_label(expr.span, &format!("type `{ty}`"));
                 err.note(
                     "asm inout arguments must have the same type, \
                     unless they are both pointers or integers of the same size",
@@ -256,7 +254,7 @@ impl<'tcx> ExprVisitor<'tcx> {
         let reg_class = reg.reg_class();
         let supported_tys = reg_class.supported_types(asm_arch);
         let Some((_, feature)) = supported_tys.iter().find(|&&(t, _)| t == asm_ty) else {
-            let msg = &format!("type `{}` cannot be used with this register class", ty);
+            let msg = &format!("type `{ty}` cannot be used with this register class");
             let mut err = self.tcx.sess.struct_span_err(expr.span, msg);
             let supported_tys: Vec<_> =
                 supported_tys.iter().map(|(t, _)| t.to_string()).collect();
@@ -326,12 +324,10 @@ impl<'tcx> ExprVisitor<'tcx> {
                         let mut err = lint.build(msg);
                         err.span_label(expr.span, "for this argument");
                         err.help(&format!(
-                            "use the `{}` modifier to have the register formatted as `{}`",
-                            suggested_modifier, suggested_result,
+                            "use the `{suggested_modifier}` modifier to have the register formatted as `{suggested_result}`",
                         ));
                         err.help(&format!(
-                            "or use the `{}` modifier to keep the default formatting of `{}`",
-                            default_modifier, default_result,
+                            "or use the `{default_modifier}` modifier to keep the default formatting of `{default_result}`",
                         ));
                         err.emit();
                     },
@@ -509,14 +505,14 @@ impl<'tcx> Visitor<'tcx> for ExprVisitor<'tcx> {
         match expr.kind {
             hir::ExprKind::Path(ref qpath) => {
                 let res = self.typeck_results.qpath_res(qpath, expr.hir_id);
-                if let Res::Def(DefKind::Fn, did) = res {
-                    if self.def_id_is_transmute(did) {
-                        let typ = self.typeck_results.node_type(expr.hir_id);
-                        let sig = typ.fn_sig(self.tcx);
-                        let from = sig.inputs().skip_binder()[0];
-                        let to = sig.output().skip_binder();
-                        self.check_transmute(expr.span, from, to);
-                    }
+                if let Res::Def(DefKind::Fn, did) = res
+                    && self.def_id_is_transmute(did)
+                {
+                    let typ = self.typeck_results.node_type(expr.hir_id);
+                    let sig = typ.fn_sig(self.tcx);
+                    let from = sig.inputs().skip_binder()[0];
+                    let to = sig.output().skip_binder();
+                    self.check_transmute(expr.span, from, to);
                 }
             }
 
diff --git a/compiler/rustc_resolve/src/late/lifetimes.rs b/compiler/rustc_resolve/src/late/lifetimes.rs
index ecdbe12c9ab..bcae735bb7e 100644
--- a/compiler/rustc_resolve/src/late/lifetimes.rs
+++ b/compiler/rustc_resolve/src/late/lifetimes.rs
@@ -1,3 +1,4 @@
+// ignore-tidy-filelength
 //! Name resolution for lifetimes.
 //!
 //! Name resolution for lifetimes follows *much* simpler rules than the
@@ -230,6 +231,10 @@ enum Scope<'a> {
         hir_id: hir::HirId,
 
         s: ScopeRef<'a>,
+
+        /// In some cases, disallowing late bound lifetimes lets us avoid ICEs.
+        /// This is almost always set to true.
+        allow_late_bound: bool,
     },
 
     /// Lifetimes introduced by a fn are scoped to the call-site for that fn,
@@ -302,6 +307,7 @@ impl<'a> fmt::Debug for TruncatedScopeDebug<'a> {
                 scope_type,
                 hir_id,
                 s: _,
+                allow_late_bound,
             } => f
                 .debug_struct("Binder")
                 .field("lifetimes", lifetimes)
@@ -311,6 +317,7 @@ impl<'a> fmt::Debug for TruncatedScopeDebug<'a> {
                 .field("scope_type", scope_type)
                 .field("hir_id", hir_id)
                 .field("s", &"..")
+                .field("allow_late_bound", allow_late_bound)
                 .finish(),
             Scope::Body { id, s: _ } => {
                 f.debug_struct("Body").field("id", id).field("s", &"..").finish()
@@ -703,6 +710,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                     track_lifetime_uses: true,
                     opaque_type_parent: false,
                     scope_type: BinderScopeType::Normal,
+                    allow_late_bound: true,
                 };
                 self.with(scope, move |_old_scope, this| {
                     intravisit::walk_fn(this, fk, fd, b, s, hir_id)
@@ -828,6 +836,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                     track_lifetime_uses,
                     scope_type: BinderScopeType::Normal,
                     s: ROOT_SCOPE,
+                    allow_late_bound: false,
                 };
                 self.with(scope, |old_scope, this| {
                     this.check_lifetime_params(old_scope, &generics.params);
@@ -896,6 +905,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                     track_lifetime_uses: true,
                     opaque_type_parent: false,
                     scope_type: BinderScopeType::Normal,
+                    allow_late_bound: true,
                 };
                 self.with(scope, |old_scope, this| {
                     // a bare fn has no bounds, so everything
@@ -1077,6 +1087,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                             track_lifetime_uses: true,
                             opaque_type_parent: false,
                             scope_type: BinderScopeType::Normal,
+                            allow_late_bound: false,
                         };
                         this.with(scope, |_old_scope, this| {
                             this.visit_generics(generics);
@@ -1097,6 +1108,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                         track_lifetime_uses: true,
                         opaque_type_parent: false,
                         scope_type: BinderScopeType::Normal,
+                        allow_late_bound: false,
                     };
                     self.with(scope, |_old_scope, this| {
                         let scope = Scope::TraitRefBoundary { s: this.scope };
@@ -1156,6 +1168,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                     track_lifetime_uses: true,
                     opaque_type_parent: true,
                     scope_type: BinderScopeType::Normal,
+                    allow_late_bound: false,
                 };
                 self.with(scope, |old_scope, this| {
                     this.check_lifetime_params(old_scope, &generics.params);
@@ -1225,6 +1238,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                     track_lifetime_uses: true,
                     opaque_type_parent: true,
                     scope_type: BinderScopeType::Normal,
+                    allow_late_bound: true,
                 };
                 self.with(scope, |old_scope, this| {
                     this.check_lifetime_params(old_scope, &generics.params);
@@ -1378,6 +1392,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                             track_lifetime_uses: true,
                             opaque_type_parent: false,
                             scope_type: BinderScopeType::Normal,
+                            allow_late_bound: true,
                         };
                         this.with(scope, |old_scope, this| {
                             this.check_lifetime_params(old_scope, &bound_generic_params);
@@ -1425,6 +1440,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
                     track_lifetime_uses: true,
                     opaque_type_parent: false,
                     scope_type,
+                    allow_late_bound: true,
                 };
                 self.with(scope, |_, this| {
                     intravisit::walk_param_bound(this, bound);
@@ -1477,6 +1493,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
             track_lifetime_uses: true,
             opaque_type_parent: false,
             scope_type,
+            allow_late_bound: true,
         };
         self.with(scope, |old_scope, this| {
             this.check_lifetime_params(old_scope, &trait_ref.bound_generic_params);
@@ -2180,6 +2197,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
             opaque_type_parent: true,
             track_lifetime_uses: false,
             scope_type: BinderScopeType::Normal,
+            allow_late_bound: true,
         };
         self.with(scope, move |old_scope, this| {
             this.check_lifetime_params(old_scope, &generics.params);
@@ -2602,7 +2620,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
         let mut scope = &*self.scope;
         let hir_id = loop {
             match scope {
-                Scope::Binder { hir_id, .. } => {
+                Scope::Binder { hir_id, allow_late_bound: true, .. } => {
                     break *hir_id;
                 }
                 Scope::ObjectLifetimeDefault { ref s, .. }
@@ -2611,8 +2629,11 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
                 | Scope::TraitRefBoundary { ref s, .. } => {
                     scope = *s;
                 }
-                Scope::Root | Scope::Body { .. } => {
+                Scope::Root
+                | Scope::Body { .. }
+                | Scope::Binder { allow_late_bound: false, .. } => {
                     // See issues #83907 and #83693. Just bail out from looking inside.
+                    // See issue #95023 for why binders with `allow_late_bound: false` also bail out here.
                     self.tcx.sess.delay_span_bug(
                         rustc_span::DUMMY_SP,
                         "In fn_like_elision without appropriate scope above",
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index 523a1066a1c..48ca321b737 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -571,6 +571,7 @@ symbols! {
         deref_target,
         derive,
         derive_default_enum,
+        destruct,
         destructuring_assignment,
         diagnostic,
         direct,
diff --git a/compiler/rustc_target/src/asm/aarch64.rs b/compiler/rustc_target/src/asm/aarch64.rs
index 7fb4dbdf2b1..fba8cc6ef8b 100644
--- a/compiler/rustc_target/src/asm/aarch64.rs
+++ b/compiler/rustc_target/src/asm/aarch64.rs
@@ -64,7 +64,7 @@ impl AArch64InlineAsmRegClass {
         match self {
             Self::reg => types! { _: I8, I16, I32, I64, F32, F64; },
             Self::vreg | Self::vreg_low16 => types! {
-                fp: I8, I16, I32, I64, F32, F64,
+                neon: I8, I16, I32, I64, F32, F64,
                     VecI8(8), VecI16(4), VecI32(2), VecI64(1), VecF32(2), VecF64(1),
                     VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF32(4), VecF64(2);
             },
diff --git a/compiler/rustc_trait_selection/src/traits/misc.rs b/compiler/rustc_trait_selection/src/traits/misc.rs
index 08308253ced..b83b0bf1ca5 100644
--- a/compiler/rustc_trait_selection/src/traits/misc.rs
+++ b/compiler/rustc_trait_selection/src/traits/misc.rs
@@ -11,7 +11,7 @@ use crate::traits::error_reporting::InferCtxtExt;
 
 #[derive(Clone)]
 pub enum CopyImplementationError<'tcx> {
-    InfrigingFields(Vec<&'tcx ty::FieldDef>),
+    InfrigingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>)>),
     NotAnAdt,
     HasDestructor,
 }
@@ -67,7 +67,7 @@ pub fn can_type_implement_copy<'tcx>(
                 match traits::fully_normalize(&infcx, ctx, cause, param_env, ty) {
                     Ok(ty) => {
                         if !infcx.type_is_copy_modulo_regions(param_env, ty, span) {
-                            infringing.push(field);
+                            infringing.push((field, ty));
                         }
                     }
                     Err(errors) => {
diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs
index 614a5e04809..390381752f9 100644
--- a/compiler/rustc_trait_selection/src/traits/project.rs
+++ b/compiler/rustc_trait_selection/src/traits/project.rs
@@ -1569,7 +1569,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>(
             super::ImplSource::AutoImpl(..)
             | super::ImplSource::Builtin(..)
             | super::ImplSource::TraitUpcasting(_)
-            | super::ImplSource::ConstDrop(_) => {
+            | super::ImplSource::ConstDestruct(_) => {
                 // These traits have no associated types.
                 selcx.tcx().sess.delay_span_bug(
                     obligation.cause.span,
@@ -1644,7 +1644,7 @@ fn confirm_select_candidate<'cx, 'tcx>(
         | super::ImplSource::Builtin(..)
         | super::ImplSource::TraitUpcasting(_)
         | super::ImplSource::TraitAlias(..)
-        | super::ImplSource::ConstDrop(_) => {
+        | super::ImplSource::ConstDestruct(_) => {
             // we don't create Select candidates with this kind of resolution
             span_bug!(
                 obligation.cause.span,
diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
index d76bcf6807f..c0a283d2eda 100644
--- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
@@ -5,6 +5,7 @@
 //! candidates. See the [rustc dev guide] for more details.
 //!
 //! [rustc dev guide]:https://rustc-dev-guide.rust-lang.org/traits/resolution.html#candidate-assembly
+use hir::LangItem;
 use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_infer::traits::TraitEngine;
@@ -307,7 +308,16 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             } else if lang_items.drop_trait() == Some(def_id)
                 && obligation.predicate.is_const_if_const()
             {
-                self.assemble_const_drop_candidates(obligation, &mut candidates);
+                // `~const Drop` is treated as trivially holding here to make the transition to `~const Destruct` easier
+                // FIXME(fee1-dead): add a note for selection error of `~const Drop`
+                // when beta is bumped
+                // FIXME: remove this when beta is bumped
+                #[cfg(bootstrap)]
+                {}
+
+                candidates.vec.push(SelectionCandidate::ConstDestructCandidate(None))
+            } else if lang_items.destruct_trait() == Some(def_id) {
+                self.assemble_const_destruct_candidates(obligation, &mut candidates);
             } else {
                 if lang_items.clone_trait() == Some(def_id) {
                     // Same builtin conditions as `Copy`, i.e., every type which has builtin support
@@ -906,15 +916,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         }
     }
 
-    fn assemble_const_drop_candidates(
+    fn assemble_const_destruct_candidates(
         &mut self,
         obligation: &TraitObligation<'tcx>,
         candidates: &mut SelectionCandidateSet<'tcx>,
     ) {
-        // If the predicate is `~const Drop` in a non-const environment, we don't actually need
+        // If the predicate is `~const Destruct` in a non-const environment, we don't actually need
         // to check anything. We'll short-circuit checking any obligations in confirmation, too.
-        if obligation.param_env.constness() == hir::Constness::NotConst {
-            candidates.vec.push(ConstDropCandidate(None));
+        if !obligation.is_const() {
+            candidates.vec.push(ConstDestructCandidate(None));
             return;
         }
 
@@ -927,7 +937,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             | ty::Param(_)
             | ty::Placeholder(_)
             | ty::Projection(_) => {
-                // We don't know if these are `~const Drop`, at least
+                // We don't know if these are `~const Destruct`, at least
                 // not structurally... so don't push a candidate.
             }
 
@@ -951,14 +961,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             | ty::Generator(..)
             | ty::Tuple(_)
             | ty::GeneratorWitness(_) => {
-                // These are built-in, and cannot have a custom `impl const Drop`.
-                candidates.vec.push(ConstDropCandidate(None));
+                // These are built-in, and cannot have a custom `impl const Destruct`.
+                candidates.vec.push(ConstDestructCandidate(None));
             }
 
             ty::Adt(..) => {
                 // Find a custom `impl Drop` impl, if it exists
                 let relevant_impl = self.tcx().find_map_relevant_impl(
-                    obligation.predicate.def_id(),
+                    self.tcx().require_lang_item(LangItem::Drop, None),
                     obligation.predicate.skip_binder().trait_ref.self_ty(),
                     Some,
                 );
@@ -966,11 +976,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 if let Some(impl_def_id) = relevant_impl {
                     // Check that `impl Drop` is actually const, if there is a custom impl
                     if self.tcx().impl_constness(impl_def_id) == hir::Constness::Const {
-                        candidates.vec.push(ConstDropCandidate(Some(impl_def_id)));
+                        candidates.vec.push(ConstDestructCandidate(Some(impl_def_id)));
                     }
                 } else {
                     // Otherwise check the ADT like a built-in type (structurally)
-                    candidates.vec.push(ConstDropCandidate(None));
+                    candidates.vec.push(ConstDestructCandidate(None));
                 }
             }
 
diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
index 05479899f3a..df760a166e8 100644
--- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
@@ -8,7 +8,6 @@
 //! https://rustc-dev-guide.rust-lang.org/traits/resolution.html#confirmation
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir::lang_items::LangItem;
-use rustc_hir::Constness;
 use rustc_index::bit_set::GrowableBitSet;
 use rustc_infer::infer::InferOk;
 use rustc_infer::infer::LateBoundRegionConversionTime::HigherRankedType;
@@ -29,9 +28,9 @@ use crate::traits::TraitNotObjectSafe;
 use crate::traits::VtblSegment;
 use crate::traits::{BuiltinDerivedObligation, ImplDerivedObligation};
 use crate::traits::{
-    ImplSourceAutoImplData, ImplSourceBuiltinData, ImplSourceClosureData, ImplSourceConstDropData,
-    ImplSourceDiscriminantKindData, ImplSourceFnPointerData, ImplSourceGeneratorData,
-    ImplSourceObjectData, ImplSourcePointeeData, ImplSourceTraitAliasData,
+    ImplSourceAutoImplData, ImplSourceBuiltinData, ImplSourceClosureData,
+    ImplSourceConstDestructData, ImplSourceDiscriminantKindData, ImplSourceFnPointerData,
+    ImplSourceGeneratorData, ImplSourceObjectData, ImplSourcePointeeData, ImplSourceTraitAliasData,
     ImplSourceTraitUpcastingData, ImplSourceUserDefinedData,
 };
 use crate::traits::{ObjectCastObligation, PredicateObligation, TraitObligation};
@@ -156,9 +155,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 Ok(ImplSource::TraitUpcasting(data))
             }
 
-            ConstDropCandidate(def_id) => {
-                let data = self.confirm_const_drop_candidate(obligation, def_id)?;
-                Ok(ImplSource::ConstDrop(data))
+            ConstDestructCandidate(def_id) => {
+                let data = self.confirm_const_destruct_candidate(obligation, def_id)?;
+                Ok(ImplSource::ConstDestruct(data))
             }
         }
     }
@@ -1037,14 +1036,23 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         Ok(ImplSourceBuiltinData { nested })
     }
 
-    fn confirm_const_drop_candidate(
+    fn confirm_const_destruct_candidate(
         &mut self,
         obligation: &TraitObligation<'tcx>,
         impl_def_id: Option<DefId>,
-    ) -> Result<ImplSourceConstDropData<PredicateObligation<'tcx>>, SelectionError<'tcx>> {
-        // `~const Drop` in a non-const environment is always trivially true, since our type is `Drop`
-        if obligation.param_env.constness() == Constness::NotConst {
-            return Ok(ImplSourceConstDropData { nested: vec![] });
+    ) -> Result<ImplSourceConstDestructData<PredicateObligation<'tcx>>, SelectionError<'tcx>> {
+        // `~const Destruct` in a non-const environment is always trivially true, since our type is `Drop`
+        if !obligation.is_const() {
+            return Ok(ImplSourceConstDestructData { nested: vec![] });
+        }
+
+        let drop_trait = self.tcx().require_lang_item(LangItem::Drop, None);
+        // FIXME: remove the `if` statement below when beta is bumped
+        #[cfg(bootstrap)]
+        {}
+
+        if obligation.predicate.skip_binder().def_id() == drop_trait {
+            return Ok(ImplSourceConstDestructData { nested: vec![] });
         }
 
         let tcx = self.tcx();
@@ -1054,9 +1062,29 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         let cause = obligation.derived_cause(BuiltinDerivedObligation);
 
         // If we have a custom `impl const Drop`, then
-        // first check it like a regular impl candidate
+        // first check it like a regular impl candidate.
+        // This is copied from `confirm_impl_candidate`, but remaps the predicate to `~const Drop` beforehand.
         if let Some(impl_def_id) = impl_def_id {
-            nested.extend(self.confirm_impl_candidate(obligation, impl_def_id).nested);
+            let obligations = self.infcx.commit_unconditionally(|_| {
+                let mut new_obligation = obligation.clone();
+                new_obligation.predicate = new_obligation.predicate.map_bound(|mut trait_pred| {
+                    trait_pred.trait_ref.def_id = drop_trait;
+                    trait_pred
+                });
+                let substs = self.rematch_impl(impl_def_id, &new_obligation);
+                debug!(?substs, "impl substs");
+                let cause = obligation.derived_cause(ImplDerivedObligation);
+                ensure_sufficient_stack(|| {
+                    self.vtable_impl(
+                        impl_def_id,
+                        substs,
+                        cause,
+                        new_obligation.recursion_depth + 1,
+                        new_obligation.param_env,
+                    )
+                })
+            });
+            nested.extend(obligations.nested);
         }
 
         // We want to confirm the ADT's fields if we have an ADT
@@ -1114,7 +1142,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                             self_ty
                                 .rebind(ty::TraitPredicate {
                                     trait_ref: ty::TraitRef {
-                                        def_id: self.tcx().require_lang_item(LangItem::Drop, None),
+                                        def_id: self
+                                            .tcx()
+                                            .require_lang_item(LangItem::Destruct, None),
                                         substs: self.tcx().mk_substs_trait(nested_ty, &[]),
                                     },
                                     constness: ty::BoundConstness::ConstIfConst,
@@ -1140,7 +1170,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                     let predicate = self_ty
                         .rebind(ty::TraitPredicate {
                             trait_ref: ty::TraitRef {
-                                def_id: self.tcx().require_lang_item(LangItem::Drop, None),
+                                def_id: self.tcx().require_lang_item(LangItem::Destruct, None),
                                 substs: self.tcx().mk_substs_trait(nested_ty, &[]),
                             },
                             constness: ty::BoundConstness::ConstIfConst,
@@ -1158,6 +1188,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             }
         }
 
-        Ok(ImplSourceConstDropData { nested })
+        Ok(ImplSourceConstDestructData { nested })
     }
 }
diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs
index a1a8497859d..4135fbca060 100644
--- a/compiler/rustc_trait_selection/src/traits/select/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs
@@ -1179,7 +1179,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                     GeneratorCandidate => {}
                     // FnDef where the function is const
                     FnPointerCandidate { is_const: true } => {}
-                    ConstDropCandidate(_) => {}
+                    ConstDestructCandidate(_) => {}
                     _ => {
                         // reject all other types of candidates
                         continue;
@@ -1270,7 +1270,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         // the master cache. Since coherence executes pretty quickly,
         // it's not worth going to more trouble to increase the
         // hit-rate, I don't think.
-        if self.intercrate {
+        if self.intercrate || self.allow_negative_impls {
             return false;
         }
 
@@ -1287,7 +1287,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         // mode, so don't do any caching. In particular, we might
         // re-use the same `InferCtxt` with both an intercrate
         // and non-intercrate `SelectionContext`
-        if self.intercrate {
+        if self.intercrate || self.allow_negative_impls {
             return None;
         }
         let tcx = self.tcx();
@@ -1589,7 +1589,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         };
 
         // (*) Prefer `BuiltinCandidate { has_nested: false }`, `PointeeCandidate`,
-        // `DiscriminantKindCandidate`, and `ConstDropCandidate` to anything else.
+        // `DiscriminantKindCandidate`, and `ConstDestructCandidate` to anything else.
         //
         // This is a fix for #53123 and prevents winnowing from accidentally extending the
         // lifetime of a variable.
@@ -1606,7 +1606,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 BuiltinCandidate { has_nested: false }
                 | DiscriminantKindCandidate
                 | PointeeCandidate
-                | ConstDropCandidate(_),
+                | ConstDestructCandidate(_),
                 _,
             ) => true,
             (
@@ -1614,7 +1614,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 BuiltinCandidate { has_nested: false }
                 | DiscriminantKindCandidate
                 | PointeeCandidate
-                | ConstDropCandidate(_),
+                | ConstDestructCandidate(_),
             ) => false,
 
             (ParamCandidate(other), ParamCandidate(victim)) => {
diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs
index 295a91959eb..802a59abe5f 100644
--- a/compiler/rustc_ty_utils/src/instance.rs
+++ b/compiler/rustc_ty_utils/src/instance.rs
@@ -378,7 +378,7 @@ fn resolve_associated_item<'tcx>(
         | traits::ImplSource::DiscriminantKind(..)
         | traits::ImplSource::Pointee(..)
         | traits::ImplSource::TraitUpcasting(_)
-        | traits::ImplSource::ConstDrop(_) => None,
+        | traits::ImplSource::ConstDestruct(_) => None,
     })
 }
 
diff --git a/compiler/rustc_typeck/src/check/op.rs b/compiler/rustc_typeck/src/check/op.rs
index af154e62a1e..e0dbe027aef 100644
--- a/compiler/rustc_typeck/src/check/op.rs
+++ b/compiler/rustc_typeck/src/check/op.rs
@@ -672,6 +672,27 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                         ex.span,
                         format!("cannot apply unary operator `{}`", op.as_str()),
                     );
+                    let missing_trait = match op {
+                        hir::UnOp::Deref => unreachable!("check unary op `-` or `!` only"),
+                        hir::UnOp::Not => "std::ops::Not",
+                        hir::UnOp::Neg => "std::ops::Neg",
+                    };
+                    let mut visitor = TypeParamVisitor(vec![]);
+                    visitor.visit_ty(operand_ty);
+                    if let [ty] = &visitor.0[..] {
+                        if let ty::Param(p) = *operand_ty.kind() {
+                            suggest_constraining_param(
+                                self.tcx,
+                                self.body_id,
+                                &mut err,
+                                *ty,
+                                operand_ty,
+                                missing_trait,
+                                p,
+                                true,
+                            );
+                        }
+                    }
 
                     let sp = self.tcx.sess.source_map().start_point(ex.span);
                     if let Some(sp) =
diff --git a/compiler/rustc_typeck/src/check/pat.rs b/compiler/rustc_typeck/src/check/pat.rs
index c16be38d5fc..1c4fbbbb9bf 100644
--- a/compiler/rustc_typeck/src/check/pat.rs
+++ b/compiler/rustc_typeck/src/check/pat.rs
@@ -17,7 +17,7 @@ use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
 use rustc_span::hygiene::DesugaringKind;
 use rustc_span::lev_distance::find_best_match_for_name;
 use rustc_span::source_map::{Span, Spanned};
-use rustc_span::symbol::{sym, Ident};
+use rustc_span::symbol::{kw, sym, Ident};
 use rustc_span::{BytePos, MultiSpan, DUMMY_SP};
 use rustc_trait_selection::autoderef::Autoderef;
 use rustc_trait_selection::traits::{ObligationCause, Pattern};
@@ -1275,7 +1275,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .filter(|(_, ident)| !used_fields.contains_key(ident))
             .collect::<Vec<_>>();
 
-        let inexistent_fields_err = if !(inexistent_fields.is_empty() || variant.is_recovered()) {
+        let inexistent_fields_err = if !(inexistent_fields.is_empty() || variant.is_recovered())
+            && !inexistent_fields.iter().any(|field| field.name == kw::Underscore)
+        {
             Some(self.error_inexistent_fields(
                 adt.variant_descr(),
                 &inexistent_fields,
diff --git a/compiler/rustc_typeck/src/coherence/builtin.rs b/compiler/rustc_typeck/src/coherence/builtin.rs
index ef59df0dc88..3135e9996ab 100644
--- a/compiler/rustc_typeck/src/coherence/builtin.rs
+++ b/compiler/rustc_typeck/src/coherence/builtin.rs
@@ -91,8 +91,40 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
                 E0204,
                 "the trait `Copy` may not be implemented for this type"
             );
-            for span in fields.iter().map(|f| tcx.def_span(f.did)) {
-                err.span_label(span, "this field does not implement `Copy`");
+            for (field, ty) in fields {
+                let field_span = tcx.def_span(field.did);
+                err.span_label(field_span, "this field does not implement `Copy`");
+                // Spin up a new FulfillmentContext, so we can get the _precise_ reason
+                // why this field does not implement Copy. This is useful because sometimes
+                // it is not immediately clear why Copy is not implemented for a field, since
+                // all we point at is the field itself.
+                tcx.infer_ctxt().enter(|infcx| {
+                    let mut fulfill_cx = traits::FulfillmentContext::new_ignoring_regions();
+                    fulfill_cx.register_bound(
+                        &infcx,
+                        param_env,
+                        ty,
+                        tcx.lang_items().copy_trait().unwrap(),
+                        traits::ObligationCause::dummy_with_span(field_span),
+                    );
+                    for error in fulfill_cx.select_all_or_error(&infcx) {
+                        let error_predicate = error.obligation.predicate;
+                        // Only note if it's not the root obligation, otherwise it's trivial and
+                        // should be self-explanatory (i.e. a field literally doesn't implement Copy).
+
+                        // FIXME: This error could be more descriptive, especially if the error_predicate
+                        // contains a foreign type or if it's a deeply nested type...
+                        if error_predicate != error.root_obligation.predicate {
+                            err.span_note(
+                                error.obligation.cause.span,
+                                &format!(
+                                    "the `Copy` impl for `{}` requires that `{}`",
+                                    ty, error_predicate
+                                ),
+                            );
+                        }
+                    }
+                });
             }
             err.emit();
         }