Diffstat (limited to 'compiler')
compiler/rustc_ast/src/ast_traits.rs | 4
compiler/rustc_ast/src/lib.rs | 1
compiler/rustc_ast/src/mut_visit.rs | 12
compiler/rustc_ast/src/token.rs | 9
compiler/rustc_ast/src/tokenstream.rs | 8
compiler/rustc_ast_lowering/src/lib.rs | 1
compiler/rustc_attr_parsing/src/lib.rs | 1
compiler/rustc_borrowck/src/lib.rs | 1
compiler/rustc_builtin_macros/src/cfg_eval.rs | 5
compiler/rustc_builtin_macros/src/lib.rs | 1
compiler/rustc_codegen_cranelift/example/mini_core.rs | 16
compiler/rustc_codegen_gcc/example/mini_core.rs | 16
compiler/rustc_codegen_ssa/src/lib.rs | 1
compiler/rustc_const_eval/src/lib.rs | 1
compiler/rustc_data_structures/Cargo.toml | 5
compiler/rustc_data_structures/src/lib.rs | 1
compiler/rustc_data_structures/src/marker.rs | 2
compiler/rustc_data_structures/src/sharded.rs | 95
compiler/rustc_driver/src/lib.rs | 1
compiler/rustc_driver_impl/src/lib.rs | 1
compiler/rustc_expand/src/base.rs | 49
compiler/rustc_expand/src/mbe/transcribe.rs | 14
compiler/rustc_expand/src/proc_macro.rs | 2
compiler/rustc_expand/src/proc_macro_server.rs | 49
compiler/rustc_hir/src/lib.rs | 1
compiler/rustc_hir_analysis/src/lib.rs | 1
compiler/rustc_hir_typeck/src/lib.rs | 1
compiler/rustc_incremental/src/lib.rs | 1
compiler/rustc_lint/src/lib.rs | 1
compiler/rustc_metadata/src/lib.rs | 1
compiler/rustc_middle/src/lib.rs | 1
compiler/rustc_middle/src/mir/interpret/mod.rs | 20
compiler/rustc_middle/src/ty/context.rs | 4
compiler/rustc_mir_build/src/lib.rs | 1
compiler/rustc_mir_dataflow/src/lib.rs | 1
compiler/rustc_mir_transform/src/add_call_guards.rs | 9
compiler/rustc_mir_transform/src/lib.rs | 1
compiler/rustc_monomorphize/src/lib.rs | 1
compiler/rustc_monomorphize/src/partitioning/autodiff.rs | 16
compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs | 51
compiler/rustc_next_trait_solver/src/solve/inspect/build.rs | 2
compiler/rustc_next_trait_solver/src/solve/mod.rs | 4
compiler/rustc_next_trait_solver/src/solve/project_goals.rs | 3
compiler/rustc_next_trait_solver/src/solve/search_graph.rs | 30
compiler/rustc_parse/src/lib.rs | 1
compiler/rustc_parse/src/parser/expr.rs | 3
compiler/rustc_parse/src/parser/item.rs | 13
compiler/rustc_parse/src/parser/mod.rs | 12
compiler/rustc_parse/src/parser/nonterminal.rs | 12
compiler/rustc_parse/src/parser/stmt.rs | 60
compiler/rustc_passes/src/lib.rs | 1
compiler/rustc_privacy/src/lib.rs | 1
compiler/rustc_query_impl/src/lib.rs | 1
compiler/rustc_query_system/src/dep_graph/graph.rs | 31
compiler/rustc_query_system/src/lib.rs | 1
compiler/rustc_query_system/src/query/caches.rs | 14
compiler/rustc_sanitizers/src/lib.rs | 1
compiler/rustc_smir/src/lib.rs | 1
compiler/rustc_symbol_mangling/src/lib.rs | 1
compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs | 2
compiler/rustc_transmute/src/lib.rs | 1
compiler/rustc_ty_utils/src/lib.rs | 1
compiler/rustc_type_ir/src/search_graph/mod.rs | 165
compiler/rustc_type_ir/src/solve/mod.rs | 19
64 files changed, 510 insertions(+), 277 deletions(-)
diff --git a/compiler/rustc_ast/src/ast_traits.rs b/compiler/rustc_ast/src/ast_traits.rs
index a37c0f92b12..849cc650e9d 100644
--- a/compiler/rustc_ast/src/ast_traits.rs
+++ b/compiler/rustc_ast/src/ast_traits.rs
@@ -209,16 +209,12 @@ impl HasTokens for Attribute {
 impl HasTokens for Nonterminal {
     fn tokens(&self) -> Option<&LazyAttrTokenStream> {
         match self {
-            Nonterminal::NtItem(item) => item.tokens(),
-            Nonterminal::NtStmt(stmt) => stmt.tokens(),
             Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens(),
             Nonterminal::NtBlock(block) => block.tokens(),
         }
     }
     fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
         match self {
-            Nonterminal::NtItem(item) => item.tokens_mut(),
-            Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
             Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
             Nonterminal::NtBlock(block) => block.tokens_mut(),
         }
diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs
index 294c6c9ba7a..da510e4967d 100644
--- a/compiler/rustc_ast/src/lib.rs
+++ b/compiler/rustc_ast/src/lib.rs
@@ -6,6 +6,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(attr(deny(warnings)))
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index fc31912283a..4a1636e6aec 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -895,19 +895,7 @@ pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
 // multiple items there....
 fn visit_nonterminal<T: MutVisitor>(vis: &mut T, nt: &mut token::Nonterminal) {
     match nt {
-        token::NtItem(item) => visit_clobber(item, |item| {
-            // This is probably okay, because the only visitors likely to
-            // peek inside interpolated nodes will be renamings/markings,
-            // which map single items to single items.
-            vis.flat_map_item(item).expect_one("expected visitor to produce exactly one item")
-        }),
         token::NtBlock(block) => vis.visit_block(block),
-        token::NtStmt(stmt) => visit_clobber(stmt, |stmt| {
-            // See reasoning above.
-            stmt.map(|stmt| {
-                vis.flat_map_stmt(stmt).expect_one("expected visitor to produce exactly one item")
-            })
-        }),
         token::NtExpr(expr) => vis.visit_expr(expr),
         token::NtLiteral(expr) => vis.visit_expr(expr),
     }
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index ff1edad725a..f7cd63aaaf8 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -870,6 +870,7 @@ impl Token {
     /// Is this a pre-parsed expression dropped into the token stream
     /// (which happens while parsing the result of macro expansion)?
     pub fn is_whole_expr(&self) -> bool {
+        #[allow(irrefutable_let_patterns)] // FIXME: temporary
         if let Interpolated(nt) = &self.kind
             && let NtExpr(_) | NtLiteral(_) | NtBlock(_) = &**nt
         {
@@ -1103,9 +1104,7 @@ pub enum NtExprKind {
 #[derive(Clone, Encodable, Decodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
-    NtItem(P<ast::Item>),
     NtBlock(P<ast::Block>),
-    NtStmt(P<ast::Stmt>),
     NtExpr(P<ast::Expr>),
     NtLiteral(P<ast::Expr>),
 }
@@ -1196,18 +1195,14 @@ impl fmt::Display for NonterminalKind {
 impl Nonterminal {
     pub fn use_span(&self) -> Span {
         match self {
-            NtItem(item) => item.span,
             NtBlock(block) => block.span,
-            NtStmt(stmt) => stmt.span,
             NtExpr(expr) | NtLiteral(expr) => expr.span,
         }
     }
 
     pub fn descr(&self) -> &'static str {
         match self {
-            NtItem(..) => "item",
             NtBlock(..) => "block",
-            NtStmt(..) => "statement",
             NtExpr(..) => "expression",
             NtLiteral(..) => "literal",
         }
@@ -1227,9 +1222,7 @@ impl PartialEq for Nonterminal {
 impl fmt::Debug for Nonterminal {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
-            NtItem(..) => f.pad("NtItem(..)"),
             NtBlock(..) => f.pad("NtBlock(..)"),
-            NtStmt(..) => f.pad("NtStmt(..)"),
             NtExpr(..) => f.pad("NtExpr(..)"),
             NtLiteral(..) => f.pad("NtLiteral(..)"),
         }
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index b9b20cf9376..bdd244be6d1 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -23,7 +23,7 @@ use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
 
-use crate::ast::{AttrStyle, StmtKind};
+use crate::ast::AttrStyle;
 use crate::ast_traits::{HasAttrs, HasTokens};
 use crate::token::{self, Delimiter, InvisibleOrigin, Nonterminal, Token, TokenKind};
 use crate::{AttrVec, Attribute};
@@ -461,13 +461,7 @@ impl TokenStream {
 
     pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
         match nt {
-            Nonterminal::NtItem(item) => TokenStream::from_ast(item),
             Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
-            Nonterminal::NtStmt(stmt) if let StmtKind::Empty = stmt.kind => {
-                // FIXME: Properly collect tokens for empty statements.
-                TokenStream::token_alone(token::Semi, stmt.span)
-            }
-            Nonterminal::NtStmt(stmt) => TokenStream::from_ast(stmt),
             Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => TokenStream::from_ast(expr),
         }
     }
diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs
index e5569458dbd..c7d37e2704d 100644
--- a/compiler/rustc_ast_lowering/src/lib.rs
+++ b/compiler/rustc_ast_lowering/src/lib.rs
@@ -32,6 +32,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs
index 249e71ef70d..a7465847e18 100644
--- a/compiler/rustc_attr_parsing/src/lib.rs
+++ b/compiler/rustc_attr_parsing/src/lib.rs
@@ -77,6 +77,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs
index 4d3774682ce..7ae7936a2a2 100644
--- a/compiler/rustc_borrowck/src/lib.rs
+++ b/compiler/rustc_borrowck/src/lib.rs
@@ -2,6 +2,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs
index 53c61831b42..5788966b6e7 100644
--- a/compiler/rustc_builtin_macros/src/cfg_eval.rs
+++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs
@@ -140,8 +140,9 @@ impl CfgEval<'_> {
                     Annotatable::ForeignItem(self.flat_map_foreign_item(item).pop().unwrap())
                 }
                 Annotatable::Stmt(_) => {
-                    let stmt =
-                        parser.parse_stmt_without_recovery(false, ForceCollect::Yes)?.unwrap();
+                    let stmt = parser
+                        .parse_stmt_without_recovery(false, ForceCollect::Yes, false)?
+                        .unwrap();
                     Annotatable::Stmt(P(self.flat_map_stmt(stmt).pop().unwrap()))
                 }
                 Annotatable::Expr(_) => {
diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs
index 1defd3867a0..dab3053d8b0 100644
--- a/compiler/rustc_builtin_macros/src/lib.rs
+++ b/compiler/rustc_builtin_macros/src/lib.rs
@@ -5,6 +5,7 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/compiler/rustc_codegen_cranelift/example/mini_core.rs b/compiler/rustc_codegen_cranelift/example/mini_core.rs
index 72c9df59d83..6e345b2a6fd 100644
--- a/compiler/rustc_codegen_cranelift/example/mini_core.rs
+++ b/compiler/rustc_codegen_cranelift/example/mini_core.rs
@@ -624,25 +624,25 @@ pub mod intrinsics {
     #[rustc_intrinsic]
     pub fn size_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn size_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn size_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
     pub fn min_align_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn min_align_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn min_align_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn copy<T>(_src: *const T, _dst: *mut T, _count: usize);
+    pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize);
     #[rustc_intrinsic]
-    pub unsafe fn transmute<T, U>(_e: T) -> U;
+    pub unsafe fn transmute<T, U>(e: T) -> U;
     #[rustc_intrinsic]
-    pub unsafe fn ctlz_nonzero<T>(_x: T) -> u32;
+    pub unsafe fn ctlz_nonzero<T>(x: T) -> u32;
     #[rustc_intrinsic]
     pub fn needs_drop<T: ?::Sized>() -> bool;
     #[rustc_intrinsic]
-    pub fn bitreverse<T>(_x: T) -> T;
+    pub fn bitreverse<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub fn bswap<T>(_x: T) -> T;
+    pub fn bswap<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub unsafe fn write_bytes<T>(_dst: *mut T, _val: u8, _count: usize);
+    pub unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
     #[rustc_intrinsic]
     pub unsafe fn unreachable() -> !;
 }
diff --git a/compiler/rustc_codegen_gcc/example/mini_core.rs b/compiler/rustc_codegen_gcc/example/mini_core.rs
index 3dad35bc4ce..5544aee9eaf 100644
--- a/compiler/rustc_codegen_gcc/example/mini_core.rs
+++ b/compiler/rustc_codegen_gcc/example/mini_core.rs
@@ -595,25 +595,25 @@ pub mod intrinsics {
     #[rustc_intrinsic]
     pub fn size_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn size_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn size_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
     pub fn min_align_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn min_align_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn min_align_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn copy<T>(_src: *const T, _dst: *mut T, _count: usize);
+    pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize);
     #[rustc_intrinsic]
-    pub unsafe fn transmute<T, U>(_e: T) -> U;
+    pub unsafe fn transmute<T, U>(e: T) -> U;
     #[rustc_intrinsic]
-    pub unsafe fn ctlz_nonzero<T>(_x: T) -> u32;
+    pub unsafe fn ctlz_nonzero<T>(x: T) -> u32;
     #[rustc_intrinsic]
     pub fn needs_drop<T: ?::Sized>() -> bool;
     #[rustc_intrinsic]
-    pub fn bitreverse<T>(_x: T) -> T;
+    pub fn bitreverse<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub fn bswap<T>(_x: T) -> T;
+    pub fn bswap<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub unsafe fn write_bytes<T>(_dst: *mut T, _val: u8, _count: usize);
+    pub unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
     #[rustc_intrinsic]
     pub unsafe fn unreachable() -> !;
 }
diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs
index 8ad04032414..44b8cc459cf 100644
--- a/compiler/rustc_codegen_ssa/src/lib.rs
+++ b/compiler/rustc_codegen_ssa/src/lib.rs
@@ -2,6 +2,7 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/compiler/rustc_const_eval/src/lib.rs b/compiler/rustc_const_eval/src/lib.rs
index da52d60ae59..e03849c32f9 100644
--- a/compiler/rustc_const_eval/src/lib.rs
+++ b/compiler/rustc_const_eval/src/lib.rs
@@ -1,6 +1,7 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml
index bdf5494f210..b9a5fc3a1fe 100644
--- a/compiler/rustc_data_structures/Cargo.toml
+++ b/compiler/rustc_data_structures/Cargo.toml
@@ -29,6 +29,11 @@ thin-vec = "0.2.12"
 tracing = "0.1"
 # tidy-alphabetical-end
 
+[dependencies.hashbrown]
+version = "0.15.2"
+default-features = false
+features = ["nightly"] # for may_dangle
+
 [dependencies.parking_lot]
 version = "0.12"
 
diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs
index a3b62b46919..865424fd6bb 100644
--- a/compiler/rustc_data_structures/src/lib.rs
+++ b/compiler/rustc_data_structures/src/lib.rs
@@ -24,7 +24,6 @@
 #![feature(dropck_eyepatch)]
 #![feature(extend_one)]
 #![feature(file_buffered)]
-#![feature(hash_raw_entry)]
 #![feature(macro_metavar_expr)]
 #![feature(map_try_insert)]
 #![feature(min_specialization)]
diff --git a/compiler/rustc_data_structures/src/marker.rs b/compiler/rustc_data_structures/src/marker.rs
index 4cacc269709..64c64bfa3c2 100644
--- a/compiler/rustc_data_structures/src/marker.rs
+++ b/compiler/rustc_data_structures/src/marker.rs
@@ -76,6 +76,7 @@ impl_dyn_send!(
     [crate::sync::RwLock<T> where T: DynSend]
     [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag]
     [rustc_arena::TypedArena<T> where T: DynSend]
+    [hashbrown::HashTable<T> where T: DynSend]
     [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
     [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
     [thin_vec::ThinVec<T> where T: DynSend]
@@ -153,6 +154,7 @@ impl_dyn_sync!(
     [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag]
     [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
     [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
+    [hashbrown::HashTable<T> where T: DynSync]
     [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
     [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
     [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
diff --git a/compiler/rustc_data_structures/src/sharded.rs b/compiler/rustc_data_structures/src/sharded.rs
index 3016348f224..49cafcb17a0 100644
--- a/compiler/rustc_data_structures/src/sharded.rs
+++ b/compiler/rustc_data_structures/src/sharded.rs
@@ -1,11 +1,11 @@
 use std::borrow::Borrow;
-use std::collections::hash_map::RawEntryMut;
 use std::hash::{Hash, Hasher};
-use std::iter;
+use std::{iter, mem};
 
 use either::Either;
+use hashbrown::hash_table::{Entry, HashTable};
 
-use crate::fx::{FxHashMap, FxHasher};
+use crate::fx::FxHasher;
 use crate::sync::{CacheAligned, Lock, LockGuard, Mode, is_dyn_thread_safe};
 
 // 32 shards is sufficient to reduce contention on an 8-core Ryzen 7 1700,
@@ -140,17 +140,67 @@ pub fn shards() -> usize {
     1
 }
 
-pub type ShardedHashMap<K, V> = Sharded<FxHashMap<K, V>>;
+pub type ShardedHashMap<K, V> = Sharded<HashTable<(K, V)>>;
 
 impl<K: Eq, V> ShardedHashMap<K, V> {
     pub fn with_capacity(cap: usize) -> Self {
-        Self::new(|| FxHashMap::with_capacity_and_hasher(cap, rustc_hash::FxBuildHasher::default()))
+        Self::new(|| HashTable::with_capacity(cap))
     }
     pub fn len(&self) -> usize {
         self.lock_shards().map(|shard| shard.len()).sum()
     }
 }
 
+impl<K: Eq + Hash, V> ShardedHashMap<K, V> {
+    #[inline]
+    pub fn get<Q>(&self, key: &Q) -> Option<V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+        V: Clone,
+    {
+        let hash = make_hash(key);
+        let shard = self.lock_shard_by_hash(hash);
+        let (_, value) = shard.find(hash, |(k, _)| k.borrow() == key)?;
+        Some(value.clone())
+    }
+
+    #[inline]
+    pub fn get_or_insert_with(&self, key: K, default: impl FnOnce() -> V) -> V
+    where
+        V: Copy,
+    {
+        let hash = make_hash(&key);
+        let mut shard = self.lock_shard_by_hash(hash);
+
+        match table_entry(&mut shard, hash, &key) {
+            Entry::Occupied(e) => e.get().1,
+            Entry::Vacant(e) => {
+                let value = default();
+                e.insert((key, value));
+                value
+            }
+        }
+    }
+
+    #[inline]
+    pub fn insert(&self, key: K, value: V) -> Option<V> {
+        let hash = make_hash(&key);
+        let mut shard = self.lock_shard_by_hash(hash);
+
+        match table_entry(&mut shard, hash, &key) {
+            Entry::Occupied(e) => {
+                let previous = mem::replace(&mut e.into_mut().1, value);
+                Some(previous)
+            }
+            Entry::Vacant(e) => {
+                e.insert((key, value));
+                None
+            }
+        }
+    }
+}
+
 impl<K: Eq + Hash + Copy> ShardedHashMap<K, ()> {
     #[inline]
     pub fn intern_ref<Q: ?Sized>(&self, value: &Q, make: impl FnOnce() -> K) -> K
@@ -160,13 +210,12 @@ impl<K: Eq + Hash + Copy> ShardedHashMap<K, ()> {
     {
         let hash = make_hash(value);
         let mut shard = self.lock_shard_by_hash(hash);
-        let entry = shard.raw_entry_mut().from_key_hashed_nocheck(hash, value);
 
-        match entry {
-            RawEntryMut::Occupied(e) => *e.key(),
-            RawEntryMut::Vacant(e) => {
+        match table_entry(&mut shard, hash, value) {
+            Entry::Occupied(e) => e.get().0,
+            Entry::Vacant(e) => {
                 let v = make();
-                e.insert_hashed_nocheck(hash, v, ());
+                e.insert((v, ()));
                 v
             }
         }
@@ -180,13 +229,12 @@ impl<K: Eq + Hash + Copy> ShardedHashMap<K, ()> {
     {
         let hash = make_hash(&value);
         let mut shard = self.lock_shard_by_hash(hash);
-        let entry = shard.raw_entry_mut().from_key_hashed_nocheck(hash, &value);
 
-        match entry {
-            RawEntryMut::Occupied(e) => *e.key(),
-            RawEntryMut::Vacant(e) => {
+        match table_entry(&mut shard, hash, &value) {
+            Entry::Occupied(e) => e.get().0,
+            Entry::Vacant(e) => {
                 let v = make(value);
-                e.insert_hashed_nocheck(hash, v, ());
+                e.insert((v, ()));
                 v
             }
         }
@@ -203,17 +251,30 @@ impl<K: Eq + Hash + Copy + IntoPointer> ShardedHashMap<K, ()> {
         let hash = make_hash(&value);
         let shard = self.lock_shard_by_hash(hash);
         let value = value.into_pointer();
-        shard.raw_entry().from_hash(hash, |entry| entry.into_pointer() == value).is_some()
+        shard.find(hash, |(k, ())| k.into_pointer() == value).is_some()
     }
 }
 
 #[inline]
-pub fn make_hash<K: Hash + ?Sized>(val: &K) -> u64 {
+fn make_hash<K: Hash + ?Sized>(val: &K) -> u64 {
     let mut state = FxHasher::default();
     val.hash(&mut state);
     state.finish()
 }
 
+#[inline]
+fn table_entry<'a, K, V, Q>(
+    table: &'a mut HashTable<(K, V)>,
+    hash: u64,
+    key: &Q,
+) -> Entry<'a, (K, V)>
+where
+    K: Hash + Borrow<Q>,
+    Q: ?Sized + Eq,
+{
+    table.entry(hash, move |(k, _)| k.borrow() == key, |(k, _)| make_hash(k))
+}
+
 /// Get a shard with a pre-computed hash value. If `get_shard_by_value` is
 /// ever used in combination with `get_shard_by_hash` on a single `Sharded`
 /// instance, then `hash` must be computed with `FxHasher`. Otherwise,
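Note on the sharded.rs change above: the removed `hash_raw_entry` std feature is replaced by hashbrown's `HashTable`, whose entry API takes an explicit hash plus equality and re-hash closures, just like the new `table_entry` helper. Below is a minimal standalone sketch of that API, assuming only a `hashbrown = "0.15"` dependency; the `nightly` feature enabled in the Cargo.toml change is only needed inside rustc (for `may_dangle`), and `DefaultHasher` here stands in for the `FxHasher` rustc uses.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

use hashbrown::hash_table::{Entry, HashTable};

// Any hasher works for this sketch; rustc uses FxHasher.
fn make_hash<T: Hash + ?Sized>(val: &T) -> u64 {
    let mut state = DefaultHasher::new();
    val.hash(&mut state);
    state.finish()
}

fn main() {
    // The table stores (key, value) pairs directly; hashing is the caller's job.
    let mut table: HashTable<(String, u32)> = HashTable::new();

    let key = "query-key".to_string();
    let hash = make_hash(&key);
    match table.entry(hash, |(k, _)| *k == key, |(k, _)| make_hash(k)) {
        Entry::Occupied(mut e) => e.get_mut().1 += 1,
        Entry::Vacant(e) => {
            e.insert((key.clone(), 1));
        }
    }

    let found = table.find(hash, |(k, _)| *k == key).map(|(_, v)| *v);
    assert_eq!(found, Some(1));
}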
diff --git a/compiler/rustc_driver/src/lib.rs b/compiler/rustc_driver/src/lib.rs
index a03834c519d..381309f83b2 100644
--- a/compiler/rustc_driver/src/lib.rs
+++ b/compiler/rustc_driver/src/lib.rs
@@ -3,6 +3,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
 // tidy-alphabetical-end
diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs
index 6e4020c6eca..ed5662da16d 100644
--- a/compiler/rustc_driver_impl/src/lib.rs
+++ b/compiler/rustc_driver_impl/src/lib.rs
@@ -7,6 +7,7 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(decl_macro)]
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index 86b12f6be4e..895efe05e11 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -7,7 +7,7 @@ use std::sync::Arc;
 
 use rustc_ast::attr::{AttributeExt, MarkedAttrs};
 use rustc_ast::ptr::P;
-use rustc_ast::token::Nonterminal;
+use rustc_ast::token::MetaVarKind;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::visit::{AssocCtxt, Visitor};
 use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind};
@@ -19,7 +19,7 @@ use rustc_feature::Features;
 use rustc_hir as hir;
 use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools};
 use rustc_parse::MACRO_ARGUMENTS;
-use rustc_parse::parser::Parser;
+use rustc_parse::parser::{ForceCollect, Parser};
 use rustc_session::config::CollapseMacroDebuginfo;
 use rustc_session::parse::ParseSess;
 use rustc_session::{Limit, Session};
@@ -1405,13 +1405,13 @@ pub fn parse_macro_name_and_helper_attrs(
 /// If this item looks like a specific enums from `rental`, emit a fatal error.
 /// See #73345 and #83125 for more details.
 /// FIXME(#73933): Remove this eventually.
-fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) {
+fn pretty_printing_compatibility_hack(item: &Item, psess: &ParseSess) {
     let name = item.ident.name;
     if name == sym::ProceduralMasqueradeDummyType
         && let ast::ItemKind::Enum(enum_def, _) = &item.kind
         && let [variant] = &*enum_def.variants
         && variant.ident.name == sym::Input
-        && let FileName::Real(real) = sess.source_map().span_to_filename(item.ident.span)
+        && let FileName::Real(real) = psess.source_map().span_to_filename(item.ident.span)
         && let Some(c) = real
             .local_path()
             .unwrap_or(Path::new(""))
@@ -1429,7 +1429,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) {
         };
 
         if crate_matches {
-            sess.dcx().emit_fatal(errors::ProcMacroBackCompat {
+            psess.dcx().emit_fatal(errors::ProcMacroBackCompat {
                 crate_name: "rental".to_string(),
                 fixed_version: "0.5.6".to_string(),
             });
@@ -1437,7 +1437,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) {
     }
 }
 
-pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &Session) {
+pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, psess: &ParseSess) {
     let item = match ann {
         Annotatable::Item(item) => item,
         Annotatable::Stmt(stmt) => match &stmt.kind {
@@ -1446,17 +1446,36 @@ pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &S
         },
         _ => return,
     };
-    pretty_printing_compatibility_hack(item, sess)
+    pretty_printing_compatibility_hack(item, psess)
 }
 
-pub(crate) fn nt_pretty_printing_compatibility_hack(nt: &Nonterminal, sess: &Session) {
-    let item = match nt {
-        Nonterminal::NtItem(item) => item,
-        Nonterminal::NtStmt(stmt) => match &stmt.kind {
-            ast::StmtKind::Item(item) => item,
-            _ => return,
-        },
+pub(crate) fn stream_pretty_printing_compatibility_hack(
+    kind: MetaVarKind,
+    stream: &TokenStream,
+    psess: &ParseSess,
+) {
+    let item = match kind {
+        MetaVarKind::Item => {
+            let mut parser = Parser::new(psess, stream.clone(), None);
+            // No need to collect tokens for this simple check.
+            parser
+                .parse_item(ForceCollect::No)
+                .expect("failed to reparse item")
+                .expect("an actual item")
+        }
+        MetaVarKind::Stmt => {
+            let mut parser = Parser::new(psess, stream.clone(), None);
+            // No need to collect tokens for this simple check.
+            let stmt = parser
+                .parse_stmt(ForceCollect::No)
+                .expect("failed to reparse")
+                .expect("an actual stmt");
+            match &stmt.kind {
+                ast::StmtKind::Item(item) => item.clone(),
+                _ => return,
+            }
+        }
         _ => return,
     };
-    pretty_printing_compatibility_hack(item, sess)
+    pretty_printing_compatibility_hack(&item, psess)
 }
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index 70107c80147..cffa4af6ac3 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -7,7 +7,7 @@ use rustc_ast::token::{
     TokenKind,
 };
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
-use rustc_ast::{ExprKind, TyKind};
+use rustc_ast::{ExprKind, StmtKind, TyKind};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
 use rustc_parse::lexer::nfc_normalize;
@@ -323,6 +323,18 @@ pub(super) fn transcribe<'a>(
                             let kind = token::NtLifetime(*ident, *is_raw);
                             TokenTree::token_alone(kind, sp)
                         }
+                        MatchedSingle(ParseNtResult::Item(item)) => {
+                            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
+                        }
+                        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
+                            let stream = if let StmtKind::Empty = stmt.kind {
+                                // FIXME: Properly collect tokens for empty statements.
+                                TokenStream::token_alone(token::Semi, stmt.span)
+                            } else {
+                                TokenStream::from_ast(stmt)
+                            };
+                            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
+                        }
                         MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited(
                             pat.span,
                             MetaVarKind::Pat(*pat_kind),
diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index a6cdeaee176..d5af9849e75 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -122,7 +122,7 @@ impl MultiItemModifier for DeriveProcMacro {
         // We had a lint for a long time, but now we just emit a hard error.
         // Eventually we might remove the special case hard error check
         // altogether. See #73345.
-        crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess);
+        crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess.psess);
         let input = item.to_tokens();
         let stream = {
             let _timer =
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 007241437ec..e0269eb6903 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -115,11 +115,43 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
 
         while let Some(tree) = iter.next() {
             let (Token { kind, span }, joint) = match tree.clone() {
-                tokenstream::TokenTree::Delimited(span, _, delim, tts) => {
-                    let delimiter = pm::Delimiter::from_internal(delim);
+                tokenstream::TokenTree::Delimited(span, _, mut delim, mut stream) => {
+                    // We used to have an alternative behaviour for crates that
+                    // needed it: a hack used to pass AST fragments to
+                    // attribute and derive macros as a single nonterminal
+                    // token instead of a token stream. Such token needs to be
+                    // "unwrapped" and not represented as a delimited group. We
+                    // had a lint for a long time, but now we just emit a hard
+                    // error. Eventually we might remove the special case hard
+                    // error check altogether. See #73345.
+                    if let Delimiter::Invisible(InvisibleOrigin::MetaVar(kind)) = delim {
+                        crate::base::stream_pretty_printing_compatibility_hack(
+                            kind,
+                            &stream,
+                            rustc.psess(),
+                        );
+                    }
+
+                    // In `mk_delimited` we avoid nesting invisible delimited
+                    // of the same `MetaVarKind`. Here we do the same but
+                    // ignore the `MetaVarKind` because it is discarded when we
+                    // convert it to a `Group`.
+                    while let Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) = delim {
+                        if stream.len() == 1
+                            && let tree = stream.iter().next().unwrap()
+                            && let tokenstream::TokenTree::Delimited(_, _, delim2, stream2) = tree
+                            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) = delim2
+                        {
+                            delim = *delim2;
+                            stream = stream2.clone();
+                        } else {
+                            break;
+                        }
+                    }
+
                     trees.push(TokenTree::Group(Group {
-                        delimiter,
-                        stream: Some(tts),
+                        delimiter: pm::Delimiter::from_internal(delim),
+                        stream: Some(stream),
                         span: DelimSpan {
                             open: span.open,
                             close: span.close,
@@ -279,15 +311,6 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
 
                 Interpolated(nt) => {
                     let stream = TokenStream::from_nonterminal_ast(&nt);
-                    // We used to have an alternative behaviour for crates that
-                    // needed it: a hack used to pass AST fragments to
-                    // attribute and derive macros as a single nonterminal
-                    // token instead of a token stream. Such token needs to be
-                    // "unwrapped" and not represented as a delimited group. We
-                    // had a lint for a long time, but now we just emit a hard
-                    // error. Eventually we might remove the special case hard
-                    // error check altogether. See #73345.
-                    crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.ecx.sess);
                     trees.push(TokenTree::Group(Group {
                         delimiter: pm::Delimiter::None,
                         stream: Some(stream),
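The unwrapping loop added above collapses nested single-element invisible groups before handing a `Group` to the proc-macro client. A toy, self-contained illustration of that peeling pattern follows; the `Tree` type is a hypothetical stand-in, not the real `TokenTree`, and it only mirrors the shape of the `while let Delimiter::Invisible(..)` loop.

#[derive(Clone, Debug, PartialEq)]
enum Tree {
    Invisible(Vec<Tree>), // stands in for an invisible-delimited group
    Leaf(u32),
}

// Keep peeling while the stream is exactly one invisible group.
fn peel(mut trees: Vec<Tree>) -> Vec<Tree> {
    while trees.len() == 1 && matches!(trees[0], Tree::Invisible(_)) {
        let Tree::Invisible(inner) = trees.pop().unwrap() else { unreachable!() };
        trees = inner;
    }
    trees
}

fn main() {
    let nested = vec![Tree::Invisible(vec![Tree::Invisible(vec![Tree::Leaf(7)])])];
    assert_eq!(peel(nested), vec![Tree::Leaf(7)]);
}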
diff --git a/compiler/rustc_hir/src/lib.rs b/compiler/rustc_hir/src/lib.rs
index 84d369f1edd..4a839d40571 100644
--- a/compiler/rustc_hir/src/lib.rs
+++ b/compiler/rustc_hir/src/lib.rs
@@ -4,6 +4,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]
 #![feature(closure_track_caller)]
diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs
index 26a20690a95..cb3cdc5e8d3 100644
--- a/compiler/rustc_hir_analysis/src/lib.rs
+++ b/compiler/rustc_hir_analysis/src/lib.rs
@@ -59,6 +59,7 @@ This API is completely unstable and subject to change.
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs
index 263fb84206e..4968998fd51 100644
--- a/compiler/rustc_hir_typeck/src/lib.rs
+++ b/compiler/rustc_hir_typeck/src/lib.rs
@@ -1,6 +1,7 @@
 // tidy-alphabetical-start
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
diff --git a/compiler/rustc_incremental/src/lib.rs b/compiler/rustc_incremental/src/lib.rs
index 299ee487638..dabfb6a90ca 100644
--- a/compiler/rustc_incremental/src/lib.rs
+++ b/compiler/rustc_incremental/src/lib.rs
@@ -2,6 +2,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![deny(missing_docs)]
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs
index 35867d8c9ef..c38a7540018 100644
--- a/compiler/rustc_lint/src/lib.rs
+++ b/compiler/rustc_lint/src/lib.rs
@@ -21,6 +21,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(array_windows)]
diff --git a/compiler/rustc_metadata/src/lib.rs b/compiler/rustc_metadata/src/lib.rs
index 8898c5824fa..634c82fd1d5 100644
--- a/compiler/rustc_metadata/src/lib.rs
+++ b/compiler/rustc_metadata/src/lib.rs
@@ -1,5 +1,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(coroutines)]
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index 8fe2cc7101b..1e6178144c9 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -29,6 +29,7 @@
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::potential_query_instability)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(allocator_api)]
diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs
index 2675b7e0fc5..effedf854e4 100644
--- a/compiler/rustc_middle/src/mir/interpret/mod.rs
+++ b/compiler/rustc_middle/src/mir/interpret/mod.rs
@@ -452,12 +452,7 @@ impl<'tcx> TyCtxt<'tcx> {
         }
         let id = self.alloc_map.reserve();
         debug!("creating alloc {:?} with id {id:?}", alloc_salt.0);
-        let had_previous = self
-            .alloc_map
-            .to_alloc
-            .lock_shard_by_value(&id)
-            .insert(id, alloc_salt.0.clone())
-            .is_some();
+        let had_previous = self.alloc_map.to_alloc.insert(id, alloc_salt.0.clone()).is_some();
         // We just reserved, so should always be unique.
         assert!(!had_previous);
         dedup.insert(alloc_salt, id);
@@ -510,7 +505,7 @@ impl<'tcx> TyCtxt<'tcx> {
     /// local dangling pointers and allocations in constants/statics.
     #[inline]
     pub fn try_get_global_alloc(self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
-        self.alloc_map.to_alloc.lock_shard_by_value(&id).get(&id).cloned()
+        self.alloc_map.to_alloc.get(&id)
     }
 
     #[inline]
@@ -529,9 +524,7 @@ impl<'tcx> TyCtxt<'tcx> {
     /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
     /// call this function twice, even with the same `Allocation` will ICE the compiler.
     pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
-        if let Some(old) =
-            self.alloc_map.to_alloc.lock_shard_by_value(&id).insert(id, GlobalAlloc::Memory(mem))
-        {
+        if let Some(old) = self.alloc_map.to_alloc.insert(id, GlobalAlloc::Memory(mem)) {
             bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
         }
     }
@@ -539,11 +532,8 @@ impl<'tcx> TyCtxt<'tcx> {
     /// Freezes an `AllocId` created with `reserve` by pointing it at a static item. Trying to
     /// call this function twice, even with the same `DefId` will ICE the compiler.
     pub fn set_nested_alloc_id_static(self, id: AllocId, def_id: LocalDefId) {
-        if let Some(old) = self
-            .alloc_map
-            .to_alloc
-            .lock_shard_by_value(&id)
-            .insert(id, GlobalAlloc::Static(def_id.to_def_id()))
+        if let Some(old) =
+            self.alloc_map.to_alloc.insert(id, GlobalAlloc::Static(def_id.to_def_id()))
         {
             bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
         }
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index 544a6cb64f3..a2472157d0e 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -2336,8 +2336,8 @@ macro_rules! sty_debug_print {
                 $(let mut $variant = total;)*
 
                 for shard in tcx.interners.type_.lock_shards() {
-                    let types = shard.keys();
-                    for &InternedInSet(t) in types {
+                    let types = shard.iter();
+                    for &(InternedInSet(t), ()) in types {
                         let variant = match t.internee {
                             ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
                                 ty::Float(..) | ty::Str | ty::Never => continue,
diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs
index 8e96d46dac2..a25697ba086 100644
--- a/compiler/rustc_mir_build/src/lib.rs
+++ b/compiler/rustc_mir_build/src/lib.rs
@@ -3,6 +3,7 @@
 // tidy-alphabetical-start
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(assert_matches)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs
index a0efc623b8e..82c57ef5678 100644
--- a/compiler/rustc_mir_dataflow/src/lib.rs
+++ b/compiler/rustc_mir_dataflow/src/lib.rs
@@ -1,4 +1,5 @@
 // tidy-alphabetical-start
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(assert_matches)]
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_mir_transform/src/add_call_guards.rs b/compiler/rustc_mir_transform/src/add_call_guards.rs
index bc335cee147..bacff287859 100644
--- a/compiler/rustc_mir_transform/src/add_call_guards.rs
+++ b/compiler/rustc_mir_transform/src/add_call_guards.rs
@@ -32,9 +32,12 @@ pub(super) use self::AddCallGuards::*;
 
 impl<'tcx> crate::MirPass<'tcx> for AddCallGuards {
     fn run_pass(&self, _tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
-        let mut pred_count: IndexVec<_, _> =
-            body.basic_blocks.predecessors().iter().map(|ps| ps.len()).collect();
-        pred_count[START_BLOCK] += 1;
+        let mut pred_count = IndexVec::from_elem(0u8, &body.basic_blocks);
+        for (_, data) in body.basic_blocks.iter_enumerated() {
+            for succ in data.terminator().successors() {
+                pred_count[succ] = pred_count[succ].saturating_add(1);
+            }
+        }
 
         // We need a place to store the new blocks generated
         let mut new_blocks = Vec::new();
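The rewrite above counts predecessors with one `u8` per block and `saturating_add`; saturating counters are sufficient whenever the consumer only cares whether a count is zero, one, or "more than one". A standalone sketch of the same idea on a toy graph (plain `Vec`s here, not the MIR types):

fn main() {
    // successors[b] lists the blocks that block `b` jumps to.
    let successors: Vec<Vec<usize>> = vec![vec![1, 2], vec![2], vec![]];

    // One u8 counter per block; saturation cannot change the answer to
    // "does this block have more than one predecessor?".
    let mut pred_count = vec![0u8; successors.len()];
    for succs in &successors {
        for &succ in succs {
            pred_count[succ] = pred_count[succ].saturating_add(1);
        }
    }

    assert_eq!(pred_count, vec![0, 1, 2]);
    assert!(pred_count[2] > 1); // block 2 has multiple predecessors
}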
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index 739cee5d7f4..205d388f4fb 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -1,4 +1,5 @@
 // tidy-alphabetical-start
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_monomorphize/src/lib.rs b/compiler/rustc_monomorphize/src/lib.rs
index 8f6914f3d72..5dbae50c499 100644
--- a/compiler/rustc_monomorphize/src/lib.rs
+++ b/compiler/rustc_monomorphize/src/lib.rs
@@ -1,4 +1,5 @@
 // tidy-alphabetical-start
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(file_buffered)]
 #![feature(if_let_guard)]
diff --git a/compiler/rustc_monomorphize/src/partitioning/autodiff.rs b/compiler/rustc_monomorphize/src/partitioning/autodiff.rs
index 0c855508434..ebe0b258c1b 100644
--- a/compiler/rustc_monomorphize/src/partitioning/autodiff.rs
+++ b/compiler/rustc_monomorphize/src/partitioning/autodiff.rs
@@ -12,21 +12,15 @@ fn adjust_activity_to_abi<'tcx>(tcx: TyCtxt<'tcx>, fn_ty: Ty<'tcx>, da: &mut Vec
     if !matches!(fn_ty.kind(), ty::FnDef(..)) {
         bug!("expected fn def for autodiff, got {:?}", fn_ty);
     }
-    let fnc_binder: ty::Binder<'_, ty::FnSig<'_>> = fn_ty.fn_sig(tcx);
 
-    // If rustc compiles the unmodified primal, we know that this copy of the function
-    // also has correct lifetimes. We know that Enzyme won't free the shadow too early
-    // (or actually at all), so let's strip lifetimes when computing the layout.
-    let x = tcx.instantiate_bound_regions_with_erased(fnc_binder);
+    // We don't actually pass the types back into the type system.
+    // All we do is decide how to handle the arguments.
+    let sig = fn_ty.fn_sig(tcx).skip_binder();
+
     let mut new_activities = vec![];
     let mut new_positions = vec![];
-    for (i, ty) in x.inputs().iter().enumerate() {
+    for (i, ty) in sig.inputs().iter().enumerate() {
         if let Some(inner_ty) = ty.builtin_deref(true) {
-            if ty.is_fn_ptr() {
-                // FIXME(ZuseZ4): add a nicer error, or just figure out how to support them,
-                // since Enzyme itself can handle them.
-                tcx.dcx().err("function pointers are currently not supported in autodiff");
-            }
             if inner_ty.is_slice() {
                 // We know that the length will be passed as extra arg.
                 if !da.is_empty() {
diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
index b0bbec48947..e48ee71c858 100644
--- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
@@ -271,12 +271,39 @@ where
     /// and will need to clearly document it in the rustc-dev-guide before
     /// stabilization.
     pub(super) fn step_kind_for_source(&self, source: GoalSource) -> PathKind {
-        match (self.current_goal_kind, source) {
-            (_, GoalSource::NormalizeGoal(step_kind)) => step_kind,
-            (CurrentGoalKind::CoinductiveTrait, GoalSource::ImplWhereBound) => {
-                PathKind::Coinductive
+        match source {
+            // We treat these goals as unknown for now. It is likely that most miscellaneous
+            // nested goals will be converted to an inductive variant in the future.
+            //
+            // Having unknown cycles is always the safer option, as changing that to either
+            // succeed or hard error is backwards compatible. If we incorrectly treat a cycle
+            // as inductive even though it should not be, it may be unsound during coherence and
+            // fixing it may cause inference breakage or introduce ambiguity.
+            GoalSource::Misc => PathKind::Unknown,
+            GoalSource::NormalizeGoal(path_kind) => path_kind,
+            GoalSource::ImplWhereBound => {
+                // We currently only consider a cycle coinductive if it steps
+                // into a where-clause of a coinductive trait.
+                //
+                // We probably want to make all traits coinductive in the future,
+                // so we treat cycles involving their where-clauses as ambiguous.
+                if let CurrentGoalKind::CoinductiveTrait = self.current_goal_kind {
+                    PathKind::Coinductive
+                } else {
+                    PathKind::Unknown
+                }
             }
-            _ => PathKind::Inductive,
+            // Relating types is always unproductive. If we were to map proof trees to
+            // corecursive functions as explained in #136824, relating types never
+            // introduces a constructor which could cause the recursion to be guarded.
+            GoalSource::TypeRelating => PathKind::Inductive,
+            // Instantiating a higher ranked goal can never cause the recursion to be
+            // guarded and is therefore unproductive.
+            GoalSource::InstantiateHigherRanked => PathKind::Inductive,
+            // These goal sources are likely unproductive and can be changed to
+            // `PathKind::Inductive`. Keeping them as unknown until we're confident
+            // about this and have an example where it is necessary.
+            GoalSource::AliasBoundConstCondition | GoalSource::AliasWellFormed => PathKind::Unknown,
         }
     }
 
@@ -606,7 +633,7 @@ where
 
             let (NestedNormalizationGoals(nested_goals), _, certainty) = self.evaluate_goal_raw(
                 GoalEvaluationKind::Nested,
-                GoalSource::Misc,
+                GoalSource::TypeRelating,
                 unconstrained_goal,
             )?;
             // Add the nested goals from normalization to our own nested goals.
@@ -683,7 +710,7 @@ where
     pub(super) fn add_normalizes_to_goal(&mut self, mut goal: Goal<I, ty::NormalizesTo<I>>) {
         goal.predicate = goal.predicate.fold_with(&mut ReplaceAliasWithInfer::new(
             self,
-            GoalSource::Misc,
+            GoalSource::TypeRelating,
             goal.param_env,
         ));
         self.inspect.add_normalizes_to_goal(self.delegate, self.max_input_universe, goal);
@@ -939,7 +966,15 @@ where
         rhs: T,
     ) -> Result<(), NoSolution> {
         let goals = self.delegate.relate(param_env, lhs, variance, rhs, self.origin_span)?;
-        self.add_goals(GoalSource::Misc, goals);
+        if cfg!(debug_assertions) {
+            for g in goals.iter() {
+                match g.predicate.kind().skip_binder() {
+                    ty::PredicateKind::Subtype { .. } | ty::PredicateKind::AliasRelate(..) => {}
+                    p => unreachable!("unexpected nested goal in `relate`: {p:?}"),
+                }
+            }
+        }
+        self.add_goals(GoalSource::TypeRelating, goals);
         Ok(())
     }
 
diff --git a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
index 1607fbb1b6a..6a8e0790f7c 100644
--- a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
@@ -421,7 +421,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
         self.add_goal(
             delegate,
             max_input_universe,
-            GoalSource::Misc,
+            GoalSource::TypeRelating,
             goal.with(delegate.cx(), goal.predicate),
         );
     }
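For orientation, the new `step_kind_for_source` classification introduced a few hunks above can be summarized as a single match. The enums below are simplified, hypothetical stand-ins for the real solver types in `rustc_type_ir` (which carry more variants and data); the mapping itself mirrors the diff.

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum GoalSource {
    Misc,
    NormalizeGoal(PathKind),
    ImplWhereBound,
    TypeRelating,
    InstantiateHigherRanked,
    AliasBoundConstCondition,
    AliasWellFormed,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum PathKind {
    Coinductive,
    Unknown,
    Inductive,
}

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum CurrentGoalKind {
    CoinductiveTrait,
    Other, // stand-in for the remaining goal kinds
}

fn step_kind(current: CurrentGoalKind, source: GoalSource) -> PathKind {
    match source {
        // Miscellaneous and alias-related nested goals stay "unknown" for now.
        GoalSource::Misc
        | GoalSource::AliasBoundConstCondition
        | GoalSource::AliasWellFormed => PathKind::Unknown,
        // Normalization steps carry their own path kind.
        GoalSource::NormalizeGoal(path_kind) => path_kind,
        // Only where-clauses of coinductive traits make a cycle coinductive.
        GoalSource::ImplWhereBound => match current {
            CurrentGoalKind::CoinductiveTrait => PathKind::Coinductive,
            CurrentGoalKind::Other => PathKind::Unknown,
        },
        // Relating types and instantiating higher-ranked goals are unproductive.
        GoalSource::TypeRelating | GoalSource::InstantiateHigherRanked => PathKind::Inductive,
    }
}

fn main() {
    assert_eq!(
        step_kind(CurrentGoalKind::Other, GoalSource::TypeRelating),
        PathKind::Inductive
    );
}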
diff --git a/compiler/rustc_next_trait_solver/src/solve/mod.rs b/compiler/rustc_next_trait_solver/src/solve/mod.rs
index 1fa35b60304..199f0c7512e 100644
--- a/compiler/rustc_next_trait_solver/src/solve/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/mod.rs
@@ -313,7 +313,9 @@ where
                     ty::AliasRelationDirection::Equate,
                 ),
             );
-            self.add_goal(GoalSource::Misc, alias_relate_goal);
+            // We normalize the self type to be able to relate it with
+            // types from candidates.
+            self.add_goal(GoalSource::TypeRelating, alias_relate_goal);
             self.try_evaluate_added_goals()?;
             Ok(self.resolve_vars_if_possible(normalized_term))
         } else {
diff --git a/compiler/rustc_next_trait_solver/src/solve/project_goals.rs b/compiler/rustc_next_trait_solver/src/solve/project_goals.rs
index d9452880071..944d5f0e042 100644
--- a/compiler/rustc_next_trait_solver/src/solve/project_goals.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/project_goals.rs
@@ -24,7 +24,8 @@ where
                 ty::AliasRelationDirection::Equate,
             ),
         );
-        self.add_goal(GoalSource::Misc, goal);
+        // A projection goal holds if the alias is equal to the expected term.
+        self.add_goal(GoalSource::TypeRelating, goal);
         self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
     }
 }
diff --git a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
index 67eb442d2cc..eba496fa226 100644
--- a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
@@ -1,9 +1,9 @@
 use std::convert::Infallible;
 use std::marker::PhantomData;
 
-use rustc_type_ir::Interner;
 use rustc_type_ir::search_graph::{self, PathKind};
-use rustc_type_ir::solve::{CanonicalInput, Certainty, QueryResult};
+use rustc_type_ir::solve::{CanonicalInput, Certainty, NoSolution, QueryResult};
+use rustc_type_ir::{Interner, TypingMode};
 
 use super::inspect::ProofTreeBuilder;
 use super::{FIXPOINT_STEP_LIMIT, has_no_inference_or_external_constraints};
@@ -47,7 +47,24 @@ where
     ) -> QueryResult<I> {
         match kind {
             PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes),
-            PathKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)),
+            PathKind::Unknown => response_no_constraints(cx, input, Certainty::overflow(false)),
+            // Even though we know these cycles to be unproductive, we still return
+            // overflow during coherence. This is both as we are not 100% confident in
+            // the implementation yet and any incorrect errors would be unsound there.
+            // The affected cases are also fairly artificial and not necessarily desirable
+            // so keeping this as ambiguity is fine for now.
+            //
+            // See `tests/ui/traits/next-solver/cycles/unproductive-in-coherence.rs` for an
+            // example where this would matter. We likely should change these cycles to `NoSolution`
+            // even in coherence once this is a bit more settled.
+            PathKind::Inductive => match input.typing_mode {
+                TypingMode::Coherence => {
+                    response_no_constraints(cx, input, Certainty::overflow(false))
+                }
+                TypingMode::Analysis { .. }
+                | TypingMode::PostBorrowckAnalysis { .. }
+                | TypingMode::PostAnalysis => Err(NoSolution),
+            },
         }
     }
 
@@ -57,12 +74,7 @@ where
         input: CanonicalInput<I>,
         result: QueryResult<I>,
     ) -> bool {
-        match kind {
-            PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes) == result,
-            PathKind::Inductive => {
-                response_no_constraints(cx, input, Certainty::overflow(false)) == result
-            }
-        }
+        Self::initial_provisional_result(cx, kind, input) == result
     }
 
     fn on_stack_overflow(
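
A minimal standalone sketch of the decision `initial_provisional_result` now makes for cycles, assuming a simplified `CycleResult` in place of the solver's `QueryResult`; the names and the reduced `TypingMode` below are illustrative only, not part of the patch:

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum PathKind { Inductive, Unknown, Coinductive }

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum TypingMode { Coherence, Analysis, PostBorrowckAnalysis, PostAnalysis }

#[derive(Debug, PartialEq)]
enum CycleResult { Holds, Ambiguous, NoSolution }

fn initial_provisional_result(kind: PathKind, mode: TypingMode) -> CycleResult {
    match kind {
        // Cycles with a coinductive step hold outright.
        PathKind::Coinductive => CycleResult::Holds,
        // "Unknown" cycles stay ambiguous so nobody starts relying on them.
        PathKind::Unknown => CycleResult::Ambiguous,
        // Unproductive cycles are now errors, except during coherence where
        // the patch conservatively keeps them ambiguous for the time being.
        PathKind::Inductive => match mode {
            TypingMode::Coherence => CycleResult::Ambiguous,
            TypingMode::Analysis
            | TypingMode::PostBorrowckAnalysis
            | TypingMode::PostAnalysis => CycleResult::NoSolution,
        },
    }
}

fn main() {
    assert_eq!(
        initial_provisional_result(PathKind::Inductive, TypingMode::Coherence),
        CycleResult::Ambiguous
    );
    assert_eq!(
        initial_provisional_result(PathKind::Inductive, TypingMode::PostAnalysis),
        CycleResult::NoSolution
    );
}
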
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 2edc8c83017..79939aab7fc 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -4,6 +4,7 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index b37dd64fbed..9b2d562a69e 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1364,7 +1364,6 @@ impl<'a> Parser<'a> {
                     self.bump();
                     return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None)));
                 }
-                _ => {}
             };
         } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| {
             this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))
@@ -3064,7 +3063,7 @@ impl<'a> Parser<'a> {
             }
 
             self.restore_snapshot(pre_pat_snapshot);
-            match self.parse_stmt_without_recovery(true, ForceCollect::No) {
+            match self.parse_stmt_without_recovery(true, ForceCollect::No, false) {
                 // Consume statements for as long as possible.
                 Ok(Some(stmt)) => {
                     stmts.push(stmt);
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 9e6cdfe59bb..2388463ecfc 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -21,10 +21,10 @@ use super::diagnostics::{ConsumeClosingDelim, dummy_arg};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
     AttrWrapper, ExpKeywordPair, ExpTokenPair, FollowedByType, ForceCollect, Parser, PathStyle,
-    Trailing, UsePreAttrPos,
+    Recovered, Trailing, UsePreAttrPos,
 };
 use crate::errors::{self, MacroExpandsToAdtField};
-use crate::{exp, fluent_generated as fluent, maybe_whole};
+use crate::{exp, fluent_generated as fluent};
 
 impl<'a> Parser<'a> {
     /// Parses a source module as a crate. This is the main entry point for the parser.
@@ -142,10 +142,13 @@ impl<'a> Parser<'a> {
         fn_parse_mode: FnParseMode,
         force_collect: ForceCollect,
     ) -> PResult<'a, Option<Item>> {
-        maybe_whole!(self, NtItem, |item| {
+        if let Some(item) =
+            self.eat_metavar_seq(MetaVarKind::Item, |this| this.parse_item(ForceCollect::Yes))
+        {
+            let mut item = item.expect("an actual item");
             attrs.prepend_to_nt_inner(&mut item.attrs);
-            Some(item.into_inner())
-        });
+            return Ok(Some(item.into_inner()));
+        }
 
         self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
             let lo = this.token.span;
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 4ec8d9e5e49..323c0c1d6d0 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -1076,10 +1076,12 @@ impl<'a> Parser<'a> {
         let initial_semicolon = self.token.span;
 
         while self.eat(exp!(Semi)) {
-            let _ = self.parse_stmt_without_recovery(false, ForceCollect::No).unwrap_or_else(|e| {
-                e.cancel();
-                None
-            });
+            let _ = self
+                .parse_stmt_without_recovery(false, ForceCollect::No, false)
+                .unwrap_or_else(|e| {
+                    e.cancel();
+                    None
+                });
         }
 
         expect_err
@@ -1746,6 +1748,8 @@ pub enum ParseNtResult {
     Tt(TokenTree),
     Ident(Ident, IdentIsRaw),
     Lifetime(Ident, IdentIsRaw),
+    Item(P<ast::Item>),
+    Stmt(P<ast::Stmt>),
     Pat(P<ast::Pat>, NtPatKind),
     Ty(P<ast::Ty>),
     Meta(P<ast::AttrItem>),
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index d537bc17ce3..1123755ce00 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -48,12 +48,11 @@ impl<'a> Parser<'a> {
         /// Old variant of `may_be_ident`. Being phased out.
         fn nt_may_be_ident(nt: &Nonterminal) -> bool {
             match nt {
-                NtStmt(_)
-                | NtExpr(_)
+                NtExpr(_)
                 | NtLiteral(_) // `true`, `false`
                 => true,
 
-                NtItem(_) | NtBlock(_) => false,
+                NtBlock(_) => false,
             }
         }
 
@@ -96,8 +95,7 @@ impl<'a> Parser<'a> {
                 token::OpenDelim(Delimiter::Brace) => true,
                 token::NtLifetime(..) => true,
                 token::Interpolated(nt) => match &**nt {
-                    NtBlock(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
-                    NtItem(_) => false,
+                    NtBlock(_) | NtExpr(_) | NtLiteral(_) => true,
                 },
                 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
                     MetaVarKind::Block
@@ -147,7 +145,7 @@ impl<'a> Parser<'a> {
             // Note that TT is treated differently to all the others.
             NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())),
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
-                Some(item) => NtItem(item),
+                Some(item) => return Ok(ParseNtResult::Item(item)),
                 None => {
                     return Err(self
                         .dcx()
@@ -160,7 +158,7 @@ impl<'a> Parser<'a> {
                 NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
             }
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
-                Some(s) => NtStmt(P(s)),
+                Some(stmt) => return Ok(ParseNtResult::Stmt(P(stmt))),
                 None => {
                     return Err(self
                         .dcx()
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 3a9dc5ce798..0896bd88b4c 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -5,7 +5,7 @@ use std::ops::Bound;
 use ast::Label;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, TokenKind};
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
 use rustc_ast::util::classify::{self, TrailingBrace};
 use rustc_ast::{
     AttrStyle, AttrVec, Block, BlockCheckMode, DUMMY_NODE_ID, Expr, ExprKind, HasAttrs, Local,
@@ -33,8 +33,8 @@ impl<'a> Parser<'a> {
     /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
     /// whether or not we have attributes.
     // Public for rustfmt usage.
-    pub(super) fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Stmt>> {
-        Ok(self.parse_stmt_without_recovery(false, force_collect).unwrap_or_else(|e| {
+    pub fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Stmt>> {
+        Ok(self.parse_stmt_without_recovery(false, force_collect, false).unwrap_or_else(|e| {
             e.emit();
             self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
             None
@@ -42,23 +42,27 @@ impl<'a> Parser<'a> {
     }
 
     /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
-    /// whether or not we have attributes.
-    // Public for `cfg_eval` macro expansion.
+    /// whether or not we have attributes. If `force_full_expr` is true, parses the stmt without
+    /// using `Restrictions::STMT_EXPR`. Public for `cfg_eval` macro expansion.
     pub fn parse_stmt_without_recovery(
         &mut self,
         capture_semi: bool,
         force_collect: ForceCollect,
+        force_full_expr: bool,
     ) -> PResult<'a, Option<Stmt>> {
         let pre_attr_pos = self.collect_pos();
         let attrs = self.parse_outer_attributes()?;
         let lo = self.token.span;
 
-        maybe_whole!(self, NtStmt, |stmt| {
+        if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
+            this.parse_stmt_without_recovery(false, ForceCollect::Yes, false)
+        }) {
+            let mut stmt = stmt.expect("an actual statement");
             stmt.visit_attrs(|stmt_attrs| {
                 attrs.prepend_to_nt_inner(stmt_attrs);
             });
-            Some(stmt.into_inner())
-        });
+            return Ok(Some(stmt));
+        }
 
         if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
             self.bump();
@@ -147,12 +151,14 @@ impl<'a> Parser<'a> {
         } else if self.token != token::CloseDelim(Delimiter::Brace) {
             // Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
             // above.
+            let restrictions =
+                if force_full_expr { Restrictions::empty() } else { Restrictions::STMT_EXPR };
             let e = self.collect_tokens(
                 Some(pre_attr_pos),
                 AttrWrapper::empty(),
                 force_collect,
                 |this, _empty_attrs| {
-                    let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?;
+                    let (expr, _) = this.parse_expr_res(restrictions, attrs)?;
                     Ok((expr, Trailing::No, UsePreAttrPos::Yes))
                 },
             )?;
@@ -229,11 +235,15 @@ impl<'a> Parser<'a> {
         let mac = P(MacCall { path, args });
 
         let kind = if (style == MacStmtStyle::Braces
-            && self.token != token::Dot
-            && self.token != token::Question)
-            || self.token == token::Semi
-            || self.token == token::Eof
-        {
+            && !matches!(self.token.kind, token::Dot | token::Question))
+            || matches!(
+                self.token.kind,
+                token::Semi
+                    | token::Eof
+                    | token::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+                        MetaVarKind::Stmt
+                    )))
+            ) {
             StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
         } else {
             // Since none of the above applied, this is an expression statement macro.
@@ -501,7 +511,7 @@ impl<'a> Parser<'a> {
         //      bar;
         //
         // which is valid in other languages, but not Rust.
-        match self.parse_stmt_without_recovery(false, ForceCollect::No) {
+        match self.parse_stmt_without_recovery(false, ForceCollect::No, false) {
             // If the next token is an open brace, e.g., we have:
             //
             //     if expr other_expr {
@@ -810,10 +820,24 @@ impl<'a> Parser<'a> {
         &mut self,
         recover: AttemptLocalParseRecovery,
     ) -> PResult<'a, Option<Stmt>> {
-        // Skip looking for a trailing semicolon when we have an interpolated statement.
-        maybe_whole!(self, NtStmt, |stmt| Some(stmt.into_inner()));
+        // Skip looking for a trailing semicolon when we have a metavar seq.
+        if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
+            // Why pass `true` for `force_full_expr`? Statement expressions are less expressive
+            // than "full" expressions, due to the `STMT_EXPR` restriction, and sometimes need
+            // parentheses. E.g. the "full" expression `match paren_around_match {} | true` when
+            // used in statement context must be written `(match paren_around_match {} | true)`.
+            // However, if the expression we are parsing in this statement context was pasted by a
+            // declarative macro, it may have come from a "full" expression context, and lack
+            // these parentheses. So we lift the `STMT_EXPR` restriction to ensure the statement
+            // will reparse successfully.
+            this.parse_stmt_without_recovery(false, ForceCollect::No, true)
+        }) {
+            let stmt = stmt.expect("an actual statement");
+            return Ok(Some(stmt));
+        }
 
-        let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No)? else {
+        let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No, false)?
+        else {
             return Ok(None);
         };
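
An illustrative snippet (not from the patch) of the parenthesization issue described in the comment above; `paren_around_match` is just a local binding here:

fn main() {
    let paren_around_match = ();
    // Under the `STMT_EXPR` restriction, a statement-position expression ends at the
    // closing brace of a leading `match`, so the unparenthesized form
    //     match paren_around_match { () => false } | true;
    // is a syntax error, while the parenthesized form parses as one full expression:
    (match paren_around_match { () => false } | true);
}
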
 
diff --git a/compiler/rustc_passes/src/lib.rs b/compiler/rustc_passes/src/lib.rs
index 93ff0f66d69..6f6115af96c 100644
--- a/compiler/rustc_passes/src/lib.rs
+++ b/compiler/rustc_passes/src/lib.rs
@@ -6,6 +6,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(let_chains)]
diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs
index f45e8c065b3..cf32f237b86 100644
--- a/compiler/rustc_privacy/src/lib.rs
+++ b/compiler/rustc_privacy/src/lib.rs
@@ -1,5 +1,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(associated_type_defaults)]
diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs
index 40da46a027d..a83c388c747 100644
--- a/compiler/rustc_query_impl/src/lib.rs
+++ b/compiler/rustc_query_impl/src/lib.rs
@@ -3,6 +3,7 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(unused_parens)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(min_specialization)]
diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs
index 6ece01c211b..3109d53cd2c 100644
--- a/compiler/rustc_query_system/src/dep_graph/graph.rs
+++ b/compiler/rustc_query_system/src/dep_graph/graph.rs
@@ -1,5 +1,4 @@
 use std::assert_matches::assert_matches;
-use std::collections::hash_map::Entry;
 use std::fmt::Debug;
 use std::hash::Hash;
 use std::marker::PhantomData;
@@ -9,7 +8,7 @@ use std::sync::atomic::{AtomicU32, Ordering};
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::profiling::{QueryInvocationId, SelfProfilerRef};
-use rustc_data_structures::sharded::{self, Sharded};
+use rustc_data_structures::sharded::{self, ShardedHashMap};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::{AtomicU64, Lock};
 use rustc_data_structures::unord::UnordMap;
@@ -619,7 +618,7 @@ impl<D: Deps> DepGraphData<D> {
         if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
             self.current.prev_index_to_index.lock()[prev_index]
         } else {
-            self.current.new_node_to_index.lock_shard_by_value(dep_node).get(dep_node).copied()
+            self.current.new_node_to_index.get(dep_node)
         }
     }
 
@@ -1048,7 +1047,7 @@ rustc_index::newtype_index! {
 /// first, and `data` second.
 pub(super) struct CurrentDepGraph<D: Deps> {
     encoder: GraphEncoder<D>,
-    new_node_to_index: Sharded<FxHashMap<DepNode, DepNodeIndex>>,
+    new_node_to_index: ShardedHashMap<DepNode, DepNodeIndex>,
     prev_index_to_index: Lock<IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>>,
 
     /// This is used to verify that fingerprints do not change between the creation of a node
@@ -1117,12 +1116,9 @@ impl<D: Deps> CurrentDepGraph<D> {
                 profiler,
                 previous,
             ),
-            new_node_to_index: Sharded::new(|| {
-                FxHashMap::with_capacity_and_hasher(
-                    new_node_count_estimate / sharded::shards(),
-                    Default::default(),
-                )
-            }),
+            new_node_to_index: ShardedHashMap::with_capacity(
+                new_node_count_estimate / sharded::shards(),
+            ),
             prev_index_to_index: Lock::new(IndexVec::from_elem_n(None, prev_graph_node_count)),
             anon_id_seed,
             #[cfg(debug_assertions)]
@@ -1152,14 +1148,9 @@ impl<D: Deps> CurrentDepGraph<D> {
         edges: EdgesVec,
         current_fingerprint: Fingerprint,
     ) -> DepNodeIndex {
-        let dep_node_index = match self.new_node_to_index.lock_shard_by_value(&key).entry(key) {
-            Entry::Occupied(entry) => *entry.get(),
-            Entry::Vacant(entry) => {
-                let dep_node_index = self.encoder.send(key, current_fingerprint, edges);
-                entry.insert(dep_node_index);
-                dep_node_index
-            }
-        };
+        let dep_node_index = self
+            .new_node_to_index
+            .get_or_insert_with(key, || self.encoder.send(key, current_fingerprint, edges));
 
         #[cfg(debug_assertions)]
         self.record_edge(dep_node_index, key, current_fingerprint);
@@ -1257,7 +1248,7 @@ impl<D: Deps> CurrentDepGraph<D> {
     ) {
         let node = &prev_graph.index_to_node(prev_index);
         debug_assert!(
-            !self.new_node_to_index.lock_shard_by_value(node).contains_key(node),
+            self.new_node_to_index.get(node).is_none(),
             "node from previous graph present in new node collection"
         );
     }
@@ -1382,7 +1373,7 @@ fn panic_on_forbidden_read<D: Deps>(data: &DepGraphData<D>, dep_node_index: DepN
     if dep_node.is_none() {
         // Try to find it among the new nodes
         for shard in data.current.new_node_to_index.lock_shards() {
-            if let Some((node, _)) = shard.iter().find(|(_, index)| **index == dep_node_index) {
+            if let Some((node, _)) = shard.iter().find(|(_, index)| *index == dep_node_index) {
                 dep_node = Some(*node);
                 break;
             }
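
For context on the `ShardedHashMap` API used above: the real type lives in `rustc_data_structures::sharded` (touched elsewhere in this commit) and is not shown here, so the following is only a rough, simplified stand-in for the sharding idea, built from std types:

use std::collections::HashMap;
use std::collections::hash_map::{Entry, RandomState};
use std::hash::{BuildHasher, Hash, Hasher};
use std::sync::Mutex;

const SHARDS: usize = 8;

/// Toy sharded map: each key is routed to one of a fixed number of
/// independently locked shards, reducing contention under parallelism.
struct ShardedMap<K, V> {
    hasher: RandomState,
    shards: Vec<Mutex<HashMap<K, V>>>,
}

impl<K: Hash + Eq, V: Copy> ShardedMap<K, V> {
    fn new() -> Self {
        ShardedMap {
            hasher: RandomState::new(),
            shards: (0..SHARDS).map(|_| Mutex::new(HashMap::new())).collect(),
        }
    }

    fn shard(&self, key: &K) -> &Mutex<HashMap<K, V>> {
        let mut h = self.hasher.build_hasher();
        key.hash(&mut h);
        &self.shards[(h.finish() as usize) % SHARDS]
    }

    fn get(&self, key: &K) -> Option<V> {
        self.shard(key).lock().unwrap().get(key).copied()
    }

    /// Analogue of the `get_or_insert_with` call in the dep graph above: the
    /// value is computed at most once per key while the shard lock is held.
    fn get_or_insert_with(&self, key: K, make: impl FnOnce() -> V) -> V {
        match self.shard(&key).lock().unwrap().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => *e.insert(make()),
        }
    }
}

fn main() {
    let map: ShardedMap<&str, u32> = ShardedMap::new();
    let v = map.get_or_insert_with("dep_node", || 42);
    assert_eq!(v, 42);
    assert_eq!(map.get(&"dep_node"), Some(42));
}
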
diff --git a/compiler/rustc_query_system/src/lib.rs b/compiler/rustc_query_system/src/lib.rs
index a546362414c..2aedd365adc 100644
--- a/compiler/rustc_query_system/src/lib.rs
+++ b/compiler/rustc_query_system/src/lib.rs
@@ -3,7 +3,6 @@
 #![feature(assert_matches)]
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
-#![feature(hash_raw_entry)]
 #![feature(let_chains)]
 #![feature(min_specialization)]
 // tidy-alphabetical-end
diff --git a/compiler/rustc_query_system/src/query/caches.rs b/compiler/rustc_query_system/src/query/caches.rs
index 3b47e7eba0f..30b5d7e5954 100644
--- a/compiler/rustc_query_system/src/query/caches.rs
+++ b/compiler/rustc_query_system/src/query/caches.rs
@@ -2,8 +2,7 @@ use std::fmt::Debug;
 use std::hash::Hash;
 use std::sync::OnceLock;
 
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sharded::{self, Sharded};
+use rustc_data_structures::sharded::ShardedHashMap;
 pub use rustc_data_structures::vec_cache::VecCache;
 use rustc_hir::def_id::LOCAL_CRATE;
 use rustc_index::Idx;
@@ -36,7 +35,7 @@ pub trait QueryCache: Sized {
 /// In-memory cache for queries whose keys aren't suitable for any of the
 /// more specialized kinds of cache. Backed by a sharded hashmap.
 pub struct DefaultCache<K, V> {
-    cache: Sharded<FxHashMap<K, (V, DepNodeIndex)>>,
+    cache: ShardedHashMap<K, (V, DepNodeIndex)>,
 }
 
 impl<K, V> Default for DefaultCache<K, V> {
@@ -55,19 +54,14 @@ where
 
     #[inline(always)]
     fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
-        let key_hash = sharded::make_hash(key);
-        let lock = self.cache.lock_shard_by_hash(key_hash);
-        let result = lock.raw_entry().from_key_hashed_nocheck(key_hash, key);
-
-        if let Some((_, value)) = result { Some(*value) } else { None }
+        self.cache.get(key)
     }
 
     #[inline]
     fn complete(&self, key: K, value: V, index: DepNodeIndex) {
-        let mut lock = self.cache.lock_shard_by_value(&key);
         // We may be overwriting another value. This is all right, since the dep-graph
         // will check that the fingerprint matches.
-        lock.insert(key, (value, index));
+        self.cache.insert(key, (value, index));
     }
 
     fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
diff --git a/compiler/rustc_sanitizers/src/lib.rs b/compiler/rustc_sanitizers/src/lib.rs
index e4792563e71..c4a9cab24d0 100644
--- a/compiler/rustc_sanitizers/src/lib.rs
+++ b/compiler/rustc_sanitizers/src/lib.rs
@@ -4,6 +4,7 @@
 //! compiler.
 
 // tidy-alphabetical-start
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(let_chains)]
 // tidy-alphabetical-end
 
diff --git a/compiler/rustc_smir/src/lib.rs b/compiler/rustc_smir/src/lib.rs
index 9f888875306..eaba14bbf30 100644
--- a/compiler/rustc_smir/src/lib.rs
+++ b/compiler/rustc_smir/src/lib.rs
@@ -9,6 +9,7 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::usage_of_ty_tykind)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(attr(allow(unused_variables), deny(warnings)))
diff --git a/compiler/rustc_symbol_mangling/src/lib.rs b/compiler/rustc_symbol_mangling/src/lib.rs
index 269401ef3a2..c4558b5ce8b 100644
--- a/compiler/rustc_symbol_mangling/src/lib.rs
+++ b/compiler/rustc_symbol_mangling/src/lib.rs
@@ -89,6 +89,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(let_chains)]
diff --git a/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs b/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs
index 4f177df89e2..352ac7c1a4e 100644
--- a/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs
+++ b/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs
@@ -440,7 +440,7 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> {
             match (child_mode, nested_goal.source()) {
                 (
                     ChildMode::Trait(_) | ChildMode::Host(_),
-                    GoalSource::Misc | GoalSource::NormalizeGoal(_),
+                    GoalSource::Misc | GoalSource::TypeRelating | GoalSource::NormalizeGoal(_),
                 ) => {
                     continue;
                 }
diff --git a/compiler/rustc_transmute/src/lib.rs b/compiler/rustc_transmute/src/lib.rs
index 00928137d29..7d800e49ff4 100644
--- a/compiler/rustc_transmute/src/lib.rs
+++ b/compiler/rustc_transmute/src/lib.rs
@@ -1,4 +1,5 @@
 // tidy-alphabetical-start
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(never_type)]
 // tidy-alphabetical-end
 
diff --git a/compiler/rustc_ty_utils/src/lib.rs b/compiler/rustc_ty_utils/src/lib.rs
index 35cc6f39856..143b7d53880 100644
--- a/compiler/rustc_ty_utils/src/lib.rs
+++ b/compiler/rustc_ty_utils/src/lib.rs
@@ -6,6 +6,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs
index 18e84db5d68..9ae83928057 100644
--- a/compiler/rustc_type_ir/src/search_graph/mod.rs
+++ b/compiler/rustc_type_ir/src/search_graph/mod.rs
@@ -13,6 +13,7 @@
 /// behavior as long as the resulting behavior is still correct.
 use std::cmp::Ordering;
 use std::collections::BTreeMap;
+use std::collections::hash_map::Entry;
 use std::fmt::Debug;
 use std::hash::Hash;
 use std::marker::PhantomData;
@@ -20,7 +21,7 @@ use std::marker::PhantomData;
 use derive_where::derive_where;
 use rustc_index::{Idx, IndexVec};
 #[cfg(feature = "nightly")]
-use rustc_macros::HashStable_NoContext;
+use rustc_macros::{HashStable_NoContext, TyDecodable, TyEncodable};
 use tracing::debug;
 
 use crate::data_structures::HashMap;
@@ -111,21 +112,35 @@ pub trait Delegate {
 /// In the initial iteration of a cycle, we do not yet have a provisional
 /// result. In the case we return an initial provisional result depending
 /// on the kind of cycle.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "nightly", derive(HashStable_NoContext))]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[cfg_attr(feature = "nightly", derive(TyDecodable, TyEncodable, HashStable_NoContext))]
 pub enum PathKind {
-    Coinductive,
+    /// A path consisting of only inductive/unproductive steps. The initial
+    /// provisional result for such cycles is `Err(NoSolution)`. We currently
+    /// treat them as `PathKind::Unknown` during coherence until we're fully
+    /// confident in our approach.
     Inductive,
+    /// A path which is not coinductive right now, but which we may want to
+    /// make coinductive in the future. We return an ambiguous result in this
+    /// case to prevent people from relying on it.
+    Unknown,
+    /// A path with at least one coinductive step. Such cycles hold.
+    Coinductive,
 }
+
 impl PathKind {
     /// Returns the path kind when merging `self` with `rest`.
     ///
     /// Given an inductive path `self` and a coinductive path `rest`,
     /// the path `self -> rest` would be coinductive.
+    ///
+    /// This operation represents an ordering and would be equivalent
+    /// to `max(self, rest)`.
     fn extend(self, rest: PathKind) -> PathKind {
-        match self {
-            PathKind::Coinductive => PathKind::Coinductive,
-            PathKind::Inductive => rest,
+        match (self, rest) {
+            (PathKind::Coinductive, _) | (_, PathKind::Coinductive) => PathKind::Coinductive,
+            (PathKind::Unknown, _) | (_, PathKind::Unknown) => PathKind::Unknown,
+            (PathKind::Inductive, PathKind::Inductive) => PathKind::Inductive,
         }
     }
 }
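
Since the variants are now ordered `Inductive < Unknown < Coinductive`, `extend` behaves like a `max`. A standalone equivalent (illustrative only; the real `PathKind` above does not derive `Ord`):

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum PathKind { Inductive, Unknown, Coinductive }

fn extend(a: PathKind, b: PathKind) -> PathKind {
    // Equivalent to the match in the patch: any coinductive step wins,
    // then any unknown step, otherwise the path stays inductive.
    a.max(b)
}

fn main() {
    assert_eq!(extend(PathKind::Inductive, PathKind::Coinductive), PathKind::Coinductive);
    assert_eq!(extend(PathKind::Unknown, PathKind::Inductive), PathKind::Unknown);
    assert_eq!(extend(PathKind::Inductive, PathKind::Inductive), PathKind::Inductive);
}
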
@@ -159,9 +174,6 @@ impl UsageKind {
             }
         }
     }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
 }
 
 /// For each goal we track whether the paths from this goal
@@ -297,7 +309,7 @@ impl CycleHeads {
 
             let path_from_entry = match step_kind {
                 PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-                PathKind::Inductive => path_from_entry,
+                PathKind::Unknown | PathKind::Inductive => path_from_entry,
             };
 
             self.insert(head, path_from_entry);
@@ -305,6 +317,63 @@ impl CycleHeads {
     }
 }
 
+bitflags::bitflags! {
+    /// Tracks how nested goals have been accessed. This is necessary to disable
+    /// global cache entries if computing them would otherwise result in a cycle or
+    /// access a provisional cache entry.
+    #[derive(Debug, Clone, Copy)]
+    pub struct PathsToNested: u8 {
+        /// The initial value when adding a goal to its own nested goals.
+        const EMPTY                      = 1 << 0;
+        const INDUCTIVE                  = 1 << 1;
+        const UNKNOWN                    = 1 << 2;
+        const COINDUCTIVE                = 1 << 3;
+    }
+}
+impl From<PathKind> for PathsToNested {
+    fn from(path: PathKind) -> PathsToNested {
+        match path {
+            PathKind::Inductive => PathsToNested::INDUCTIVE,
+            PathKind::Unknown => PathsToNested::UNKNOWN,
+            PathKind::Coinductive => PathsToNested::COINDUCTIVE,
+        }
+    }
+}
+impl PathsToNested {
+    /// The implementation of this function is kind of ugly. We check whether
+    /// there currently exist 'weaker' paths in the set; if so, we upgrade these
+    /// paths to at least `path`.
+    #[must_use]
+    fn extend_with(mut self, path: PathKind) -> Self {
+        match path {
+            PathKind::Inductive => {
+                if self.intersects(PathsToNested::EMPTY) {
+                    self.remove(PathsToNested::EMPTY);
+                    self.insert(PathsToNested::INDUCTIVE);
+                }
+            }
+            PathKind::Unknown => {
+                if self.intersects(PathsToNested::EMPTY | PathsToNested::INDUCTIVE) {
+                    self.remove(PathsToNested::EMPTY | PathsToNested::INDUCTIVE);
+                    self.insert(PathsToNested::UNKNOWN);
+                }
+            }
+            PathKind::Coinductive => {
+                if self.intersects(
+                    PathsToNested::EMPTY | PathsToNested::INDUCTIVE | PathsToNested::UNKNOWN,
+                ) {
+                    self.remove(
+                        PathsToNested::EMPTY | PathsToNested::INDUCTIVE | PathsToNested::UNKNOWN,
+                    );
+                    self.insert(PathsToNested::COINDUCTIVE);
+                }
+            }
+        }
+
+        self
+    }
+}
+
 /// The nested goals of each stack entry and the path from the
 /// stack entry to that nested goal.
 ///
@@ -322,15 +391,18 @@ impl CycleHeads {
 /// results from the cycle BAB depending on the cycle root.
 #[derive_where(Debug, Default, Clone; X: Cx)]
 struct NestedGoals<X: Cx> {
-    nested_goals: HashMap<X::Input, UsageKind>,
+    nested_goals: HashMap<X::Input, PathsToNested>,
 }
 impl<X: Cx> NestedGoals<X> {
     fn is_empty(&self) -> bool {
         self.nested_goals.is_empty()
     }
 
-    fn insert(&mut self, input: X::Input, path_from_entry: UsageKind) {
-        self.nested_goals.entry(input).or_insert(path_from_entry).and_merge(path_from_entry);
+    fn insert(&mut self, input: X::Input, paths_to_nested: PathsToNested) {
+        match self.nested_goals.entry(input) {
+            Entry::Occupied(mut entry) => *entry.get_mut() |= paths_to_nested,
+            Entry::Vacant(entry) => drop(entry.insert(paths_to_nested)),
+        }
     }
 
     /// Adds the nested goals of a nested goal, given that the path `step_kind` from this goal
@@ -341,18 +413,15 @@ impl<X: Cx> NestedGoals<X> {
     /// the same as for the child.
     fn extend_from_child(&mut self, step_kind: PathKind, nested_goals: &NestedGoals<X>) {
         #[allow(rustc::potential_query_instability)]
-        for (input, path_from_entry) in nested_goals.iter() {
-            let path_from_entry = match step_kind {
-                PathKind::Coinductive => UsageKind::Single(PathKind::Coinductive),
-                PathKind::Inductive => path_from_entry,
-            };
-            self.insert(input, path_from_entry);
+        for (input, paths_to_nested) in nested_goals.iter() {
+            let paths_to_nested = paths_to_nested.extend_with(step_kind);
+            self.insert(input, paths_to_nested);
         }
     }
 
     #[cfg_attr(feature = "nightly", rustc_lint_query_instability)]
     #[allow(rustc::potential_query_instability)]
-    fn iter(&self) -> impl Iterator<Item = (X::Input, UsageKind)> {
+    fn iter(&self) -> impl Iterator<Item = (X::Input, PathsToNested)> + '_ {
         self.nested_goals.iter().map(|(i, p)| (*i, *p))
     }
 
@@ -490,7 +559,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             // goals as this change may cause them to now depend on additional
             // goals, resulting in new cycles. See the dev-guide for examples.
             if parent_depends_on_cycle {
-                parent.nested_goals.insert(parent.input, UsageKind::Single(PathKind::Inductive))
+                parent.nested_goals.insert(parent.input, PathsToNested::EMPTY);
             }
         }
     }
@@ -666,7 +735,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             //
             // We must therefore not use the global cache entry for `B` in that case.
             // See tests/ui/traits/next-solver/cycles/hidden-by-overflow.rs
-            last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Inductive));
+            last.nested_goals.insert(last.input, PathsToNested::EMPTY);
         }
 
         debug!("encountered stack overflow");
@@ -749,16 +818,11 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
 
                 // We now care about the path from the next highest cycle head to the
                 // provisional cache entry.
-                match path_from_head {
-                    PathKind::Coinductive => {}
-                    PathKind::Inductive => {
-                        *path_from_head = Self::cycle_path_kind(
-                            &self.stack,
-                            stack_entry.step_kind_from_parent,
-                            head,
-                        )
-                    }
-                }
+                *path_from_head = path_from_head.extend(Self::cycle_path_kind(
+                    &self.stack,
+                    stack_entry.step_kind_from_parent,
+                    head,
+                ));
                 // Mutate the result of the provisional cache entry in case we did
                 // not reach a fixpoint.
                 *result = mutate_result(input, *result);
@@ -858,7 +922,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             for &ProvisionalCacheEntry {
                 encountered_overflow,
                 ref heads,
-                path_from_head,
+                path_from_head: head_to_provisional,
                 result: _,
             } in entries.iter()
             {
@@ -870,24 +934,19 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
 
                 // A provisional cache entry only applies if the path from its highest head
                 // matches the path when encountering the goal.
+                //
+                // We check whether any of the paths taken while computing the global goal
+                // would end up with an applicable provisional cache entry.
                 let head = heads.highest_cycle_head();
-                let full_path = match Self::cycle_path_kind(stack, step_kind_from_parent, head) {
-                    PathKind::Coinductive => UsageKind::Single(PathKind::Coinductive),
-                    PathKind::Inductive => path_from_global_entry,
-                };
-
-                match (full_path, path_from_head) {
-                    (UsageKind::Mixed, _)
-                    | (UsageKind::Single(PathKind::Coinductive), PathKind::Coinductive)
-                    | (UsageKind::Single(PathKind::Inductive), PathKind::Inductive) => {
-                        debug!(
-                            ?full_path,
-                            ?path_from_head,
-                            "cache entry not applicable due to matching paths"
-                        );
-                        return false;
-                    }
-                    _ => debug!(?full_path, ?path_from_head, "paths don't match"),
+                let head_to_curr = Self::cycle_path_kind(stack, step_kind_from_parent, head);
+                let full_paths = path_from_global_entry.extend_with(head_to_curr);
+                if full_paths.contains(head_to_provisional.into()) {
+                    debug!(
+                        ?full_paths,
+                        ?head_to_provisional,
+                        "cache entry not applicable due to matching paths"
+                    );
+                    return false;
                 }
             }
         }
@@ -986,8 +1045,8 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         let last = &mut self.stack[last_index];
         last.reached_depth = last.reached_depth.max(next_index);
 
-        last.nested_goals.insert(input, UsageKind::Single(step_kind_from_parent));
-        last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Inductive));
+        last.nested_goals.insert(input, step_kind_from_parent.into());
+        last.nested_goals.insert(last.input, PathsToNested::EMPTY);
         if last_index != head {
             last.heads.insert(head, step_kind_from_parent);
         }
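
A standalone sketch of the `PathsToNested::extend_with` upgrade logic introduced earlier in this file, re-declared here with a plain `u8` bitmask instead of the patch's `bitflags!` struct:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum PathKind { Inductive, Unknown, Coinductive }

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct PathsToNested(u8);

impl PathsToNested {
    const EMPTY: u8 = 1 << 0;
    const INDUCTIVE: u8 = 1 << 1;
    const UNKNOWN: u8 = 1 << 2;
    const COINDUCTIVE: u8 = 1 << 3;

    /// Weaker bits already in the set are upgraded to at least `path`,
    /// mirroring `extend_with` in the patch.
    fn extend_with(mut self, path: PathKind) -> Self {
        let (weaker, upgraded) = match path {
            PathKind::Inductive => (Self::EMPTY, Self::INDUCTIVE),
            PathKind::Unknown => (Self::EMPTY | Self::INDUCTIVE, Self::UNKNOWN),
            PathKind::Coinductive => {
                (Self::EMPTY | Self::INDUCTIVE | Self::UNKNOWN, Self::COINDUCTIVE)
            }
        };
        if (self.0 & weaker) != 0 {
            self.0 &= !weaker;
            self.0 |= upgraded;
        }
        self
    }
}

fn main() {
    // A goal first recorded with only the EMPTY bit, then reached again
    // through an unknown step, ends up tracked as UNKNOWN only.
    let paths = PathsToNested(PathsToNested::EMPTY).extend_with(PathKind::Unknown);
    assert_eq!(paths, PathsToNested(PathsToNested::UNKNOWN));

    // Coinductive bits are never downgraded by weaker steps.
    let paths = PathsToNested(PathsToNested::COINDUCTIVE).extend_with(PathKind::Inductive);
    assert_eq!(paths, PathsToNested(PathsToNested::COINDUCTIVE));

    // A coinductive step upgrades every weaker bit.
    let paths = PathsToNested(PathsToNested::EMPTY | PathsToNested::INDUCTIVE)
        .extend_with(PathKind::Coinductive);
    assert_eq!(paths, PathsToNested(PathsToNested::COINDUCTIVE));
}
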
diff --git a/compiler/rustc_type_ir/src/solve/mod.rs b/compiler/rustc_type_ir/src/solve/mod.rs
index 6f37db1cb85..de2e0f2c0ec 100644
--- a/compiler/rustc_type_ir/src/solve/mod.rs
+++ b/compiler/rustc_type_ir/src/solve/mod.rs
@@ -58,20 +58,24 @@ impl<I: Interner, P> Goal<I, P> {
 /// Why a specific goal has to be proven.
 ///
 /// This is necessary as we treat nested goals differently depending on
-/// their source. This is currently mostly used by proof tree visitors
-/// but will be used by cycle handling in the future.
+/// their source. This is used to decide whether a cycle is coinductive.
+/// See the documentation of `EvalCtxt::step_kind_for_source` for more details
+/// about this.
+///
+/// It is also used by proof tree visitors, e.g. for diagnostic purposes.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 #[cfg_attr(feature = "nightly", derive(HashStable_NoContext))]
 pub enum GoalSource {
     Misc,
-    /// We're proving a where-bound of an impl.
+    /// A nested goal required to prove that types are equal/subtypes.
+    /// This is always an unproductive step.
     ///
-    /// FIXME(-Znext-solver=coinductive): Explain how and why this
-    /// changes whether cycles are coinductive.
+    /// This is also used for all `NormalizesTo` goals, as they are used
+    /// to relate types in `AliasRelate`.
+    TypeRelating,
+    /// We're proving a where-bound of an impl.
     ImplWhereBound,
     /// Const conditions that need to hold for `~const` alias bounds to hold.
-    ///
-    /// FIXME(-Znext-solver=coinductive): Are these even coinductive?
     AliasBoundConstCondition,
     /// Instantiating a higher-ranked goal and re-proving it.
     InstantiateHigherRanked,
@@ -79,7 +83,6 @@ pub enum GoalSource {
     /// This is used in two places: projecting to an opaque whose hidden type
     /// is already registered in the opaque type storage, and for rigid projections.
     AliasWellFormed,
-
     /// In case normalizing aliases in nested goals cycles, eagerly normalizing these
     /// aliases in the context of the parent may incorrectly change the cycle kind.
     /// Normalizing aliases in goals therefore tracks the original path kind for this