Diffstat (limited to 'compiler')
-rw-r--r--  compiler/rustc_arena/src/lib.rs | 4
-rw-r--r--  compiler/rustc_ast/src/ast.rs | 9
-rw-r--r--  compiler/rustc_ast/src/token.rs | 7
-rw-r--r--  compiler/rustc_codegen_llvm/src/back/write.rs | 3
-rw-r--r--  compiler/rustc_data_structures/src/graph/vec_graph/mod.rs | 2
-rw-r--r--  compiler/rustc_feature/src/active.rs | 4
-rw-r--r--  compiler/rustc_hir/src/def.rs | 4
-rw-r--r--  compiler/rustc_infer/src/infer/error_reporting/mod.rs | 2
-rw-r--r--  compiler/rustc_infer/src/infer/mod.rs | 2
-rw-r--r--  compiler/rustc_interface/src/passes.rs | 7
-rw-r--r--  compiler/rustc_interface/src/tests.rs | 1
-rw-r--r--  compiler/rustc_interface/src/util.rs | 5
-rw-r--r--  compiler/rustc_lint/src/lib.rs | 5
-rw-r--r--  compiler/rustc_lint/src/methods.rs | 106
-rw-r--r--  compiler/rustc_lint/src/unused.rs | 6
-rw-r--r--  compiler/rustc_middle/src/lib.rs | 2
-rw-r--r--  compiler/rustc_middle/src/mir/mod.rs | 12
-rw-r--r--  compiler/rustc_middle/src/ty/inhabitedness/mod.rs | 6
-rw-r--r--  compiler/rustc_middle/src/ty/mod.rs | 4
-rw-r--r--  compiler/rustc_middle/src/ty/relate.rs | 2
-rw-r--r--  compiler/rustc_middle/src/ty/sty.rs | 4
-rw-r--r--  compiler/rustc_mir/src/borrow_check/type_check/mod.rs | 17
-rw-r--r--  compiler/rustc_mir/src/const_eval/eval_queries.rs | 49
-rw-r--r--  compiler/rustc_mir/src/const_eval/mod.rs | 2
-rw-r--r--  compiler/rustc_mir/src/dataflow/impls/borrows.rs | 2
-rw-r--r--  compiler/rustc_mir/src/dataflow/impls/liveness.rs | 4
-rw-r--r--  compiler/rustc_mir/src/interpret/intern.rs | 87
-rw-r--r--  compiler/rustc_mir/src/interpret/mod.rs | 2
-rw-r--r--  compiler/rustc_mir/src/interpret/validity.rs | 91
-rw-r--r--  compiler/rustc_mir/src/transform/const_prop.rs | 11
-rw-r--r--  compiler/rustc_mir/src/transform/function_item_references.rs | 205
-rw-r--r--  compiler/rustc_mir/src/transform/instcombine.rs | 5
-rw-r--r--  compiler/rustc_mir/src/transform/mod.rs | 2
-rw-r--r--  compiler/rustc_mir/src/transform/promote_consts.rs | 3
-rw-r--r--  compiler/rustc_mir_build/src/build/expr/as_operand.rs | 2
-rw-r--r--  compiler/rustc_mir_build/src/lib.rs | 1
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/_match.rs | 1850
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs | 164
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs | 16
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs | 41
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs | 1
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs | 50
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs | 14
-rw-r--r--  compiler/rustc_resolve/src/late.rs | 88
-rw-r--r--  compiler/rustc_resolve/src/late/diagnostics.rs | 105
-rw-r--r--  compiler/rustc_session/src/filesearch.rs | 4
-rw-r--r--  compiler/rustc_session/src/lint/builtin.rs | 31
-rw-r--r--  compiler/rustc_session/src/options.rs | 8
-rw-r--r--  compiler/rustc_span/src/caching_source_map_view.rs | 42
-rw-r--r--  compiler/rustc_span/src/lib.rs | 28
-rw-r--r--  compiler/rustc_span/src/symbol.rs | 10
-rw-r--r--  compiler/rustc_trait_selection/src/traits/const_evaluatable.rs | 61
-rw-r--r--  compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs | 4
-rw-r--r--  compiler/rustc_trait_selection/src/traits/object_safety.rs | 105
-rw-r--r--  compiler/rustc_typeck/src/check/check.rs | 2
-rw-r--r--  compiler/rustc_typeck/src/check/expr.rs | 78
-rw-r--r--  compiler/rustc_typeck/src/check/gather_locals.rs | 29
-rw-r--r--  compiler/rustc_typeck/src/check/method/suggest.rs | 54
-rw-r--r--  compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs | 35
-rw-r--r--  compiler/rustc_typeck/src/collect.rs | 10
60 files changed, 1911 insertions(+), 1599 deletions(-)
diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs
index 1a85a46ed74..a8522547666 100644
--- a/compiler/rustc_arena/src/lib.rs
+++ b/compiler/rustc_arena/src/lib.rs
@@ -533,7 +533,7 @@ impl DropArena {
         ptr::write(mem, object);
         let result = &mut *mem;
         // Record the destructor after doing the allocation as that may panic
-        // and would cause `object`'s destuctor to run twice if it was recorded before
+        // and would cause `object`'s destructor to run twice if it was recorded before
         self.destructors
             .borrow_mut()
             .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
@@ -560,7 +560,7 @@ impl DropArena {
         mem::forget(vec.drain(..));
 
         // Record the destructors after doing the allocation as that may panic
-        // and would cause `object`'s destuctor to run twice if it was recorded before
+        // and would cause `object`'s destructor to run twice if it was recorded before
         for i in 0..len {
             destructors.push(DropType {
                 drop_fn: drop_for_type::<T>,
diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs
index 7224b482ed7..7d5e235c885 100644
--- a/compiler/rustc_ast/src/ast.rs
+++ b/compiler/rustc_ast/src/ast.rs
@@ -222,6 +222,15 @@ pub enum AngleBracketedArg {
     Constraint(AssocTyConstraint),
 }
 
+impl AngleBracketedArg {
+    pub fn span(&self) -> Span {
+        match self {
+            AngleBracketedArg::Arg(arg) => arg.span(),
+            AngleBracketedArg::Constraint(constraint) => constraint.span,
+        }
+    }
+}
+
 impl Into<Option<P<GenericArgs>>> for AngleBracketedArgs {
     fn into(self) -> Option<P<GenericArgs>> {
         Some(P(GenericArgs::AngleBracketed(self)))
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index d991027cb45..2bba7e618c0 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -303,6 +303,13 @@ impl TokenKind {
             _ => None,
         }
     }
+
+    pub fn should_end_const_arg(&self) -> bool {
+        match self {
+            Gt | Ge | BinOp(Shr) | BinOpEq(Shr) => true,
+            _ => false,
+        }
+    }
 }
 
 impl Token {
diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs
index ea1a7cfa5d3..3902df8a7ca 100644
--- a/compiler/rustc_codegen_llvm/src/back/write.rs
+++ b/compiler/rustc_codegen_llvm/src/back/write.rs
@@ -128,7 +128,8 @@ pub fn target_machine_factory(
     let (opt_level, _) = to_llvm_opt_settings(optlvl);
     let use_softfp = sess.opts.cg.soft_float;
 
-    let ffunction_sections = sess.target.options.function_sections;
+    let ffunction_sections =
+        sess.opts.debugging_opts.function_sections.unwrap_or(sess.target.options.function_sections);
     let fdata_sections = ffunction_sections;
 
     let code_model = to_llvm_code_model(sess.code_model());
diff --git a/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs b/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs
index 064467174ca..4ed88878418 100644
--- a/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs
+++ b/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs
@@ -29,7 +29,7 @@ impl<N: Idx> VecGraph<N> {
 
         // Create the *edge starts* array. We are iterating over over
         // the (sorted) edge pairs. We maintain the invariant that the
-        // length of the `node_starts` arary is enough to store the
+        // length of the `node_starts` array is enough to store the
         // current source node -- so when we see that the source node
         // for an edge is greater than the current length, we grow the
         // edge-starts array by just enough.
diff --git a/compiler/rustc_feature/src/active.rs b/compiler/rustc_feature/src/active.rs
index f5c530c192a..ad926a810e6 100644
--- a/compiler/rustc_feature/src/active.rs
+++ b/compiler/rustc_feature/src/active.rs
@@ -607,6 +607,9 @@ declare_features! (
     /// Allow anonymous constants from an inline `const` block
     (active, inline_const, "1.49.0", Some(76001), None),
 
+    /// Allows unsized fn parameters.
+    (active, unsized_fn_params, "1.49.0", Some(48055), None),
+
     // -------------------------------------------------------------------------
     // feature-group-end: actual feature gates
     // -------------------------------------------------------------------------
@@ -629,6 +632,7 @@ pub const INCOMPLETE_FEATURES: &[Symbol] = &[
     sym::specialization,
     sym::inline_const,
     sym::repr128,
+    sym::unsized_locals,
 ];
 
 /// Some features are not allowed to be used together at the same time, if
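Editor's note: to illustrate what the new `unsized_fn_params` gate (split out from `unsized_locals`) permits, here is a minimal sketch, assuming a nightly toolchain with the feature enabled; it is not part of the change itself.

```rust
#![feature(unsized_fn_params)]

// With `unsized_fn_params`, a parameter may have an unsized type such as `str`.
fn print_all(s: str) {
    println!("{}", &s);
}

fn main() {
    let boxed: Box<str> = "hello".into();
    // Moving the unsized value out of the box into the parameter is what the
    // gate allows; `unsized_locals` is not needed for this call.
    print_all(*boxed);
}
```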
diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs
index 62b12542877..193247af584 100644
--- a/compiler/rustc_hir/src/def.rs
+++ b/compiler/rustc_hir/src/def.rs
@@ -206,8 +206,10 @@ pub enum Res<Id = hir::HirId> {
     /// ```rust
     /// impl Foo { fn test() -> [u8; std::mem::size_of::<Self>()] {} }
     /// ```
+    /// We do however allow `Self` in repeat expression even if it is generic to not break code
+    /// which already works on stable while causing the `const_evaluatable_unchecked` future compat lint.
     ///
-    /// FIXME(lazy_normalization_consts): Remove this bodge once this feature is stable.
+    /// FIXME(lazy_normalization_consts): Remove this bodge once that feature is stable.
     SelfTy(Option<DefId> /* trait */, Option<(DefId, bool)> /* impl */),
     ToolMod, // e.g., `rustfmt` in `#[rustfmt::skip]`
 
diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
index f7e4ace8fc5..1402f70c220 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
@@ -1669,7 +1669,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
         self.note_error_origin(diag, cause, exp_found);
     }
 
-    fn get_impl_future_output_ty(&self, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
+    pub fn get_impl_future_output_ty(&self, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
         if let ty::Opaque(def_id, substs) = ty.kind() {
             let future_trait = self.tcx.require_lang_item(LangItem::Future, None);
             // Future::Output
diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs
index ff7bbf0562f..acded5351f8 100644
--- a/compiler/rustc_infer/src/infer/mod.rs
+++ b/compiler/rustc_infer/src/infer/mod.rs
@@ -678,8 +678,6 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 
     pub fn unsolved_variables(&self) -> Vec<Ty<'tcx>> {
         let mut inner = self.inner.borrow_mut();
-        // FIXME(const_generics): should there be an equivalent function for const variables?
-
         let mut vars: Vec<Ty<'_>> = inner
             .type_variables()
             .unsolved_variables()
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs
index bbb47a6e807..9dbd59506b1 100644
--- a/compiler/rustc_interface/src/passes.rs
+++ b/compiler/rustc_interface/src/passes.rs
@@ -63,6 +63,13 @@ impl mut_visit::MutVisitor for TokenStripper {
         i.tokens = None;
         mut_visit::noop_flat_map_item(i, self)
     }
+    fn flat_map_foreign_item(
+        &mut self,
+        mut i: P<ast::ForeignItem>,
+    ) -> SmallVec<[P<ast::ForeignItem>; 1]> {
+        i.tokens = None;
+        mut_visit::noop_flat_map_foreign_item(i, self)
+    }
     fn visit_block(&mut self, b: &mut P<ast::Block>) {
         b.tokens = None;
         mut_visit::noop_visit_block(b, self);
diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs
index 6553d0ecfdb..235e049c3f5 100644
--- a/compiler/rustc_interface/src/tests.rs
+++ b/compiler/rustc_interface/src/tests.rs
@@ -550,6 +550,7 @@ fn test_debugging_options_tracking_hash() {
     tracked!(force_overflow_checks, Some(true));
     tracked!(force_unstable_if_unmarked, true);
     tracked!(fuel, Some(("abc".to_string(), 99)));
+    tracked!(function_sections, Some(false));
     tracked!(human_readable_cgu_names, true);
     tracked!(inline_in_all_cgus, Some(true));
     tracked!(insert_sideeffect, true);
diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs
index 7658ffb0e3d..46a6c5861d5 100644
--- a/compiler/rustc_interface/src/util.rs
+++ b/compiler/rustc_interface/src/util.rs
@@ -115,6 +115,11 @@ impl Write for Sink {
         Ok(())
     }
 }
+impl io::LocalOutput for Sink {
+    fn clone_box(&self) -> Box<dyn io::LocalOutput> {
+        Box::new(Self(self.0.clone()))
+    }
+}
 
 /// Like a `thread::Builder::spawn` followed by a `join()`, but avoids the need
 /// for `'static` bounds.
diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs
index 1db59bfc39d..2ecdff1a18d 100644
--- a/compiler/rustc_lint/src/lib.rs
+++ b/compiler/rustc_lint/src/lib.rs
@@ -35,6 +35,8 @@
 #![feature(never_type)]
 #![feature(nll)]
 #![feature(or_patterns)]
+#![feature(half_open_range_patterns)]
+#![feature(exclusive_range_pattern)]
 #![recursion_limit = "256"]
 
 #[macro_use]
@@ -49,6 +51,7 @@ mod early;
 mod internal;
 mod late;
 mod levels;
+mod methods;
 mod non_ascii_idents;
 mod nonstandard_style;
 mod passes;
@@ -73,6 +76,7 @@ use rustc_span::Span;
 use array_into_iter::ArrayIntoIter;
 use builtin::*;
 use internal::*;
+use methods::*;
 use non_ascii_idents::*;
 use nonstandard_style::*;
 use redundant_semicolon::*;
@@ -160,6 +164,7 @@ macro_rules! late_lint_passes {
                 ArrayIntoIter: ArrayIntoIter,
                 ClashingExternDeclarations: ClashingExternDeclarations::new(),
                 DropTraitConstraints: DropTraitConstraints,
+                TemporaryCStringAsPtr: TemporaryCStringAsPtr,
             ]
         );
     };
diff --git a/compiler/rustc_lint/src/methods.rs b/compiler/rustc_lint/src/methods.rs
new file mode 100644
index 00000000000..8732845af0c
--- /dev/null
+++ b/compiler/rustc_lint/src/methods.rs
@@ -0,0 +1,106 @@
+use crate::LateContext;
+use crate::LateLintPass;
+use crate::LintContext;
+use rustc_hir::{Expr, ExprKind, PathSegment};
+use rustc_middle::ty;
+use rustc_span::{symbol::sym, ExpnKind, Span};
+
+declare_lint! {
+    /// The `temporary_cstring_as_ptr` lint detects getting the inner pointer of
+    /// a temporary `CString`.
+    ///
+    /// ### Example
+    ///
+    /// ```rust
+    /// # #![allow(unused)]
+    /// # use std::ffi::CString;
+    /// let c_str = CString::new("foo").unwrap().as_ptr();
+    /// ```
+    ///
+    /// {{produces}}
+    ///
+    /// ### Explanation
+    ///
+    /// The inner pointer of a `CString` lives only as long as the `CString` it
+    /// points to. Getting the inner pointer of a *temporary* `CString` allows the `CString`
+    /// to be dropped at the end of the statement, as it is not being referenced as far as the typesystem
+    /// is concerned. This means outside of the statement the pointer will point to freed memory, which
+    /// causes undefined behavior if the pointer is later dereferenced.
+    pub TEMPORARY_CSTRING_AS_PTR,
+    Warn,
+    "detects getting the inner pointer of a temporary `CString`"
+}
+
+declare_lint_pass!(TemporaryCStringAsPtr => [TEMPORARY_CSTRING_AS_PTR]);
+
+fn in_macro(span: Span) -> bool {
+    if span.from_expansion() {
+        !matches!(span.ctxt().outer_expn_data().kind, ExpnKind::Desugaring(..))
+    } else {
+        false
+    }
+}
+
+fn first_method_call<'tcx>(
+    expr: &'tcx Expr<'tcx>,
+) -> Option<(&'tcx PathSegment<'tcx>, &'tcx [Expr<'tcx>])> {
+    if let ExprKind::MethodCall(path, _, args, _) = &expr.kind {
+        if args.iter().any(|e| e.span.from_expansion()) { None } else { Some((path, *args)) }
+    } else {
+        None
+    }
+}
+
+impl<'tcx> LateLintPass<'tcx> for TemporaryCStringAsPtr {
+    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+        if in_macro(expr.span) {
+            return;
+        }
+
+        match first_method_call(expr) {
+            Some((path, args)) if path.ident.name == sym::as_ptr => {
+                let unwrap_arg = &args[0];
+                let as_ptr_span = path.ident.span;
+                match first_method_call(unwrap_arg) {
+                    Some((path, args))
+                        if path.ident.name == sym::unwrap || path.ident.name == sym::expect =>
+                    {
+                        let source_arg = &args[0];
+                        lint_cstring_as_ptr(cx, as_ptr_span, source_arg, unwrap_arg);
+                    }
+                    _ => return,
+                }
+            }
+            _ => return,
+        }
+    }
+}
+
+fn lint_cstring_as_ptr(
+    cx: &LateContext<'_>,
+    as_ptr_span: Span,
+    source: &rustc_hir::Expr<'_>,
+    unwrap: &rustc_hir::Expr<'_>,
+) {
+    let source_type = cx.typeck_results().expr_ty(source);
+    if let ty::Adt(def, substs) = source_type.kind() {
+        if cx.tcx.is_diagnostic_item(sym::result_type, def.did) {
+            if let ty::Adt(adt, _) = substs.type_at(0).kind() {
+                if cx.tcx.is_diagnostic_item(sym::cstring_type, adt.did) {
+                    cx.struct_span_lint(TEMPORARY_CSTRING_AS_PTR, as_ptr_span, |diag| {
+                        let mut diag = diag
+                            .build("getting the inner pointer of a temporary `CString`");
+                        diag.span_label(as_ptr_span, "this pointer will be invalid");
+                        diag.span_label(
+                            unwrap.span,
+                            "this `CString` is deallocated at the end of the statement, bind it to a variable to extend its lifetime",
+                        );
+                        diag.note("pointers do not have a lifetime; when calling `as_ptr` the `CString` will be deallocated at the end of the statement because nothing is referencing it as far as the type system is concerned");
+                        diag.help("for more information, see https://doc.rust-lang.org/reference/destructors.html");
+                        diag.emit();
+                    });
+                }
+            }
+        }
+    }
+}
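Editor's note: for context on the suggestion the new lint emits ("bind it to a variable to extend its lifetime"), a small illustration of the problematic pattern and its fix:

```rust
use std::ffi::CString;

fn main() {
    // Problematic: the temporary `CString` is dropped at the end of this
    // statement, so `dangling` would point to freed memory.
    // let dangling = CString::new("foo").unwrap().as_ptr();

    // Fixed: bind the `CString` to a variable so it outlives the pointer.
    let c_string = CString::new("foo").unwrap();
    let ptr = c_string.as_ptr();
    // `ptr` remains valid for as long as `c_string` is in scope.
    let _ = ptr;
}
```

The lint is warn-by-default (`Warn` above) and can be controlled with the usual attributes, e.g. `#![deny(temporary_cstring_as_ptr)]`.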
diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs
index 2409069031d..17f0d5632e6 100644
--- a/compiler/rustc_lint/src/unused.rs
+++ b/compiler/rustc_lint/src/unused.rs
@@ -250,13 +250,13 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
                     has_emitted
                 }
                 ty::Array(ty, len) => match len.try_eval_usize(cx.tcx, cx.param_env) {
+                    // If the array is empty we don't lint, to avoid false positives
+                    Some(0) | None => false,
                     // If the array is definitely non-empty, we can do `#[must_use]` checking.
-                    Some(n) if n != 0 => {
+                    Some(n) => {
                         let descr_pre = &format!("{}array{} of ", descr_pre, plural_suffix,);
                         check_must_use_ty(cx, ty, expr, span, descr_pre, descr_post, n as usize + 1)
                     }
-                    // Otherwise, we don't lint, to avoid false positives.
-                    _ => false,
                 },
                 ty::Closure(..) => {
                     cx.struct_span_lint(UNUSED_MUST_USE, span, |lint| {
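Editor's note: a sketch of the observable behavior this hunk reorganizes (not part of the change): `unused_must_use` fires for a discarded non-empty array of `#[must_use]` values, while empty arrays are skipped to avoid false positives.

```rust
#[must_use]
struct Token;

fn two() -> [Token; 2] {
    [Token, Token]
}

fn none() -> [Token; 0] {
    []
}

fn main() {
    two();  // warns: unused array of `Token` that must be used
    none(); // no warning: the `Some(0) | None` arm skips empty arrays
}
```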
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index fa885ce2e7c..5ccadb7e660 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -47,6 +47,8 @@
 #![feature(associated_type_bounds)]
 #![feature(rustc_attrs)]
 #![feature(int_error_matching)]
+#![feature(half_open_range_patterns)]
+#![feature(exclusive_range_pattern)]
 #![recursion_limit = "512"]
 
 #[macro_use]
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index c4ce8667c90..a753732d364 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -210,16 +210,6 @@ pub struct Body<'tcx> {
     /// We hold in this field all the constants we are not able to evaluate yet.
     pub required_consts: Vec<Constant<'tcx>>,
 
-    /// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
-    /// we'd statically know that no thing with interior mutability will ever be available to the
-    /// user without some serious unsafe code.  Now this means that our promoted is actually
-    /// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because
-    /// the index may be a runtime value. Such a promoted value is illegal because it has reachable
-    /// interior mutability. This flag just makes this situation very obvious where the previous
-    /// implementation without the flag hid this situation silently.
-    /// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
-    pub ignore_interior_mut_in_const_validation: bool,
-
     /// Does this body use generic parameters. This is used for the `ConstEvaluatable` check.
     ///
     /// Note that this does not actually mean that this body is not computable right now.
@@ -276,7 +266,6 @@ impl<'tcx> Body<'tcx> {
             var_debug_info,
             span,
             required_consts: Vec::new(),
-            ignore_interior_mut_in_const_validation: false,
             is_polymorphic: false,
             predecessor_cache: PredecessorCache::new(),
         };
@@ -306,7 +295,6 @@ impl<'tcx> Body<'tcx> {
             required_consts: Vec::new(),
             generator_kind: None,
             var_debug_info: Vec::new(),
-            ignore_interior_mut_in_const_validation: false,
             is_polymorphic: false,
             predecessor_cache: PredecessorCache::new(),
         };
diff --git a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs
index bf1f5b81c9f..2f7707b9498 100644
--- a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs
+++ b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs
@@ -201,13 +201,13 @@ impl<'tcx> TyS<'tcx> {
             ),
 
             Array(ty, len) => match len.try_eval_usize(tcx, param_env) {
+                Some(0) | None => DefIdForest::empty(),
                 // If the array is definitely non-empty, it's uninhabited if
                 // the type of its elements is uninhabited.
-                Some(n) if n != 0 => ty.uninhabited_from(tcx, param_env),
-                _ => DefIdForest::empty(),
+                Some(1..) => ty.uninhabited_from(tcx, param_env),
             },
 
-            // References to uninitialised memory is valid for any type, including
+            // References to uninitialised memory are valid for any type, including
             // uninhabited types, in unsafe code, so we treat all references as
             // inhabited.
             // The precise semantics of inhabitedness with respect to references is currently
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index 845fa8a47ae..a400b85cdb7 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -266,6 +266,10 @@ impl<'tcx> AssociatedItems<'tcx> {
         self.items.iter().map(|(_, v)| *v)
     }
 
+    pub fn len(&self) -> usize {
+        self.items.len()
+    }
+
     /// Returns an iterator over all associated items with the given name, ignoring hygiene.
     pub fn filter_by_name_unhygienic(
         &self,
diff --git a/compiler/rustc_middle/src/ty/relate.rs b/compiler/rustc_middle/src/ty/relate.rs
index c4df0bba726..ef5034e218d 100644
--- a/compiler/rustc_middle/src/ty/relate.rs
+++ b/compiler/rustc_middle/src/ty/relate.rs
@@ -490,7 +490,7 @@ pub fn super_relate_consts<R: TypeRelation<'tcx>>(
     let eagerly_eval = |x: &'tcx ty::Const<'tcx>| x.eval(tcx, relation.param_env()).val;
 
     // FIXME(eddyb) doesn't look like everything below checks that `a.ty == b.ty`.
-    // We could probably always assert it early, as `const` generic parameters
+    // We could probably always assert it early, as const generic parameters
     // are not allowed to depend on other generic parameters, i.e. are concrete.
     // (although there could be normalization differences)
 
diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs
index 0fd48d09282..431fa30ed0f 100644
--- a/compiler/rustc_middle/src/ty/sty.rs
+++ b/compiler/rustc_middle/src/ty/sty.rs
@@ -1834,10 +1834,10 @@ impl<'tcx> TyS<'tcx> {
             }
             ty::Array(ty, len) => {
                 match len.try_eval_usize(tcx, ParamEnv::empty()) {
+                    Some(0) | None => false,
                     // If the array is definitely non-empty, it's uninhabited if
                     // the type of its elements is uninhabited.
-                    Some(n) if n != 0 => ty.conservative_is_privately_uninhabited(tcx),
-                    _ => false,
+                    Some(1..) => ty.conservative_is_privately_uninhabited(tcx),
                 }
             }
             ty::Ref(..) => {
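Editor's note: the `Some(0) | None` / `Some(1..)` rewrites above rely on the `half_open_range_patterns` and `exclusive_range_pattern` gates enabled earlier in this diff. A stand-alone sketch of the pattern style, assuming a nightly toolchain with both features:

```rust
#![feature(half_open_range_patterns)]
#![feature(exclusive_range_pattern)]

fn classify(len: Option<u64>) -> &'static str {
    match len {
        // Unknown or zero length: treat conservatively.
        Some(0) | None => "empty or unknown",
        // `1..` is a half-open range pattern: any value >= 1.
        Some(1..) => "definitely non-empty",
    }
}

fn main() {
    assert_eq!(classify(Some(3)), "definitely non-empty");
    assert_eq!(classify(None), "empty or unknown");
}
```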
diff --git a/compiler/rustc_mir/src/borrow_check/type_check/mod.rs b/compiler/rustc_mir/src/borrow_check/type_check/mod.rs
index 4fc1c570e46..409399094e8 100644
--- a/compiler/rustc_mir/src/borrow_check/type_check/mod.rs
+++ b/compiler/rustc_mir/src/borrow_check/type_check/mod.rs
@@ -974,6 +974,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         checker
     }
 
+    fn unsized_feature_enabled(&self) -> bool {
+        let features = self.tcx().features();
+        features.unsized_locals || features.unsized_fn_params
+    }
+
     /// Equate the inferred type and the annotated type for user type annotations
     fn check_user_type_annotations(&mut self) {
         debug!(
@@ -1456,7 +1461,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                 }
 
                 self.check_rvalue(body, rv, location);
-                if !self.tcx().features().unsized_locals {
+                if !self.unsized_feature_enabled() {
                     let trait_ref = ty::TraitRef {
                         def_id: tcx.require_lang_item(LangItem::Sized, Some(self.last_span)),
                         substs: tcx.mk_substs_trait(place_ty, &[]),
@@ -1717,9 +1722,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                     );
                 }
 
-                // When `#![feature(unsized_locals)]` is not enabled,
+                // When `unsized_fn_params` and `unsized_locals` are both not enabled,
                 // this check is done at `check_local`.
-                if self.tcx().features().unsized_locals {
+                if self.unsized_feature_enabled() {
                     let span = term.source_info.span;
                     self.ensure_place_sized(dest_ty, span);
                 }
@@ -1880,9 +1885,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             LocalKind::Var | LocalKind::Temp => {}
         }
 
-        // When `#![feature(unsized_locals)]` is enabled, only function calls
+        // When `unsized_fn_params` or `unsized_locals` is enabled, only function calls
         // and nullary ops are checked in `check_call_dest`.
-        if !self.tcx().features().unsized_locals {
+        if !self.unsized_feature_enabled() {
             let span = local_decl.source_info.span;
             let ty = local_decl.ty;
             self.ensure_place_sized(ty, span);
@@ -2024,7 +2029,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 
             Rvalue::NullaryOp(_, ty) => {
                 // Even with unsized locals cannot box an unsized value.
-                if self.tcx().features().unsized_locals {
+                if self.unsized_feature_enabled() {
                     let span = body.source_info(location).span;
                     self.ensure_place_sized(ty, span);
                 }
diff --git a/compiler/rustc_mir/src/const_eval/eval_queries.rs b/compiler/rustc_mir/src/const_eval/eval_queries.rs
index 6ef73b04238..7b9a4ec873d 100644
--- a/compiler/rustc_mir/src/const_eval/eval_queries.rs
+++ b/compiler/rustc_mir/src/const_eval/eval_queries.rs
@@ -1,8 +1,8 @@
 use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr, MemoryExtra};
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
-    intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, GlobalId, Immediate,
-    InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
+    intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
+    Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
     ScalarMaybeUninit, StackPopCleanup,
 };
 
@@ -59,23 +59,15 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     ecx.run()?;
 
     // Intern the result
-    // FIXME: since the DefId of a promoted is the DefId of its owner, this
-    // means that promoteds in statics are actually interned like statics!
-    // However, this is also currently crucial because we promote mutable
-    // non-empty slices in statics to extend their lifetime, and this
-    // ensures that they are put into a mutable allocation.
-    // For other kinds of promoteds in statics (like array initializers), this is rather silly.
-    let intern_kind = match tcx.static_mutability(cid.instance.def_id()) {
-        Some(m) => InternKind::Static(m),
-        None if cid.promoted.is_some() => InternKind::Promoted,
-        _ => InternKind::Constant,
+    let intern_kind = if cid.promoted.is_some() {
+        InternKind::Promoted
+    } else {
+        match tcx.static_mutability(cid.instance.def_id()) {
+            Some(m) => InternKind::Static(m),
+            None => InternKind::Constant,
+        }
     };
-    intern_const_alloc_recursive(
-        ecx,
-        intern_kind,
-        ret,
-        body.ignore_interior_mut_in_const_validation,
-    );
+    intern_const_alloc_recursive(ecx, intern_kind, ret);
 
     debug!("eval_body_using_ecx done: {:?}", *ret);
     Ok(ret)
@@ -376,16 +368,23 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
             // Since evaluation had no errors, valiate the resulting constant:
             let validation = try {
                 // FIXME do not validate promoteds until a decision on
-                // https://github.com/rust-lang/rust/issues/67465 is made
+                // https://github.com/rust-lang/rust/issues/67465 and
+                // https://github.com/rust-lang/rust/issues/67534 is made.
+                // Promoteds can contain unexpected `UnsafeCell` and reference `static`s, but their
+                // otherwise restricted form ensures that this is still sound. We just lose the
+                // extra safety net of some of the dynamic checks. They can also contain invalid
+                // values, but since we do not usually check intermediate results of a computation
+                // for validity, it might be surprising to do that here.
                 if cid.promoted.is_none() {
                     let mut ref_tracking = RefTracking::new(mplace);
+                    let mut inner = false;
                     while let Some((mplace, path)) = ref_tracking.todo.pop() {
-                        ecx.const_validate_operand(
-                            mplace.into(),
-                            path,
-                            &mut ref_tracking,
-                            /*may_ref_to_static*/ ecx.memory.extra.can_access_statics,
-                        )?;
+                        let mode = match tcx.static_mutability(cid.instance.def_id()) {
+                            Some(_) => CtfeValidationMode::Regular, // a `static`
+                            None => CtfeValidationMode::Const { inner },
+                        };
+                        ecx.const_validate_operand(mplace.into(), path, &mut ref_tracking, mode)?;
+                        inner = true;
                     }
                 }
             };
diff --git a/compiler/rustc_mir/src/const_eval/mod.rs b/compiler/rustc_mir/src/const_eval/mod.rs
index 4b235e1aa4a..11a211ef7b3 100644
--- a/compiler/rustc_mir/src/const_eval/mod.rs
+++ b/compiler/rustc_mir/src/const_eval/mod.rs
@@ -29,7 +29,7 @@ pub(crate) fn const_caller_location(
     let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);
 
     let loc_place = ecx.alloc_caller_location(file, line, col);
-    intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place, false);
+    intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place);
     ConstValue::Scalar(loc_place.ptr)
 }
 
diff --git a/compiler/rustc_mir/src/dataflow/impls/borrows.rs b/compiler/rustc_mir/src/dataflow/impls/borrows.rs
index 0be13b6ba81..6b7889c4d9e 100644
--- a/compiler/rustc_mir/src/dataflow/impls/borrows.rs
+++ b/compiler/rustc_mir/src/dataflow/impls/borrows.rs
@@ -177,7 +177,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
         //
         // We are careful always to call this function *before* we
         // set up the gen-bits for the statement or
-        // termanator. That way, if the effect of the statement or
+        // terminator. That way, if the effect of the statement or
         // terminator *does* introduce a new loan of the same
         // region, then setting that gen-bit will override any
         // potential kill introduced here.
diff --git a/compiler/rustc_mir/src/dataflow/impls/liveness.rs b/compiler/rustc_mir/src/dataflow/impls/liveness.rs
index b0da28156d1..a2b0713cd7d 100644
--- a/compiler/rustc_mir/src/dataflow/impls/liveness.rs
+++ b/compiler/rustc_mir/src/dataflow/impls/liveness.rs
@@ -8,7 +8,7 @@ use crate::dataflow::{AnalysisDomain, Backward, GenKill, GenKillAnalysis};
 ///
 /// This analysis considers references as being used only at the point of the
 /// borrow. In other words, this analysis does not track uses because of references that already
-/// exist. See [this `mir-datalow` test][flow-test] for an example. You almost never want to use
+/// exist. See [this `mir-dataflow` test][flow-test] for an example. You almost never want to use
 /// this analysis without also looking at the results of [`MaybeBorrowedLocals`].
 ///
 /// [`MaybeBorrowedLocals`]: ../struct.MaybeBorrowedLocals.html
@@ -134,7 +134,7 @@ impl DefUse {
 
             // `MutatingUseContext::Call` and `MutatingUseContext::Yield` indicate that this is the
             // destination place for a `Call` return or `Yield` resume respectively. Since this is
-            // only a `Def` when the function returns succesfully, we handle this case separately
+            // only a `Def` when the function returns successfully, we handle this case separately
             // in `call_return_effect` above.
             PlaceContext::MutatingUse(MutatingUseContext::Call | MutatingUseContext::Yield) => None,
 
diff --git a/compiler/rustc_mir/src/interpret/intern.rs b/compiler/rustc_mir/src/interpret/intern.rs
index 846ca189900..5e5c74a3723 100644
--- a/compiler/rustc_mir/src/interpret/intern.rs
+++ b/compiler/rustc_mir/src/interpret/intern.rs
@@ -2,12 +2,23 @@
 //!
 //! After a const evaluation has computed a value, before we destroy the const evaluator's session
 //! memory, we need to extract all memory allocations to the global memory pool so they stay around.
+//!
+//! In principle, this is not very complicated: we recursively walk the final value, follow all the
+//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
+//! is picking the right mutability for the allocations in a `static` initializer: we want to make
+//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
+//! same time, we need to make memory that could be mutated by the program mutable to avoid
+//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
+//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
+//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
+//! always immutable. At least for `const` however we use this opportunity to reject any `const`
+//! that contains allocations whose mutability we cannot identify.)
 
 use super::validity::RefTracking;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_hir as hir;
 use rustc_middle::mir::interpret::InterpResult;
-use rustc_middle::ty::{self, layout::TyAndLayout, query::TyCtxtAt, Ty};
+use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
 use rustc_target::abi::Size;
 
 use rustc_ast::Mutability;
@@ -40,11 +51,6 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> {
     /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
     /// the intern mode of references we encounter.
     inside_unsafe_cell: bool,
-
-    /// This flag is to avoid triggering UnsafeCells are not allowed behind references in constants
-    /// for promoteds.
-    /// It's a copy of `mir::Body`'s ignore_interior_mut_in_const_validation field
-    ignore_interior_mut_in_const: bool,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
@@ -53,22 +59,14 @@ enum InternMode {
     /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
     /// is *mutable*.
     Static(hir::Mutability),
-    /// The "base value" of a const, which can have `UnsafeCell` (as in `const FOO: Cell<i32>`),
-    /// but that interior mutability is simply ignored.
-    ConstBase,
-    /// The "inner values" of a const with references, where `UnsafeCell` is an error.
-    ConstInner,
+    /// A `const`.
+    Const,
 }
 
 /// Signalling data structure to ensure we don't recurse
 /// into the memory of other constants or statics
 struct IsStaticOrFn;
 
-fn mutable_memory_in_const(tcx: TyCtxtAt<'_>, kind: &str) {
-    // FIXME: show this in validation instead so we can point at where in the value the error is?
-    tcx.sess.span_err(tcx.span, &format!("mutable memory ({}) is not allowed in constant", kind));
-}
-
 /// Intern an allocation without looking at its children.
 /// `mode` is the mode of the environment where we found this pointer.
 /// `mutablity` is the mutability of the place to be interned; even if that says
@@ -129,9 +127,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
         // See const_eval::machine::MemoryExtra::can_access_statics for why
         // immutability is so important.
 
-        // There are no sensible checks we can do here; grep for `mutable_memory_in_const` to
-        // find the checks we are doing elsewhere to avoid even getting here for memory
-        // that "wants" to be mutable.
+        // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
         alloc.mutability = Mutability::Not;
     };
     // link the alloc id to the actual allocation
@@ -167,17 +163,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
         mplace: MPlaceTy<'tcx>,
         fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
     ) -> InterpResult<'tcx> {
+        // ZSTs cannot contain pointers, so we can skip them.
+        if mplace.layout.is_zst() {
+            return Ok(());
+        }
+
         if let Some(def) = mplace.layout.ty.ty_adt_def() {
             if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
-                if self.mode == InternMode::ConstInner && !self.ignore_interior_mut_in_const {
-                    // We do not actually make this memory mutable.  But in case the user
-                    // *expected* it to be mutable, make sure we error.  This is just a
-                    // sanity check to prevent users from accidentally exploiting the UB
-                    // they caused.  It also helps us to find cases where const-checking
-                    // failed to prevent an `UnsafeCell` (but as `ignore_interior_mut_in_const`
-                    // shows that part is not airtight).
-                    mutable_memory_in_const(self.ecx.tcx, "`UnsafeCell`");
-                }
                 // We are crossing over an `UnsafeCell`, we can mutate again. This means that
                 // References we encounter inside here are interned as pointing to mutable
                 // allocations.
@@ -189,11 +181,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
             }
         }
 
-        // ZSTs cannot contain pointers, so we can skip them.
-        if mplace.layout.is_zst() {
-            return Ok(());
-        }
-
         self.walk_aggregate(mplace, fields)
     }
 
@@ -213,7 +200,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
                 if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() {
                     // Explicitly choose const mode here, since vtables are immutable, even
                     // if the reference of the fat pointer is mutable.
-                    self.intern_shallow(vtable.alloc_id, InternMode::ConstInner, None);
+                    self.intern_shallow(vtable.alloc_id, InternMode::Const, None);
                 } else {
                     // Validation will error (with a better message) on an invalid vtable pointer.
                     // Let validation show the error message, but make sure it *does* error.
@@ -225,7 +212,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
             // Only recurse for allocation-backed pointers.
             if let Scalar::Ptr(ptr) = mplace.ptr {
                 // Compute the mode with which we intern this. Our goal here is to make as many
-                // statics as we can immutable so they can be placed in const memory by LLVM.
+                // statics as we can immutable so they can be placed in read-only memory by LLVM.
                 let ref_mode = match self.mode {
                     InternMode::Static(mutbl) => {
                         // In statics, merge outer mutability with reference mutability and
@@ -259,27 +246,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
                             }
                         }
                     }
-                    InternMode::ConstBase | InternMode::ConstInner => {
-                        // Ignore `UnsafeCell`, everything is immutable.  Do some sanity checking
-                        // for mutable references that we encounter -- they must all be ZST.
-                        // This helps to prevent users from accidentally exploiting UB that they
-                        // caused (by somehow getting a mutable reference in a `const`).
-                        if ref_mutability == Mutability::Mut {
-                            match referenced_ty.kind() {
-                                ty::Array(_, n) if n.eval_usize(*tcx, self.ecx.param_env) == 0 => {}
-                                ty::Slice(_)
-                                    if mplace.meta.unwrap_meta().to_machine_usize(self.ecx)?
-                                        == 0 => {}
-                                _ => mutable_memory_in_const(tcx, "`&mut`"),
-                            }
-                        } else {
-                            // A shared reference. We cannot check `freeze` here due to references
-                            // like `&dyn Trait` that are actually immutable.  We do check for
-                            // concrete `UnsafeCell` when traversing the pointee though (if it is
-                            // a new allocation, not yet interned).
-                        }
-                        // Go on with the "inner" rules.
-                        InternMode::ConstInner
+                    InternMode::Const => {
+                        // Ignore `UnsafeCell`, everything is immutable.  Validity does some sanity
+                        // checking for mutable references that we encounter -- they must all be
+                        // ZST.
+                        InternMode::Const
                     }
                 };
                 match self.intern_shallow(ptr.alloc_id, ref_mode, Some(referenced_ty)) {
@@ -318,7 +289,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
     ecx: &mut InterpCx<'mir, 'tcx, M>,
     intern_kind: InternKind,
     ret: MPlaceTy<'tcx>,
-    ignore_interior_mut_in_const: bool,
 ) where
     'tcx: 'mir,
 {
@@ -327,7 +297,7 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
         InternKind::Static(mutbl) => InternMode::Static(mutbl),
         // `Constant` includes array lengths.
         // `Promoted` includes non-`Copy` array initializers and `rustc_args_required_const` arguments.
-        InternKind::Constant | InternKind::Promoted => InternMode::ConstBase,
+        InternKind::Constant | InternKind::Promoted => InternMode::Const,
     };
 
     // Type based interning.
@@ -357,7 +327,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
             ecx,
             mode,
             leftover_allocations,
-            ignore_interior_mut_in_const,
             inside_unsafe_cell: false,
         }
         .visit_value(mplace);
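Editor's note: the module comment added above explains why interning must pick a mutability per allocation. As a rough user-side illustration (not compiler-internal code): memory reachable through interior mutability in a `static` must stay mutable, while plainly immutable data can be marked immutable and placed in read-only memory.

```rust
use std::sync::atomic::{AtomicU32, Ordering};

// The allocation behind `COUNTER` can be mutated at runtime, so interning must
// keep it mutable.
static COUNTER: AtomicU32 = AtomicU32::new(0);

// The string data behind `GREETING` can never be mutated, so it can be placed
// in read-only memory.
static GREETING: &str = "hello";

fn main() {
    COUNTER.fetch_add(1, Ordering::Relaxed);
    println!("{} ({})", GREETING, COUNTER.load(Ordering::Relaxed));
}
```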
diff --git a/compiler/rustc_mir/src/interpret/mod.rs b/compiler/rustc_mir/src/interpret/mod.rs
index a931b0bbe97..a29ef117ace 100644
--- a/compiler/rustc_mir/src/interpret/mod.rs
+++ b/compiler/rustc_mir/src/interpret/mod.rs
@@ -24,7 +24,7 @@ pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackP
 pub use self::memory::{AllocCheck, FnVal, Memory, MemoryKind};
 pub use self::operand::{ImmTy, Immediate, OpTy, Operand};
 pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};
-pub use self::validity::RefTracking;
+pub use self::validity::{CtfeValidationMode, RefTracking};
 pub use self::visitor::{MutValueVisitor, ValueVisitor};
 
 crate use self::intrinsics::eval_nullary_intrinsic;
diff --git a/compiler/rustc_mir/src/interpret/validity.rs b/compiler/rustc_mir/src/interpret/validity.rs
index c38f25564e8..2d235d65c4d 100644
--- a/compiler/rustc_mir/src/interpret/validity.rs
+++ b/compiler/rustc_mir/src/interpret/validity.rs
@@ -113,6 +113,17 @@ pub enum PathElem {
     DynDowncast,
 }
 
+/// Extra things to check for during validation of CTFE results.
+pub enum CtfeValidationMode {
+    /// Regular validation, nothing special happening.
+    Regular,
+    /// Validation of a `const`. `inner` says if this is an inner, indirect allocation (as opposed
+    /// to the top-level const allocation).
+    /// Being an inner allocation makes a difference because the top-level allocation of a `const`
+    /// is copied for each use, but the inner allocations are implicitly shared.
+    Const { inner: bool },
+}
+
 /// State for tracking recursive validation of references
 pub struct RefTracking<T, PATH = ()> {
     pub seen: FxHashSet<T>,
@@ -202,9 +213,9 @@ struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
     /// starts must not be changed!  `visit_fields` and `visit_array` rely on
     /// this stack discipline.
     path: Vec<PathElem>,
-    ref_tracking_for_consts:
-        Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
-    may_ref_to_static: bool,
+    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
+    /// `None` indicates this is not validating for CTFE (but for runtime).
+    ctfe_mode: Option<CtfeValidationMode>,
     ecx: &'rt InterpCx<'mir, 'tcx, M>,
 }
 
@@ -418,7 +429,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 { "a dangling {} (use-after-free)", kind },
         );
         // Recursive checking
-        if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
+        if let Some(ref mut ref_tracking) = self.ref_tracking {
             if let Some(ptr) = ptr {
                 // not a ZST
                 // Skip validation entirely for some external statics
@@ -426,19 +437,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 if let Some(GlobalAlloc::Static(did)) = alloc_kind {
                     assert!(!self.ecx.tcx.is_thread_local_static(did));
                     assert!(self.ecx.tcx.is_static(did));
-                    if self.may_ref_to_static {
-                        // We skip checking other statics. These statics must be sound by
-                        // themselves, and the only way to get broken statics here is by using
-                        // unsafe code.
-                        // The reasons we don't check other statics is twofold. For one, in all
-                        // sound cases, the static was already validated on its own, and second, we
-                        // trigger cycle errors if we try to compute the value of the other static
-                        // and that static refers back to us.
-                        // We might miss const-invalid data,
-                        // but things are still sound otherwise (in particular re: consts
-                        // referring to statics).
-                        return Ok(());
-                    } else {
+                    if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
                         // See const_eval::machine::MemoryExtra::can_access_statics for why
                         // this check is so important.
                         // This check is reachable when the const just referenced the static,
@@ -447,6 +446,17 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                             { "a {} pointing to a static variable", kind }
                         );
                     }
+                    // We skip checking other statics. These statics must be sound by
+                    // themselves, and the only way to get broken statics here is by using
+                    // unsafe code.
+                    // The reasons we don't check other statics is twofold. For one, in all
+                    // sound cases, the static was already validated on its own, and second, we
+                    // trigger cycle errors if we try to compute the value of the other static
+                    // and that static refers back to us.
+                    // We might miss const-invalid data,
+                    // but things are still sound otherwise (in particular re: consts
+                    // referring to statics).
+                    return Ok(());
                 }
             }
             // Proceed recursively even for ZST, no reason to skip them!
@@ -504,7 +514,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 let value = self.ecx.read_scalar(value)?;
                 // NOTE: Keep this in sync with the array optimization for int/float
                 // types below!
-                if self.ref_tracking_for_consts.is_some() {
+                if self.ctfe_mode.is_some() {
                     // Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
                     let is_bits = value.check_init().map_or(false, |v| v.is_bits());
                     if !is_bits {
@@ -532,7 +542,17 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 }
                 Ok(true)
             }
-            ty::Ref(..) => {
+            ty::Ref(_, ty, mutbl) => {
+                if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. }))
+                    && *mutbl == hir::Mutability::Mut
+                {
+                    // A mutable reference inside a const? That does not seem right (except if it is
+                    // a ZST).
+                    let layout = self.ecx.layout_of(ty)?;
+                    if !layout.is_zst() {
+                        throw_validation_failure!(self.path, { "mutable reference in a `const`" });
+                    }
+                }
                 self.check_safe_pointer(value, "reference")?;
                 Ok(true)
             }
@@ -559,9 +579,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
                 // Nothing to check.
                 Ok(true)
             }
-            // The above should be all the (inhabited) primitive types. The rest is compound, we
+            // The above should be all the primitive types. The rest is compound, we
             // check them by visiting their fields/variants.
-            // (`Str` UTF-8 check happens in `visit_aggregate`, too.)
             ty::Adt(..)
             | ty::Tuple(..)
             | ty::Array(..)
@@ -723,6 +742,15 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
         // Sanity check: `builtin_deref` does not know any pointers that are not primitive.
         assert!(op.layout.ty.builtin_deref(true).is_none());
 
+        // Special check preventing `UnsafeCell` in constants
+        if let Some(def) = op.layout.ty.ty_adt_def() {
+            if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { inner: true }))
+                && Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type()
+            {
+                throw_validation_failure!(self.path, { "`UnsafeCell` in a `const`" });
+            }
+        }
+
         // Recursively walk the value at its type.
         self.walk_value(op)?;
 
@@ -814,7 +842,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                     self.ecx,
                     ptr,
                     size,
-                    /*allow_uninit_and_ptr*/ self.ref_tracking_for_consts.is_none(),
+                    /*allow_uninit_and_ptr*/ self.ctfe_mode.is_none(),
                 ) {
                     // In the happy case, we needn't check anything else.
                     Ok(()) => {}
@@ -865,16 +893,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         &self,
         op: OpTy<'tcx, M::PointerTag>,
         path: Vec<PathElem>,
-        ref_tracking_for_consts: Option<
-            &mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
-        >,
-        may_ref_to_static: bool,
+        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
+        ctfe_mode: Option<CtfeValidationMode>,
     ) -> InterpResult<'tcx> {
         trace!("validate_operand_internal: {:?}, {:?}", *op, op.layout.ty);
 
         // Construct a visitor
-        let mut visitor =
-            ValidityVisitor { path, ref_tracking_for_consts, may_ref_to_static, ecx: self };
+        let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self };
 
         // Try to cast to ptr *once* instead of all the time.
         let op = self.force_op_ptr(op).unwrap_or(op);
@@ -902,16 +927,18 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     /// `ref_tracking` is used to record references that we encounter so that they
     /// can be checked recursively by an outside driving loop.
     ///
-    /// `may_ref_to_static` controls whether references are allowed to point to statics.
+    /// `constant` controls whether this must satisfy the rules for constants:
+    /// - no pointers to statics.
+    /// - no `UnsafeCell` or non-ZST `&mut`.
     #[inline(always)]
     pub fn const_validate_operand(
         &self,
         op: OpTy<'tcx, M::PointerTag>,
         path: Vec<PathElem>,
         ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
-        may_ref_to_static: bool,
+        ctfe_mode: CtfeValidationMode,
     ) -> InterpResult<'tcx> {
-        self.validate_operand_internal(op, path, Some(ref_tracking), may_ref_to_static)
+        self.validate_operand_internal(op, path, Some(ref_tracking), Some(ctfe_mode))
     }
 
     /// This function checks the data at `op` to be runtime-valid.
@@ -919,6 +946,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     /// It will error if the bits at the destination do not match the ones described by the layout.
     #[inline(always)]
     pub fn validate_operand(&self, op: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
-        self.validate_operand_internal(op, vec![], None, false)
+        self.validate_operand_internal(op, vec![], None, None)
     }
 }
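Editor's note: to make the `CtfeValidationMode::Const { inner }` distinction concrete, a language-side illustration under the rules this change enforces. Interior mutability is fine in the top-level value of a `const`, because every use gets its own copy, but behind a reference it would be shared, which is the case the `inner: true` validation is there to reject (in practice const checking usually catches such code even earlier).

```rust
use std::cell::Cell;

// Top-level interior mutability in a `const` is allowed: each use of `ALLOWED`
// produces a fresh copy of the value.
const ALLOWED: Cell<i32> = Cell::new(42);

// Interior mutability behind a reference would be shared between uses; this is
// the shape the "`UnsafeCell` in a `const`" check targets:
// const REJECTED: &Cell<i32> = &Cell::new(0);

fn main() {
    let c = ALLOWED; // a copy of the const's value
    c.set(1);        // mutating the copy does not affect other uses
    assert_eq!(ALLOWED.get(), 42);
    assert_eq!(c.get(), 1);
}
```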
diff --git a/compiler/rustc_mir/src/transform/const_prop.rs b/compiler/rustc_mir/src/transform/const_prop.rs
index c743104f6ba..d47e549b7be 100644
--- a/compiler/rustc_mir/src/transform/const_prop.rs
+++ b/compiler/rustc_mir/src/transform/const_prop.rs
@@ -9,7 +9,6 @@ use rustc_hir::def::DefKind;
 use rustc_hir::HirId;
 use rustc_index::bit_set::BitSet;
 use rustc_index::vec::IndexVec;
-use rustc_middle::mir::interpret::{InterpResult, Scalar};
 use rustc_middle::mir::visit::{
     MutVisitor, MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor,
 };
@@ -28,9 +27,10 @@ use rustc_trait_selection::traits;
 
 use crate::const_eval::ConstEvalErr;
 use crate::interpret::{
-    self, compile_time_machine, truncate, AllocId, Allocation, ConstValue, Frame, ImmTy, Immediate,
-    InterpCx, LocalState, LocalValue, MemPlace, Memory, MemoryKind, OpTy, Operand as InterpOperand,
-    PlaceTy, Pointer, ScalarMaybeUninit, StackPopCleanup,
+    self, compile_time_machine, truncate, AllocId, Allocation, ConstValue, CtfeValidationMode,
+    Frame, ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, Memory,
+    MemoryKind, OpTy, Operand as InterpOperand, PlaceTy, Pointer, Scalar, ScalarMaybeUninit,
+    StackPopCleanup,
 };
 use crate::transform::MirPass;
 
@@ -805,8 +805,9 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
             value,
             vec![],
             // FIXME: is ref tracking too expensive?
+            // FIXME: what is the point of ref tracking if we do not even check the tracked refs?
             &mut interpret::RefTracking::empty(),
-            /*may_ref_to_static*/ true,
+            CtfeValidationMode::Regular,
         ) {
             trace!("validation error, attempt failed: {:?}", e);
             return;
diff --git a/compiler/rustc_mir/src/transform/function_item_references.rs b/compiler/rustc_mir/src/transform/function_item_references.rs
new file mode 100644
index 00000000000..61427422e4b
--- /dev/null
+++ b/compiler/rustc_mir/src/transform/function_item_references.rs
@@ -0,0 +1,205 @@
+use rustc_errors::Applicability;
+use rustc_hir::def_id::DefId;
+use rustc_middle::mir::visit::Visitor;
+use rustc_middle::mir::*;
+use rustc_middle::ty::{
+    self,
+    subst::{GenericArgKind, Subst, SubstsRef},
+    PredicateAtom, Ty, TyCtxt, TyS,
+};
+use rustc_session::lint::builtin::FUNCTION_ITEM_REFERENCES;
+use rustc_span::{symbol::sym, Span};
+use rustc_target::spec::abi::Abi;
+
+use crate::transform::MirPass;
+
+pub struct FunctionItemReferences;
+
+impl<'tcx> MirPass<'tcx> for FunctionItemReferences {
+    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+        let mut checker = FunctionItemRefChecker { tcx, body };
+        checker.visit_body(&body);
+    }
+}
+
+struct FunctionItemRefChecker<'a, 'tcx> {
+    tcx: TyCtxt<'tcx>,
+    body: &'a Body<'tcx>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for FunctionItemRefChecker<'a, 'tcx> {
+    /// Emits a lint for function reference arguments bound by `fmt::Pointer` or passed to
+    /// `transmute`. This only handles arguments in calls outside macro expansions to avoid
+    /// double-counting function references formatted as pointers by macros.
+    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
+        if let TerminatorKind::Call {
+            func,
+            args,
+            destination: _,
+            cleanup: _,
+            from_hir_call: _,
+            fn_span: _,
+        } = &terminator.kind
+        {
+            let source_info = *self.body.source_info(location);
+            // Only handle function calls outside macros
+            if !source_info.span.from_expansion() {
+                let func_ty = func.ty(self.body, self.tcx);
+                if let ty::FnDef(def_id, substs_ref) = *func_ty.kind() {
+                    // Handle calls to `transmute`
+                    if self.tcx.is_diagnostic_item(sym::transmute, def_id) {
+                        let arg_ty = args[0].ty(self.body, self.tcx);
+                        for generic_inner_ty in arg_ty.walk() {
+                            if let GenericArgKind::Type(inner_ty) = generic_inner_ty.unpack() {
+                                if let Some(fn_id) = FunctionItemRefChecker::is_fn_ref(inner_ty) {
+                                    let ident = self.tcx.item_name(fn_id).to_ident_string();
+                                    let span = self.nth_arg_span(&args, 0);
+                                    self.emit_lint(ident, fn_id, source_info, span);
+                                }
+                            }
+                        }
+                    } else {
+                        self.check_bound_args(def_id, substs_ref, &args, source_info);
+                    }
+                }
+            }
+        }
+        self.super_terminator(terminator, location);
+    }
+    /// Emits a lint for function references formatted with `fmt::Pointer::fmt` by macros. These
+    /// cases are handled as operands instead of call terminators to avoid any dependence on
+    /// unstable, internal formatting details like whether `fmt` is called directly or not.
+    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
+        let source_info = *self.body.source_info(location);
+        if source_info.span.from_expansion() {
+            let op_ty = operand.ty(self.body, self.tcx);
+            if let ty::FnDef(def_id, substs_ref) = *op_ty.kind() {
+                if self.tcx.is_diagnostic_item(sym::pointer_trait_fmt, def_id) {
+                    let param_ty = substs_ref.type_at(0);
+                    if let Some(fn_id) = FunctionItemRefChecker::is_fn_ref(param_ty) {
+                        // The operand's ctxt wouldn't display the lint since it's inside a macro,
+                        // so we have to use the callsite's ctxt instead.
+                        let callsite_ctxt = source_info.span.source_callsite().ctxt();
+                        let span = source_info.span.with_ctxt(callsite_ctxt);
+                        let ident = self.tcx.item_name(fn_id).to_ident_string();
+                        self.emit_lint(ident, fn_id, source_info, span);
+                    }
+                }
+            }
+        }
+        self.super_operand(operand, location);
+    }
+}
+
+impl<'a, 'tcx> FunctionItemRefChecker<'a, 'tcx> {
+    /// Emits a lint for function reference arguments bound by `fmt::Pointer` in calls to the
+    /// function defined by `def_id` with the substitutions `substs_ref`.
+    fn check_bound_args(
+        &self,
+        def_id: DefId,
+        substs_ref: SubstsRef<'tcx>,
+        args: &Vec<Operand<'tcx>>,
+        source_info: SourceInfo,
+    ) {
+        let param_env = self.tcx.param_env(def_id);
+        let bounds = param_env.caller_bounds();
+        for bound in bounds {
+            if let Some(bound_ty) = self.is_pointer_trait(&bound.skip_binders()) {
+                // Get the argument types as they appear in the function signature.
+                let arg_defs = self.tcx.fn_sig(def_id).skip_binder().inputs();
+                for (arg_num, arg_def) in arg_defs.iter().enumerate() {
+                    // For all types reachable from the argument type in the fn sig
+                    for generic_inner_ty in arg_def.walk() {
+                        if let GenericArgKind::Type(inner_ty) = generic_inner_ty.unpack() {
+                            // If the inner type matches the type bound by `Pointer`
+                            if TyS::same_type(inner_ty, bound_ty) {
+                                // Do a substitution using the parameters from the callsite
+                                let subst_ty = inner_ty.subst(self.tcx, substs_ref);
+                                if let Some(fn_id) = FunctionItemRefChecker::is_fn_ref(subst_ty) {
+                                    let ident = self.tcx.item_name(fn_id).to_ident_string();
+                                    let span = self.nth_arg_span(args, arg_num);
+                                    self.emit_lint(ident, fn_id, source_info, span);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+    /// If the given predicate is the trait `fmt::Pointer`, returns the bound parameter type.
+    fn is_pointer_trait(&self, bound: &PredicateAtom<'tcx>) -> Option<Ty<'tcx>> {
+        if let ty::PredicateAtom::Trait(predicate, _) = bound {
+            if self.tcx.is_diagnostic_item(sym::pointer_trait, predicate.def_id()) {
+                Some(predicate.trait_ref.self_ty())
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    }
+    /// If a type is a reference or raw pointer to the anonymous type of a function definition,
+    /// returns that function's `DefId`.
+    fn is_fn_ref(ty: Ty<'tcx>) -> Option<DefId> {
+        let referent_ty = match ty.kind() {
+            ty::Ref(_, referent_ty, _) => Some(referent_ty),
+            ty::RawPtr(ty_and_mut) => Some(&ty_and_mut.ty),
+            _ => None,
+        };
+        referent_ty
+            .map(
+                |ref_ty| {
+                    if let ty::FnDef(def_id, _) = *ref_ty.kind() { Some(def_id) } else { None }
+                },
+            )
+            .unwrap_or(None)
+    }
+    fn nth_arg_span(&self, args: &Vec<Operand<'tcx>>, n: usize) -> Span {
+        match &args[n] {
+            Operand::Copy(place) | Operand::Move(place) => {
+                self.body.local_decls[place.local].source_info.span
+            }
+            Operand::Constant(constant) => constant.span,
+        }
+    }
+    fn emit_lint(&self, ident: String, fn_id: DefId, source_info: SourceInfo, span: Span) {
+        let lint_root = self.body.source_scopes[source_info.scope]
+            .local_data
+            .as_ref()
+            .assert_crate_local()
+            .lint_root;
+        let fn_sig = self.tcx.fn_sig(fn_id);
+        let unsafety = fn_sig.unsafety().prefix_str();
+        let abi = match fn_sig.abi() {
+            Abi::Rust => String::from(""),
+            other_abi => {
+                let mut s = String::from("extern \"");
+                s.push_str(other_abi.name());
+                s.push_str("\" ");
+                s
+            }
+        };
+        let num_args = fn_sig.inputs().map_bound(|inputs| inputs.len()).skip_binder();
+        let variadic = if fn_sig.c_variadic() { ", ..." } else { "" };
+        let ret = if fn_sig.output().skip_binder().is_unit() { "" } else { " -> _" };
+        self.tcx.struct_span_lint_hir(FUNCTION_ITEM_REFERENCES, lint_root, span, |lint| {
+            lint.build("taking a reference to a function item does not give a function pointer")
+                .span_suggestion(
+                    span,
+                    &format!("cast `{}` to obtain a function pointer", ident),
+                    format!(
+                        "{} as {}{}fn({}{}){}",
+                        ident,
+                        unsafety,
+                        abi,
+                        vec!["_"; num_args].join(", "),
+                        variadic,
+                        ret,
+                    ),
+                    Applicability::Unspecified,
+                )
+                .emit();
+        });
+    }
+}
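As an illustration of what the new FUNCTION_ITEM_REFERENCES lint is aimed at (a sketch; only the suggestion format is taken from `emit_lint` above, the surrounding example is assumed):

    fn foo() {}

    fn main() {
        // `foo` has its own zero-sized function-item type, so `&foo` is not a
        // function pointer: `{:p}` prints the address of a zero-sized temporary
        // rather than anything related to `foo`. This is what the lint flags.
        println!("{:p}", &foo);

        // The suggested replacement built in `emit_lint` for this signature
        // would be `foo as fn()`, i.e. an explicit cast to a function pointer.
        println!("{:p}", foo as fn());
    }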
diff --git a/compiler/rustc_mir/src/transform/instcombine.rs b/compiler/rustc_mir/src/transform/instcombine.rs
index c5c14ca7cae..59b7db24319 100644
--- a/compiler/rustc_mir/src/transform/instcombine.rs
+++ b/compiler/rustc_mir/src/transform/instcombine.rs
@@ -119,6 +119,11 @@ impl OptimizationFinder<'b, 'tcx> {
     }
 
     fn find_deref_of_address(&mut self, rvalue: &Rvalue<'tcx>, location: Location) -> Option<()> {
+        // FIXME(#78192): This optimization can result in unsoundness.
+        if !self.tcx.sess.opts.debugging_opts.unsound_mir_opts {
+            return None;
+        }
+
         // Look for the sequence
         //
         // _2 = &_1;
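Note that with the early return added above, `find_deref_of_address` is effectively disabled by default; it only runs when the corresponding debugging option is enabled, which (assuming the usual `-Z` spelling of `debugging_opts` fields) means passing `-Z unsound-mir-opts` explicitly.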
diff --git a/compiler/rustc_mir/src/transform/mod.rs b/compiler/rustc_mir/src/transform/mod.rs
index 20b8c90a9dc..89db6bb13ca 100644
--- a/compiler/rustc_mir/src/transform/mod.rs
+++ b/compiler/rustc_mir/src/transform/mod.rs
@@ -27,6 +27,7 @@ pub mod dest_prop;
 pub mod dump_mir;
 pub mod early_otherwise_branch;
 pub mod elaborate_drops;
+pub mod function_item_references;
 pub mod generator;
 pub mod inline;
 pub mod instcombine;
@@ -266,6 +267,7 @@ fn mir_const<'tcx>(
             // MIR-level lints.
             &check_packed_ref::CheckPackedRef,
             &check_const_item_mutation::CheckConstItemMutation,
+            &function_item_references::FunctionItemReferences,
             // What we need to do constant evaluation.
             &simplify::SimplifyCfg::new("initial"),
             &rustc_peek::SanityCheck,
diff --git a/compiler/rustc_mir/src/transform/promote_consts.rs b/compiler/rustc_mir/src/transform/promote_consts.rs
index 292380d7fec..927aae82a36 100644
--- a/compiler/rustc_mir/src/transform/promote_consts.rs
+++ b/compiler/rustc_mir/src/transform/promote_consts.rs
@@ -1170,7 +1170,7 @@ pub fn promote_candidates<'tcx>(
         let mut scope = body.source_scopes[candidate.source_info(body).scope].clone();
         scope.parent_scope = None;
 
-        let mut promoted = Body::new(
+        let promoted = Body::new(
             body.source, // `promoted` gets filled in below
             IndexVec::new(),
             IndexVec::from_elem_n(scope, 1),
@@ -1181,7 +1181,6 @@ pub fn promote_candidates<'tcx>(
             body.span,
             body.generator_kind,
         );
-        promoted.ignore_interior_mut_in_const_validation = true;
 
         let promoter = Promoter {
             promoted,
diff --git a/compiler/rustc_mir_build/src/build/expr/as_operand.rs b/compiler/rustc_mir_build/src/build/expr/as_operand.rs
index aac93f313f4..cf075abc94b 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_operand.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_operand.rs
@@ -165,7 +165,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 
         let tcx = this.hir.tcx();
 
-        if tcx.features().unsized_locals {
+        if tcx.features().unsized_fn_params {
             let ty = expr.ty;
             let span = expr.span;
             let param_env = this.hir.param_env;
diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs
index 714041ad4e8..0866892265b 100644
--- a/compiler/rustc_mir_build/src/lib.rs
+++ b/compiler/rustc_mir_build/src/lib.rs
@@ -9,6 +9,7 @@
 #![feature(control_flow_enum)]
 #![feature(crate_visibility_modifier)]
 #![feature(bool_to_option)]
+#![feature(once_cell)]
 #![feature(or_patterns)]
 #![recursion_limit = "256"]
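The `once_cell` feature gate added here is what allows `rustc_mir_build` to use `rustc_data_structures::sync::OnceCell` (presumably re-exporting the still-unstable standard-library `OnceCell`) for the head-constructor cache introduced in `_match.rs` below.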
 
diff --git a/compiler/rustc_mir_build/src/thir/pattern/_match.rs b/compiler/rustc_mir_build/src/thir/pattern/_match.rs
index 0449e149831..843a6c0e461 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/_match.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/_match.rs
@@ -137,8 +137,8 @@
 //!                 S(c, (r_1, p_2, .., p_n))
 //!                 S(c, (r_2, p_2, .., p_n))
 //!
-//! 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is
-//!    a pattern-stack.
+//! 2. We can pop a wildcard off the top of the stack. This is called `S(_, p)`, where `p` is
+//!    a pattern-stack. Note: the paper calls this `D(p)`.
 //!    This is used when we know there are missing constructor cases, but there might be
 //!    existing wildcard patterns, so to check the usefulness of the matrix, we have to check
 //!    all its *other* components.
@@ -150,8 +150,8 @@
 //!                 p_2, .., p_n
 //!         2.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting
 //!           stack.
-//!                 D((r_1, p_2, .., p_n))
-//!                 D((r_2, p_2, .., p_n))
+//!                 S(_, (r_1, p_2, .., p_n))
+//!                 S(_, (r_2, p_2, .., p_n))
 //!
 //! Note that the OR-patterns are not always used directly in Rust, but are used to derive the
 //! exhaustive integer matching rules, so they're written here for posterity.
@@ -181,7 +181,6 @@
 //! we ignore all the patterns in the first column of `P` that involve other constructors.
 //! This is where `S(c, P)` comes in:
 //! `U(P, p) := U(S(c, P), S(c, p))`
-//! This special case is handled in `is_useful_specialized`.
 //!
 //! For example, if `P` is:
 //!
@@ -204,8 +203,8 @@
 //! before.
 //! That's almost correct, but only works if there were no wildcards in those first
 //! components. So we need to check that `p` is useful with respect to the rows that
-//! start with a wildcard, if there are any. This is where `D` comes in:
-//! `U(P, p) := U(D(P), D(p))`
+//! start with a wildcard, if there are any. This is where `S(_, x)` comes in:
+//! `U(P, p) := U(S(_, P), S(_, p))`
 //!
 //! For example, if `P` is:
 //!
@@ -284,7 +283,7 @@
 //!       disjunction over every range. This is a bit more tricky to deal with: essentially we need
 //!       to form equivalence classes of subranges of the constructor range for which the behaviour
 //!       of the matrix `P` and new pattern `p` are the same. This is described in more
-//!       detail in `split_grouped_constructors`.
+//!       detail in `Constructor::split`.
 //!     + If some constructors are missing from the matrix, it turns out we don't need to do
 //!       anything special (because we know none of the integers are actually wildcards: i.e., we
 //!       can't span wildcards using ranges).
@@ -294,7 +293,8 @@ use self::Usefulness::*;
 use self::WitnessPreference::*;
 
 use rustc_data_structures::captures::Captures;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::sync::OnceCell;
 use rustc_index::vec::Idx;
 
 use super::{compare_const_vals, PatternFoldable, PatternFolder};
@@ -346,40 +346,40 @@ impl<'tcx> Pat<'tcx> {
 
 /// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]`
 /// works well.
-#[derive(Debug, Clone, PartialEq)]
-crate struct PatStack<'p, 'tcx>(SmallVec<[&'p Pat<'tcx>; 2]>);
+#[derive(Debug, Clone)]
+crate struct PatStack<'p, 'tcx> {
+    pats: SmallVec<[&'p Pat<'tcx>; 2]>,
+    /// Cache for the constructor of the head
+    head_ctor: OnceCell<Constructor<'tcx>>,
+}
 
 impl<'p, 'tcx> PatStack<'p, 'tcx> {
     crate fn from_pattern(pat: &'p Pat<'tcx>) -> Self {
-        PatStack(smallvec![pat])
+        Self::from_vec(smallvec![pat])
     }
 
     fn from_vec(vec: SmallVec<[&'p Pat<'tcx>; 2]>) -> Self {
-        PatStack(vec)
-    }
-
-    fn from_slice(s: &[&'p Pat<'tcx>]) -> Self {
-        PatStack(SmallVec::from_slice(s))
+        PatStack { pats: vec, head_ctor: OnceCell::new() }
     }
 
     fn is_empty(&self) -> bool {
-        self.0.is_empty()
+        self.pats.is_empty()
     }
 
     fn len(&self) -> usize {
-        self.0.len()
+        self.pats.len()
     }
 
     fn head(&self) -> &'p Pat<'tcx> {
-        self.0[0]
+        self.pats[0]
     }
 
-    fn to_tail(&self) -> Self {
-        PatStack::from_slice(&self.0[1..])
+    fn head_ctor<'a>(&'a self, cx: &MatchCheckCtxt<'p, 'tcx>) -> &'a Constructor<'tcx> {
+        self.head_ctor.get_or_init(|| pat_constructor(cx, self.head()))
     }
 
     fn iter(&self) -> impl Iterator<Item = &Pat<'tcx>> {
-        self.0.iter().copied()
+        self.pats.iter().copied()
     }
 
     // If the first pattern is an or-pattern, expand this pattern. Otherwise, return `None`.
@@ -391,7 +391,7 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> {
                 pats.iter()
                     .map(|pat| {
                         let mut new_patstack = PatStack::from_pattern(pat);
-                        new_patstack.0.extend_from_slice(&self.0[1..]);
+                        new_patstack.pats.extend_from_slice(&self.pats[1..]);
                         new_patstack
                     })
                     .collect(),
@@ -401,33 +401,29 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> {
         }
     }
 
-    /// This computes `D(self)`. See top of the file for explanations.
-    fn specialize_wildcard(&self) -> Option<Self> {
-        if self.head().is_wildcard() { Some(self.to_tail()) } else { None }
-    }
-
-    /// This computes `S(constructor, self)`. See top of the file for explanations.
-    fn specialize_constructor(
-        &self,
-        cx: &mut MatchCheckCtxt<'p, 'tcx>,
-        constructor: &Constructor<'tcx>,
-        ctor_wild_subpatterns: &Fields<'p, 'tcx>,
-        is_my_head_ctor: bool,
-    ) -> Option<PatStack<'p, 'tcx>> {
-        let new_fields = specialize_one_pattern(
-            cx,
-            self.head(),
-            constructor,
-            ctor_wild_subpatterns,
-            is_my_head_ctor,
-        )?;
-        Some(new_fields.push_on_patstack(&self.0[1..]))
+    /// This computes `S(self.head_ctor(), self)`. See top of the file for explanations.
+    ///
+    /// Structure patterns with a partial wildcard (`Foo { a: 42, .. }`) have their missing
+    /// fields filled with wildcard patterns.
+    ///
+    /// This is roughly the inverse of `Constructor::apply`.
+    fn pop_head_constructor(&self, ctor_wild_subpatterns: &Fields<'p, 'tcx>) -> PatStack<'p, 'tcx> {
+        // We pop the head pattern and push the new fields extracted from the arguments of
+        // `self.head()`.
+        let new_fields = ctor_wild_subpatterns.replace_with_pattern_arguments(self.head());
+        new_fields.push_on_patstack(&self.pats[1..])
     }
 }
 
 impl<'p, 'tcx> Default for PatStack<'p, 'tcx> {
     fn default() -> Self {
-        PatStack(smallvec![])
+        Self::from_vec(smallvec![])
+    }
+}
+
+impl<'p, 'tcx> PartialEq for PatStack<'p, 'tcx> {
+    fn eq(&self, other: &Self) -> bool {
+        self.pats == other.pats
     }
 }
 
@@ -436,40 +432,19 @@ impl<'p, 'tcx> FromIterator<&'p Pat<'tcx>> for PatStack<'p, 'tcx> {
     where
         T: IntoIterator<Item = &'p Pat<'tcx>>,
     {
-        PatStack(iter.into_iter().collect())
+        Self::from_vec(iter.into_iter().collect())
     }
 }
 
-/// Depending on the match patterns, the specialization process might be able to use a fast path.
-/// Tracks whether we can use the fast path and the lookup table needed in those cases.
-#[derive(Clone, Debug, PartialEq)]
-enum SpecializationCache {
-    /// Patterns consist of only enum variants.
-    /// Variant patterns does not intersect with each other (in contrast to range patterns),
-    /// so it is possible to precompute the result of `Matrix::specialize_constructor` at a
-    /// lower computational complexity.
-    /// `lookup` is responsible for holding the precomputed result of
-    /// `Matrix::specialize_constructor`, while `wilds` is used for two purposes: the first one is
-    /// the precomputed result of `Matrix::specialize_wildcard`, and the second is to be used as a
-    /// fallback for `Matrix::specialize_constructor` when it tries to apply a constructor that
-    /// has not been seen in the `Matrix`. See `update_cache` for further explanations.
-    Variants { lookup: FxHashMap<DefId, SmallVec<[usize; 1]>>, wilds: SmallVec<[usize; 1]> },
-    /// Does not belong to the cases above, use the slow path.
-    Incompatible,
-}
-
 /// A 2D matrix.
 #[derive(Clone, PartialEq)]
 crate struct Matrix<'p, 'tcx> {
     patterns: Vec<PatStack<'p, 'tcx>>,
-    cache: SpecializationCache,
 }
 
 impl<'p, 'tcx> Matrix<'p, 'tcx> {
     crate fn empty() -> Self {
-        // Use `SpecializationCache::Incompatible` as a placeholder; we will initialize it on the
-        // first call to `push`. See the first half of `update_cache`.
-        Matrix { patterns: vec![], cache: SpecializationCache::Incompatible }
+        Matrix { patterns: vec![] }
     }
 
     /// Pushes a new row to the matrix. If the row starts with an or-pattern, this expands it.
@@ -482,70 +457,6 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
             }
         } else {
             self.patterns.push(row);
-            self.update_cache(self.patterns.len() - 1);
-        }
-    }
-
-    fn update_cache(&mut self, idx: usize) {
-        let row = &self.patterns[idx];
-        // We don't know which kind of cache could be used until we see the first row; therefore an
-        // empty `Matrix` is initialized with `SpecializationCache::Empty`, then the cache is
-        // assigned the appropriate variant below on the first call to `push`.
-        if self.patterns.is_empty() {
-            self.cache = if row.is_empty() {
-                SpecializationCache::Incompatible
-            } else {
-                match *row.head().kind {
-                    PatKind::Variant { .. } => SpecializationCache::Variants {
-                        lookup: FxHashMap::default(),
-                        wilds: SmallVec::new(),
-                    },
-                    // Note: If the first pattern is a wildcard, then all patterns after that is not
-                    // useful. The check is simple enough so we treat it as the same as unsupported
-                    // patterns.
-                    _ => SpecializationCache::Incompatible,
-                }
-            };
-        }
-        // Update the cache.
-        match &mut self.cache {
-            SpecializationCache::Variants { ref mut lookup, ref mut wilds } => {
-                let head = row.head();
-                match *head.kind {
-                    _ if head.is_wildcard() => {
-                        // Per rule 1.3 in the top-level comments, a wildcard pattern is included in
-                        // the result of `specialize_constructor` for *any* `Constructor`.
-                        // We push the wildcard pattern to the precomputed result for constructors
-                        // that we have seen before; results for constructors we have not yet seen
-                        // defaults to `wilds`, which is updated right below.
-                        for (_, v) in lookup.iter_mut() {
-                            v.push(idx);
-                        }
-                        // Per rule 2.1 and 2.2 in the top-level comments, only wildcard patterns
-                        // are included in the result of `specialize_wildcard`.
-                        // What we do here is to track the wildcards we have seen; so in addition to
-                        // acting as the precomputed result of `specialize_wildcard`, `wilds` also
-                        // serves as the default value of `specialize_constructor` for constructors
-                        // that are not in `lookup`.
-                        wilds.push(idx);
-                    }
-                    PatKind::Variant { adt_def, variant_index, .. } => {
-                        // Handle the cases of rule 1.1 and 1.2 in the top-level comments.
-                        // A variant pattern can only be included in the results of
-                        // `specialize_constructor` for a particular constructor, therefore we are
-                        // using a HashMap to track that.
-                        lookup
-                            .entry(adt_def.variants[variant_index].def_id)
-                            // Default to `wilds` for absent keys. See above for an explanation.
-                            .or_insert_with(|| wilds.clone())
-                            .push(idx);
-                    }
-                    _ => {
-                        self.cache = SpecializationCache::Incompatible;
-                    }
-                }
-            }
-            SpecializationCache::Incompatible => {}
         }
     }
 
@@ -554,81 +465,26 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
         self.patterns.iter().map(|r| r.head())
     }
 
-    /// This computes `D(self)`. See top of the file for explanations.
-    fn specialize_wildcard(&self) -> Self {
-        match &self.cache {
-            SpecializationCache::Variants { wilds, .. } => {
-                let result =
-                    wilds.iter().filter_map(|&i| self.patterns[i].specialize_wildcard()).collect();
-                // When debug assertions are enabled, check the results against the "slow path"
-                // result.
-                debug_assert_eq!(
-                    result,
-                    Self {
-                        patterns: self.patterns.clone(),
-                        cache: SpecializationCache::Incompatible
-                    }
-                    .specialize_wildcard()
-                );
-                result
-            }
-            SpecializationCache::Incompatible => {
-                self.patterns.iter().filter_map(|r| r.specialize_wildcard()).collect()
-            }
-        }
+    /// Iterate over the first constructor of each row
+    fn head_ctors<'a>(
+        &'a self,
+        cx: &'a MatchCheckCtxt<'p, 'tcx>,
+    ) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'a> + Captures<'p> {
+        self.patterns.iter().map(move |r| r.head_ctor(cx))
     }
 
     /// This computes `S(constructor, self)`. See top of the file for explanations.
     fn specialize_constructor(
         &self,
-        cx: &mut MatchCheckCtxt<'p, 'tcx>,
-        constructor: &Constructor<'tcx>,
+        pcx: PatCtxt<'_, 'p, 'tcx>,
+        ctor: &Constructor<'tcx>,
         ctor_wild_subpatterns: &Fields<'p, 'tcx>,
     ) -> Matrix<'p, 'tcx> {
-        match &self.cache {
-            SpecializationCache::Variants { lookup, wilds } => {
-                let result: Self = if let Constructor::Variant(id) = constructor {
-                    lookup
-                        .get(id)
-                        // Default to `wilds` for absent keys. See `update_cache` for an explanation.
-                        .unwrap_or(&wilds)
-                        .iter()
-                        .filter_map(|&i| {
-                            self.patterns[i].specialize_constructor(
-                                cx,
-                                constructor,
-                                ctor_wild_subpatterns,
-                                false,
-                            )
-                        })
-                        .collect()
-                } else {
-                    unreachable!()
-                };
-                // When debug assertions are enabled, check the results against the "slow path"
-                // result.
-                debug_assert_eq!(
-                    result,
-                    Matrix {
-                        patterns: self.patterns.clone(),
-                        cache: SpecializationCache::Incompatible
-                    }
-                    .specialize_constructor(
-                        cx,
-                        constructor,
-                        ctor_wild_subpatterns
-                    )
-                );
-                result
-            }
-            SpecializationCache::Incompatible => self
-                .patterns
-                .iter()
-                .filter_map(|r| {
-                    r.specialize_constructor(cx, constructor, ctor_wild_subpatterns, false)
-                })
-                .collect(),
-        }
+        self.patterns
+            .iter()
+            .filter(|r| ctor.is_covered_by(pcx, r.head_ctor(pcx.cx)))
+            .map(|r| r.pop_head_constructor(ctor_wild_subpatterns))
+            .collect()
     }
 }
 
@@ -752,46 +608,6 @@ impl SliceKind {
             VarLen(prefix, suffix) => prefix + suffix <= other_len,
         }
     }
-
-    /// Returns a collection of slices that spans the values covered by `self`, subtracted by the
-    /// values covered by `other`: i.e., `self \ other` (in set notation).
-    fn subtract(self, other: Self) -> SmallVec<[Self; 1]> {
-        // Remember, `VarLen(i, j)` covers the union of `FixedLen` from `i + j` to infinity.
-        // Naming: we remove the "neg" constructors from the "pos" ones.
-        match self {
-            FixedLen(pos_len) => {
-                if other.covers_length(pos_len) {
-                    smallvec![]
-                } else {
-                    smallvec![self]
-                }
-            }
-            VarLen(pos_prefix, pos_suffix) => {
-                let pos_len = pos_prefix + pos_suffix;
-                match other {
-                    FixedLen(neg_len) => {
-                        if neg_len < pos_len {
-                            smallvec![self]
-                        } else {
-                            (pos_len..neg_len)
-                                .map(FixedLen)
-                                // We know that `neg_len + 1 >= pos_len >= pos_suffix`.
-                                .chain(Some(VarLen(neg_len + 1 - pos_suffix, pos_suffix)))
-                                .collect()
-                        }
-                    }
-                    VarLen(neg_prefix, neg_suffix) => {
-                        let neg_len = neg_prefix + neg_suffix;
-                        if neg_len <= pos_len {
-                            smallvec![]
-                        } else {
-                            (pos_len..neg_len).map(FixedLen).collect()
-                        }
-                    }
-                }
-            }
-        }
-    }
 }
 
 /// A constructor for array and slice patterns.
@@ -804,33 +620,142 @@ struct Slice {
 }
 
 impl Slice {
-    /// Returns what patterns this constructor covers: either fixed-length patterns or
-    /// variable-length patterns.
-    fn pattern_kind(self) -> SliceKind {
-        match self {
-            Slice { array_len: Some(len), kind: VarLen(prefix, suffix) }
-                if prefix + suffix == len =>
-            {
-                FixedLen(len)
-            }
-            _ => self.kind,
-        }
+    fn new(array_len: Option<u64>, kind: SliceKind) -> Self {
+        let kind = match (array_len, kind) {
+            // If the middle `..` is empty, we effectively have a fixed-length pattern.
+            (Some(len), VarLen(prefix, suffix)) if prefix + suffix >= len => FixedLen(len),
+            _ => kind,
+        };
+        Slice { array_len, kind }
+    }
+
+    fn arity(self) -> u64 {
+        self.kind.arity()
     }
 
-    /// Returns what values this constructor covers: either values of only one given length, or
-    /// values of length above a given length.
-    /// This is different from `pattern_kind()` because in some cases the pattern only takes into
-    /// account a subset of the entries of the array, but still only captures values of a given
+    /// The exhaustiveness-checking paper does not include any details on
+    /// checking variable-length slice patterns. However, they may be
+    /// matched by an infinite collection of fixed-length array patterns.
+    ///
+    /// Checking the infinite set directly would take an infinite amount
+    /// of time. However, it turns out that for each finite set of
+    /// patterns `P`, all sufficiently large array lengths are equivalent:
+    ///
+    /// Each slice `s` with a "sufficiently-large" length `l ≥ L` that applies
+    /// to exactly the subset `Pₜ` of `P` can be transformed to a slice
+    /// `sₘ` for each sufficiently-large length `m` that applies to exactly
+    /// the same subset of `P`.
+    ///
+    /// Because of that, each witness for reachability-checking of one
+    /// of the sufficiently-large lengths can be transformed to an
+    /// equally-valid witness of any other length, so we only have
+    /// to check slices of the "minimal sufficiently-large length"
+    /// and less.
+    ///
+    /// Note that the fact that there is a *single* `sₘ` for each `m`
+    /// not depending on the specific pattern in `P` is important: if
+    /// you look at the pair of patterns
+    ///     `[true, ..]`
+    ///     `[.., false]`
+    /// Then any slice of length ≥1 that matches one of these two
+    /// patterns can be trivially turned to a slice of any
+    /// other length ≥1 that matches them and vice-versa,
+    /// but the slice of length 2 `[false, true]` that matches neither
+    /// of these patterns can't be turned to a slice from length 1 that
+    /// matches neither of these patterns, so we have to consider
+    /// slices from length 2 there.
+    ///
+    /// Now, to see that that length exists and find it, observe that slice
+    /// patterns are either "fixed-length" patterns (`[_, _, _]`) or
+    /// "variable-length" patterns (`[_, .., _]`).
+    ///
+    /// For fixed-length patterns, all slices with lengths *longer* than
+    /// the pattern's length have the same outcome (of not matching), so
+    /// as long as `L` is greater than the pattern's length we can pick
+    /// any `sₘ` from that length and get the same result.
+    ///
+    /// For variable-length patterns, the situation is more complicated,
+    /// because as seen above the precise value of `sₘ` matters.
+    ///
+    /// However, for each variable-length pattern `p` with a prefix of length
+    /// `plₚ` and suffix of length `slₚ`, only the first `plₚ` and the last
+    /// `slₚ` elements are examined.
+    ///
+    /// Therefore, as long as `L` is positive (to avoid concerns about empty
+    /// types), all elements after the maximum prefix length and before
+    /// the maximum suffix length are not examined by any variable-length
+    /// pattern, and therefore can be added/removed without affecting
+    /// them - creating equivalent patterns from any sufficiently-large
     /// length.
-    fn value_kind(self) -> SliceKind {
-        match self {
-            Slice { array_len: Some(len), kind: VarLen(_, _) } => FixedLen(len),
-            _ => self.kind,
+    ///
+    /// Of course, if fixed-length patterns exist, we must be sure
+    /// that our length is large enough to miss them all, so
+    /// we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`
+    ///
+    /// For example, with the above pair of patterns, all elements
+    /// but the first and last can be added/removed, so any
+    /// witness of length ≥2 (say, `[false, false, true]`) can be
+    /// turned to a witness from any other length ≥2.
+    fn split<'p, 'tcx>(self, pcx: PatCtxt<'_, 'p, 'tcx>) -> SmallVec<[Constructor<'tcx>; 1]> {
+        let (self_prefix, self_suffix) = match self.kind {
+            VarLen(self_prefix, self_suffix) => (self_prefix, self_suffix),
+            _ => return smallvec![Slice(self)],
+        };
+
+        let head_ctors = pcx.matrix.head_ctors(pcx.cx).filter(|c| !c.is_wildcard());
+
+        let mut max_prefix_len = self_prefix;
+        let mut max_suffix_len = self_suffix;
+        let mut max_fixed_len = 0;
+
+        for ctor in head_ctors {
+            if let Slice(slice) = ctor {
+                match slice.kind {
+                    FixedLen(len) => {
+                        max_fixed_len = cmp::max(max_fixed_len, len);
+                    }
+                    VarLen(prefix, suffix) => {
+                        max_prefix_len = cmp::max(max_prefix_len, prefix);
+                        max_suffix_len = cmp::max(max_suffix_len, suffix);
+                    }
+                }
+            } else {
+                bug!("unexpected ctor for slice type: {:?}", ctor);
+            }
+        }
+
+        // For diagnostics, we keep the prefix and suffix lengths separate, so in the case
+        // where `max_fixed_len + 1` is the largest, we adapt `max_prefix_len` accordingly,
+        // so that `L = max_prefix_len + max_suffix_len`.
+        if max_fixed_len + 1 >= max_prefix_len + max_suffix_len {
+            // The subtraction can't overflow thanks to the above check.
+            // The new `max_prefix_len` is also guaranteed to be no smaller than its previous
+            // value.
+            max_prefix_len = max_fixed_len + 1 - max_suffix_len;
+        }
+
+        let final_slice = VarLen(max_prefix_len, max_suffix_len);
+        let final_slice = Slice::new(self.array_len, final_slice);
+        match self.array_len {
+            Some(_) => smallvec![Slice(final_slice)],
+            None => {
+                // `self` originally covered the range `(self.arity()..infinity)`. We split that
+                // range into two: lengths smaller than `final_slice.arity()` are treated
+                // independently as fixed-length slices, and lengths above are captured by
+                // `final_slice`.
+                let smaller_lengths = (self.arity()..final_slice.arity()).map(FixedLen);
+                smaller_lengths
+                    .map(|kind| Slice::new(self.array_len, kind))
+                    .chain(Some(final_slice))
+                    .map(Slice)
+                    .collect()
+            }
         }
     }
 
-    fn arity(self) -> u64 {
-        self.pattern_kind().arity()
+    /// See `Constructor::is_covered_by`
+    fn is_covered_by(self, other: Self) -> bool {
+        other.kind.covers_length(self.arity())
     }
 }
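As a worked example of the splitting logic above (a sketch using this file's `FixedLen`/`VarLen` notation): suppose the matrix contains the patterns `[true, ..]` (`VarLen(1, 0)`) and `[.., false]` (`VarLen(0, 1)`), and we split the slice wildcard `[..]` (`VarLen(0, 0)`) for a slice of unknown length. Then `max_prefix_len = 1`, `max_suffix_len = 1` and `max_fixed_len = 0`, the `max_fixed_len + 1` adjustment does not fire, and `final_slice = VarLen(1, 1)` with arity 2. The resulting constructors are `FixedLen(0)`, `FixedLen(1)` and `VarLen(1, 1)`: lengths 0 and 1 are checked individually, while all lengths of 2 or more are grouped into a single bucket, matching the doc comment's claim that 2 is the minimal "sufficiently-large" length for this pair of patterns.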
 
@@ -838,7 +763,7 @@ impl Slice {
 /// the constructor. See also `Fields`.
 ///
 /// `pat_constructor` retrieves the constructor corresponding to a pattern.
-/// `specialize_one_pattern` returns the list of fields corresponding to a pattern, given a
+/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a
 /// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
 /// `Fields`.
 #[derive(Clone, Debug, PartialEq)]
@@ -862,9 +787,32 @@ enum Constructor<'tcx> {
     Opaque,
     /// Fake extra constructor for enums that aren't allowed to be matched exhaustively.
     NonExhaustive,
+    /// Fake constructor for those types for which we can't list constructors explicitly, like
+    /// `f64` and `&str`.
+    Unlistable,
+    /// Wildcard pattern.
+    Wildcard,
 }
 
 impl<'tcx> Constructor<'tcx> {
+    fn is_wildcard(&self) -> bool {
+        matches!(self, Wildcard)
+    }
+
+    fn as_int_range(&self) -> Option<&IntRange<'tcx>> {
+        match self {
+            IntRange(range) => Some(range),
+            _ => None,
+        }
+    }
+
+    fn as_slice(&self) -> Option<Slice> {
+        match self {
+            Slice(slice) => Some(*slice),
+            _ => None,
+        }
+    }
+
     fn variant_index_for_adt(&self, adt: &'tcx ty::AdtDef) -> VariantIdx {
         match *self {
             Variant(id) => adt.variant_index_with_id(id),
@@ -876,94 +824,154 @@ impl<'tcx> Constructor<'tcx> {
         }
     }
 
-    // Returns the set of constructors covered by `self` but not by
-    // anything in `other_ctors`.
-    fn subtract_ctors(&self, other_ctors: &Vec<Constructor<'tcx>>) -> Vec<Constructor<'tcx>> {
-        if other_ctors.is_empty() {
-            return vec![self.clone()];
-        }
+    /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) stand for a set of concrete
+    /// constructors (like variants, integers or fixed-size slices). When specializing for these
+    /// constructors, we want to be specializing for the underlying concrete constructors.
+    /// Naively, we would simply return the list of constructors they correspond to. We instead are
+    /// more clever: if there are constructors that we know will behave the same wrt the current
+    /// matrix, we keep them grouped. For example, all slices of a sufficiently large length
+    /// will either be all useful or all non-useful with a given matrix.
+    ///
+    /// See the branches for details on how the splitting is done.
+    ///
+    /// This function may discard some irrelevant constructors if this preserves behavior and
+    /// diagnostics. E.g., for the `_` case, we ignore the constructors already present in the
+    /// matrix, unless all of them are.
+    ///
+    /// `hir_id` is `None` when we're evaluating the wildcard pattern. In that case we do not want
+    /// to lint for overlapping ranges.
+    fn split<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, hir_id: Option<HirId>) -> SmallVec<[Self; 1]> {
+        debug!("Constructor::split({:#?}, {:#?})", self, pcx.matrix);
 
         match self {
-            // Those constructors can only match themselves.
-            Single | Variant(_) | Str(..) | FloatRange(..) => {
-                if other_ctors.iter().any(|c| c == self) { vec![] } else { vec![self.clone()] }
+            Wildcard => Constructor::split_wildcard(pcx),
+            // Fast-track if the range is trivial. In particular, we don't do the overlapping
+            // ranges check.
+            IntRange(ctor_range)
+                if ctor_range.treat_exhaustively(pcx.cx.tcx) && !ctor_range.is_singleton() =>
+            {
+                ctor_range.split(pcx, hir_id)
             }
-            &Slice(slice) => {
-                let mut other_slices = other_ctors
-                    .iter()
-                    .filter_map(|c: &Constructor<'_>| match c {
-                        Slice(slice) => Some(*slice),
-                        _ => bug!("bad slice pattern constructor {:?}", c),
-                    })
-                    .map(Slice::value_kind);
+            Slice(slice @ Slice { kind: VarLen(..), .. }) => slice.split(pcx),
+            // Any other constructor can be used unchanged.
+            _ => smallvec![self.clone()],
+        }
+    }
 
-                match slice.value_kind() {
-                    FixedLen(self_len) => {
-                        if other_slices.any(|other_slice| other_slice.covers_length(self_len)) {
-                            vec![]
-                        } else {
-                            vec![Slice(slice)]
-                        }
-                    }
-                    kind @ VarLen(..) => {
-                        let mut remaining_slices = vec![kind];
-
-                        // For each used slice, subtract from the current set of slices.
-                        for other_slice in other_slices {
-                            remaining_slices = remaining_slices
-                                .into_iter()
-                                .flat_map(|remaining_slice| remaining_slice.subtract(other_slice))
-                                .collect();
+    /// For wildcards, there are two groups of constructors: there are the constructors actually
+    /// present in the matrix (`head_ctors`), and the constructors not present (`missing_ctors`).
+    /// Two constructors that are not in the matrix will either both be caught (by a wildcard), or
+    /// both not be caught. Therefore we can keep the missing constructors grouped together.
+    fn split_wildcard<'p>(pcx: PatCtxt<'_, 'p, 'tcx>) -> SmallVec<[Self; 1]> {
+        // Missing constructors are those that are not matched by any non-wildcard patterns in the
+        // current column. We only fully construct them on-demand, because they're rarely used and
+        // can be big.
+        let missing_ctors = MissingConstructors::new(pcx);
+        if missing_ctors.is_empty(pcx) {
+            // All the constructors are present in the matrix, so we just go through them all.
+            // We must also split them first.
+            missing_ctors.all_ctors
+        } else {
+            // Some constructors are missing, thus we can specialize with the wildcard constructor,
+            // which will stand for those constructors that are missing, and behaves like any of
+            // them.
+            smallvec![Wildcard]
+        }
+    }
 
-                            // If the constructors that have been considered so far already cover
-                            // the entire range of `self`, no need to look at more constructors.
-                            if remaining_slices.is_empty() {
-                                break;
-                            }
-                        }
+    /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
+    /// For the simple cases, this is simply checking for equality. For the "grouped" constructors,
+    /// this checks for inclusion.
+    fn is_covered_by<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool {
+        match (self, other) {
+            // Wildcards cover anything
+            (_, Wildcard) => true,
+            // Wildcards are only covered by wildcards
+            (Wildcard, _) => false,
+
+            (Single, Single) => true,
+            (Variant(self_id), Variant(other_id)) => self_id == other_id,
 
-                        remaining_slices
-                            .into_iter()
-                            .map(|kind| Slice { array_len: slice.array_len, kind })
-                            .map(Slice)
-                            .collect()
+            (IntRange(self_range), IntRange(other_range)) => {
+                self_range.is_covered_by(pcx, other_range)
+            }
+            (
+                FloatRange(self_from, self_to, self_end),
+                FloatRange(other_from, other_to, other_end),
+            ) => {
+                match (
+                    compare_const_vals(pcx.cx.tcx, self_to, other_to, pcx.cx.param_env, pcx.ty),
+                    compare_const_vals(pcx.cx.tcx, self_from, other_from, pcx.cx.param_env, pcx.ty),
+                ) {
+                    (Some(to), Some(from)) => {
+                        (from == Ordering::Greater || from == Ordering::Equal)
+                            && (to == Ordering::Less
+                                || (other_end == self_end && to == Ordering::Equal))
                     }
+                    _ => false,
                 }
             }
-            IntRange(self_range) => {
-                let mut remaining_ranges = vec![self_range.clone()];
-                for other_ctor in other_ctors {
-                    if let IntRange(other_range) = other_ctor {
-                        if other_range == self_range {
-                            // If the `self` range appears directly in a `match` arm, we can
-                            // eliminate it straight away.
-                            remaining_ranges = vec![];
-                        } else {
-                            // Otherwise explicitly compute the remaining ranges.
-                            remaining_ranges = other_range.subtract_from(remaining_ranges);
-                        }
-
-                        // If the ranges that have been considered so far already cover the entire
-                        // range of values, we can return early.
-                        if remaining_ranges.is_empty() {
-                            break;
-                        }
-                    }
+            (Str(self_val), Str(other_val)) => {
+                // FIXME: there's probably a more direct way of comparing for equality
+                match compare_const_vals(pcx.cx.tcx, self_val, other_val, pcx.cx.param_env, pcx.ty)
+                {
+                    Some(comparison) => comparison == Ordering::Equal,
+                    None => false,
                 }
-
-                // Convert the ranges back into constructors.
-                remaining_ranges.into_iter().map(IntRange).collect()
             }
+            (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice),
+
+            // We are trying to inspect an opaque constant. Thus we skip the row.
+            (Opaque, _) | (_, Opaque) => false,
+            // Only a wildcard pattern can match the special extra constructor.
+            (NonExhaustive, _) => false,
+            // If we encounter a `Single` here, this means there was only one constructor for this
+            // type after all.
+            (Unlistable, Single) => true,
+            // Otherwise, only a wildcard pattern can match the special extra constructor.
+            (Unlistable, _) => false,
+
+            _ => bug!("trying to compare incompatible constructors {:?} and {:?}", self, other),
+        }
+    }
+
+    /// Faster version of `is_covered_by` when applied to many constructors. `used_ctors` is
+    /// assumed to be built from `matrix.head_ctors()`, and `self` is assumed to have been split.
+    fn is_covered_by_any<'p>(
+        &self,
+        pcx: PatCtxt<'_, 'p, 'tcx>,
+        used_ctors: &[Constructor<'tcx>],
+    ) -> bool {
+        if used_ctors.is_empty() {
+            return false;
+        }
+
+        match self {
+            // `used_ctors` cannot contain anything other than `Single`s.
+            Single => !used_ctors.is_empty(),
+            Variant(_) => used_ctors.iter().any(|c| c == self),
+            IntRange(range) => used_ctors
+                .iter()
+                .filter_map(|c| c.as_int_range())
+                .any(|other| range.is_covered_by(pcx, other)),
+            Slice(slice) => used_ctors
+                .iter()
+                .filter_map(|c| c.as_slice())
+                .any(|other| slice.is_covered_by(other)),
             // This constructor is never covered by anything else
-            NonExhaustive => vec![NonExhaustive],
-            Opaque => bug!("unexpected opaque ctor {:?} found in all_ctors", self),
+            NonExhaustive => false,
+            // This constructor is only covered by `Single`s
+            Unlistable => used_ctors.iter().any(|c| *c == Single),
+            Str(..) | FloatRange(..) | Opaque | Wildcard => {
+                bug!("found unexpected ctor in all_ctors: {:?}", self)
+            }
         }
     }
 
     /// Apply a constructor to a list of patterns, yielding a new pattern. `pats`
     /// must have as many elements as this constructor's arity.
     ///
-    /// This is roughly the inverse of `specialize_one_pattern`.
+    /// This is roughly the inverse of `specialize_constructor`.
     ///
     /// Examples:
     /// `self`: `Constructor::Single`
@@ -975,23 +983,18 @@ impl<'tcx> Constructor<'tcx> {
     /// `ty`: `Option<bool>`
     /// `pats`: `[false]`
     /// returns `Some(false)`
-    fn apply<'p>(
-        &self,
-        cx: &MatchCheckCtxt<'p, 'tcx>,
-        ty: Ty<'tcx>,
-        fields: Fields<'p, 'tcx>,
-    ) -> Pat<'tcx> {
+    fn apply<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, fields: Fields<'p, 'tcx>) -> Pat<'tcx> {
         let mut subpatterns = fields.all_patterns();
 
         let pat = match self {
-            Single | Variant(_) => match ty.kind() {
+            Single | Variant(_) => match pcx.ty.kind() {
                 ty::Adt(..) | ty::Tuple(..) => {
                     let subpatterns = subpatterns
                         .enumerate()
                         .map(|(i, p)| FieldPat { field: Field::new(i), pattern: p })
                         .collect();
 
-                    if let ty::Adt(adt, substs) = ty.kind() {
+                    if let ty::Adt(adt, substs) = pcx.ty.kind() {
                         if adt.is_enum() {
                             PatKind::Variant {
                                 adt_def: adt,
@@ -1007,10 +1010,10 @@ impl<'tcx> Constructor<'tcx> {
                     }
                 }
                 ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
-                ty::Slice(_) | ty::Array(..) => bug!("bad slice pattern {:?} {:?}", self, ty),
+                ty::Slice(_) | ty::Array(..) => bug!("bad slice pattern {:?} {:?}", self, pcx.ty),
                 _ => PatKind::Wild,
             },
-            Slice(slice) => match slice.pattern_kind() {
+            Slice(slice) => match slice.kind {
                 FixedLen(_) => {
                     PatKind::Slice { prefix: subpatterns.collect(), slice: None, suffix: vec![] }
                 }
@@ -1031,23 +1034,21 @@ impl<'tcx> Constructor<'tcx> {
                     } else {
                         subpatterns.collect()
                     };
-                    let wild = Pat::wildcard_from_ty(ty);
+                    let wild = Pat::wildcard_from_ty(pcx.ty);
                     PatKind::Slice { prefix, slice: Some(wild), suffix }
                 }
             },
             &Str(value) => PatKind::Constant { value },
             &FloatRange(lo, hi, end) => PatKind::Range(PatRange { lo, hi, end }),
-            IntRange(range) => return range.to_pat(cx.tcx),
-            NonExhaustive => PatKind::Wild,
-            Opaque => bug!("we should not try to apply an opaque constructor {:?}", self),
+            IntRange(range) => return range.to_pat(pcx.cx.tcx),
+            NonExhaustive | Unlistable => PatKind::Wild,
+            Opaque => bug!("we should not try to apply an opaque constructor"),
+            Wildcard => bug!(
+                "trying to apply a wildcard constructor; this should have been done in `apply_constructors`"
+            ),
         };
 
-        Pat { ty, span: DUMMY_SP, kind: Box::new(pat) }
-    }
-
-    /// Like `apply`, but where all the subpatterns are wildcards `_`.
-    fn apply_wildcards<'a>(&self, cx: &MatchCheckCtxt<'a, 'tcx>, ty: Ty<'tcx>) -> Pat<'tcx> {
-        self.apply(cx, ty, Fields::wildcards(cx, self, ty))
+        Pat { ty: pcx.ty, span: DUMMY_SP, kind: Box::new(pat) }
     }
 }
 
@@ -1125,11 +1126,9 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
     }
 
     /// Creates a new list of wildcard fields for a given constructor.
-    fn wildcards(
-        cx: &MatchCheckCtxt<'p, 'tcx>,
-        constructor: &Constructor<'tcx>,
-        ty: Ty<'tcx>,
-    ) -> Self {
+    fn wildcards(pcx: PatCtxt<'_, 'p, 'tcx>, constructor: &Constructor<'tcx>) -> Self {
+        let ty = pcx.ty;
+        let cx = pcx.cx;
         let wildcard_from_ty = |ty| &*cx.pattern_arena.alloc(Pat::wildcard_from_ty(ty));
 
         let ret = match constructor {
@@ -1182,7 +1181,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
                         }
                     }
                 }
-                _ => Fields::empty(),
+                _ => bug!("Unexpected type for `Single` constructor: {:?}", ty),
             },
             Slice(slice) => match *ty.kind() {
                 ty::Slice(ty) | ty::Array(ty, _) => {
@@ -1191,7 +1190,8 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
                 }
                 _ => bug!("bad slice pattern {:?} {:?}", constructor, ty),
             },
-            Str(..) | FloatRange(..) | IntRange(..) | NonExhaustive | Opaque => Fields::empty(),
+            Str(..) | FloatRange(..) | IntRange(..) | NonExhaustive | Opaque | Unlistable
+            | Wildcard => Fields::empty(),
         };
         debug!("Fields::wildcards({:?}, {:?}) = {:#?}", constructor, ty, ret);
         ret
@@ -1289,6 +1289,41 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
         }
     }
 
+    /// Replaces contained fields with the arguments of the given pattern. Only use on a pattern
+    /// that is compatible with the constructor used to build `self`.
+    /// This is meant to be used on the result of `Fields::wildcards()`. The idea is that
+    /// `wildcards` constructs a list of fields where all entries are wildcards, and the pattern
+    /// provided to this function fills some of the fields with non-wildcards.
+    /// In the following example, `Fields::wildcards` would return `[_, _, _, _]`. Calling
+    /// `replace_with_pattern_arguments` on it with the pattern `[Some(0), ..]` yields
+    /// `[Some(0), _, _, _]`.
+    /// ```rust
+    /// let x: [Option<u8>; 4] = foo();
+    /// match x {
+    ///     [Some(0), ..] => {}
+    /// }
+    /// ```
+    fn replace_with_pattern_arguments(&self, pat: &'p Pat<'tcx>) -> Self {
+        match pat.kind.as_ref() {
+            PatKind::Deref { subpattern } => Self::from_single_pattern(subpattern),
+            PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
+                self.replace_with_fieldpats(subpatterns)
+            }
+            PatKind::Array { prefix, suffix, .. } | PatKind::Slice { prefix, suffix, .. } => {
+                // Number of subpatterns for the constructor
+                let ctor_arity = self.len();
+
+                // Replace the prefix and the suffix with the given patterns, leaving wildcards in
+                // the middle if there was a subslice pattern `..`.
+                let prefix = prefix.iter().enumerate();
+                let suffix =
+                    suffix.iter().enumerate().map(|(i, p)| (ctor_arity - suffix.len() + i, p));
+                self.replace_fields_indexed(prefix.chain(suffix))
+            }
+            _ => self.clone(),
+        }
+    }
+
     fn push_on_patstack(self, stack: &[&'p Pat<'tcx>]) -> PatStack<'p, 'tcx> {
         let pats: SmallVec<_> = match self {
             Fields::Slice(pats) => pats.iter().chain(stack.iter().copied()).collect(),
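
To make the `Fields::wildcards` / `replace_with_pattern_arguments` pair added above concrete outside of rustc, here is a minimal standalone sketch in which fields are plain strings and a pattern is just a list of (index, subpattern) pairs; all names and types below are invented for illustration, not the compiler's own.

```rust
/// All-wildcard field list for a constructor of the given arity.
fn wildcards(arity: usize) -> Vec<String> {
    vec!["_".to_string(); arity]
}

/// Overwrite the positions the pattern actually names; unmentioned fields
/// keep their wildcard.
fn replace_with_pattern_arguments(fields: &[String], provided: &[(usize, &str)]) -> Vec<String> {
    let mut out = fields.to_vec();
    for &(i, pat) in provided {
        out[i] = pat.to_string();
    }
    out
}

fn main() {
    // `[Some(0), ..]` matched against `[Option<u8>; 4]`: only index 0 is provided.
    let filled = replace_with_pattern_arguments(&wildcards(4), &[(0, "Some(0)")]);
    assert_eq!(filled, ["Some(0)", "_", "_", "_"]);
}
```

The real method does the same positional replacement over arena-allocated `Pat`s, with the prefix/suffix index arithmetic shown above for slice patterns.
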
@@ -1331,52 +1366,16 @@ impl<'tcx> Usefulness<'tcx> {
 
     fn apply_constructor<'p>(
         self,
-        cx: &MatchCheckCtxt<'p, 'tcx>,
+        pcx: PatCtxt<'_, 'p, 'tcx>,
         ctor: &Constructor<'tcx>,
-        ty: Ty<'tcx>,
         ctor_wild_subpatterns: &Fields<'p, 'tcx>,
-    ) -> Self {
-        match self {
-            UsefulWithWitness(witnesses) => UsefulWithWitness(
-                witnesses
-                    .into_iter()
-                    .map(|witness| witness.apply_constructor(cx, &ctor, ty, ctor_wild_subpatterns))
-                    .collect(),
-            ),
-            x => x,
-        }
-    }
-
-    fn apply_wildcard(self, ty: Ty<'tcx>) -> Self {
-        match self {
-            UsefulWithWitness(witnesses) => {
-                let wild = Pat::wildcard_from_ty(ty);
-                UsefulWithWitness(
-                    witnesses
-                        .into_iter()
-                        .map(|mut witness| {
-                            witness.0.push(wild.clone());
-                            witness
-                        })
-                        .collect(),
-                )
-            }
-            x => x,
-        }
-    }
-
-    fn apply_missing_ctors(
-        self,
-        cx: &MatchCheckCtxt<'_, 'tcx>,
-        ty: Ty<'tcx>,
-        missing_ctors: &MissingConstructors<'tcx>,
+        is_top_level: bool,
     ) -> Self {
         match self {
             UsefulWithWitness(witnesses) => {
-                let new_patterns: Vec<_> =
-                    missing_ctors.iter().map(|ctor| ctor.apply_wildcards(cx, ty)).collect();
-                // Add the new patterns to each witness
-                UsefulWithWitness(
+                let new_witnesses = if ctor.is_wildcard() {
+                    let missing_ctors = MissingConstructors::new(pcx);
+                    let new_patterns = missing_ctors.report_patterns(pcx, is_top_level);
                     witnesses
                         .into_iter()
                         .flat_map(|witness| {
@@ -1386,8 +1385,14 @@ impl<'tcx> Usefulness<'tcx> {
                                 witness
                             })
                         })
-                        .collect(),
-                )
+                        .collect()
+                } else {
+                    witnesses
+                        .into_iter()
+                        .map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns))
+                        .collect()
+                };
+                UsefulWithWitness(new_witnesses)
             }
             x => x,
         }
@@ -1400,9 +1405,14 @@ crate enum WitnessPreference {
     LeaveOutWitness,
 }
 
-#[derive(Copy, Clone, Debug)]
-struct PatCtxt<'tcx> {
+#[derive(Copy, Clone)]
+struct PatCtxt<'a, 'p, 'tcx> {
+    cx: &'a MatchCheckCtxt<'p, 'tcx>,
+    /// Current state of the matrix.
+    matrix: &'a Matrix<'p, 'tcx>,
+    /// Type of the current column under investigation.
     ty: Ty<'tcx>,
+    /// Span of the current pattern under investigation.
     span: Span,
 }
 
@@ -1463,17 +1473,16 @@ impl<'tcx> Witness<'tcx> {
     /// pats: [(false, "foo"), 42]  => X { a: (false, "foo"), b: 42 }
     fn apply_constructor<'p>(
         mut self,
-        cx: &MatchCheckCtxt<'p, 'tcx>,
+        pcx: PatCtxt<'_, 'p, 'tcx>,
         ctor: &Constructor<'tcx>,
-        ty: Ty<'tcx>,
         ctor_wild_subpatterns: &Fields<'p, 'tcx>,
     ) -> Self {
         let pat = {
             let len = self.0.len();
             let arity = ctor_wild_subpatterns.len();
             let pats = self.0.drain((len - arity)..).rev();
-            let fields = ctor_wild_subpatterns.replace_fields(cx, pats);
-            ctor.apply(cx, ty, fields)
+            let fields = ctor_wild_subpatterns.replace_fields(pcx.cx, pats);
+            ctor.apply(pcx, fields)
         };
 
         self.0.push(pat);
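
The witness-stack manipulation above can be sketched independently of rustc's types. The helper below is a simplified model (string patterns, tuple syntax) of draining the last `arity` entries and pushing the reconstructed pattern; the compiler additionally reverses the drained patterns, since the stack stores them back-to-front, and builds a real `Pat` via `Constructor::apply`.

```rust
/// Fold the top `arity` entries of a witness stack into one applied pattern.
fn apply_constructor(stack: &mut Vec<String>, ctor: &str, arity: usize) {
    let start = stack.len() - arity;
    let args: Vec<String> = stack.drain(start..).collect();
    stack.push(format!("{}({})", ctor, args.join(", ")));
}

fn main() {
    // Roughly the doc example: subpatterns `(false, "foo")` and `42` folded
    // under a two-field constructor `X`.
    let mut witness = vec!["(false, \"foo\")".to_string(), "42".to_string()];
    apply_constructor(&mut witness, "X", 2);
    assert_eq!(witness, ["X((false, \"foo\"), 42)"]);
}
```
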
@@ -1491,11 +1500,9 @@ impl<'tcx> Witness<'tcx> {
 /// `Option<!>`, we do not include `Some(_)` in the returned list of constructors.
 /// Invariant: this returns an empty `Vec` if and only if the type is uninhabited (as determined by
 /// `cx.is_uninhabited()`).
-fn all_constructors<'a, 'tcx>(
-    cx: &mut MatchCheckCtxt<'a, 'tcx>,
-    pcx: PatCtxt<'tcx>,
-) -> Vec<Constructor<'tcx>> {
+fn all_constructors<'p, 'tcx>(pcx: PatCtxt<'_, 'p, 'tcx>) -> Vec<Constructor<'tcx>> {
     debug!("all_constructors({:?})", pcx.ty);
+    let cx = pcx.cx;
     let make_range = |start, end| {
         IntRange(
             // `unwrap()` is ok because we know the type is an integer.
@@ -1503,37 +1510,22 @@ fn all_constructors<'a, 'tcx>(
                 .unwrap(),
         )
     };
-    match *pcx.ty.kind() {
+    match pcx.ty.kind() {
         ty::Bool => vec![make_range(0, 1)],
-        ty::Array(ref sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => {
+        ty::Array(sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => {
             let len = len.eval_usize(cx.tcx, cx.param_env);
             if len != 0 && cx.is_uninhabited(sub_ty) {
                 vec![]
             } else {
-                vec![Slice(Slice { array_len: Some(len), kind: VarLen(0, 0) })]
+                vec![Slice(Slice::new(Some(len), VarLen(0, 0)))]
             }
         }
         // Treat arrays of a constant but unknown length like slices.
-        ty::Array(ref sub_ty, _) | ty::Slice(ref sub_ty) => {
+        ty::Array(sub_ty, _) | ty::Slice(sub_ty) => {
             let kind = if cx.is_uninhabited(sub_ty) { FixedLen(0) } else { VarLen(0, 0) };
-            vec![Slice(Slice { array_len: None, kind })]
+            vec![Slice(Slice::new(None, kind))]
         }
         ty::Adt(def, substs) if def.is_enum() => {
-            let ctors: Vec<_> = if cx.tcx.features().exhaustive_patterns {
-                // If `exhaustive_patterns` is enabled, we exclude variants known to be
-                // uninhabited.
-                def.variants
-                    .iter()
-                    .filter(|v| {
-                        !v.uninhabited_from(cx.tcx, substs, def.adt_kind(), cx.param_env)
-                            .contains(cx.tcx, cx.module)
-                    })
-                    .map(|v| Variant(v.def_id))
-                    .collect()
-            } else {
-                def.variants.iter().map(|v| Variant(v.def_id)).collect()
-            };
-
             // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an
             // additional "unknown" constructor.
             // There is no point in enumerating all possible variants, because the user can't
@@ -1559,7 +1551,22 @@ fn all_constructors<'a, 'tcx>(
             let is_secretly_empty =
                 def.variants.is_empty() && !cx.tcx.features().exhaustive_patterns;
 
-            if is_secretly_empty || is_declared_nonexhaustive { vec![NonExhaustive] } else { ctors }
+            if is_secretly_empty || is_declared_nonexhaustive {
+                vec![NonExhaustive]
+            } else if cx.tcx.features().exhaustive_patterns {
+                // If `exhaustive_patterns` is enabled, we exclude variants known to be
+                // uninhabited.
+                def.variants
+                    .iter()
+                    .filter(|v| {
+                        !v.uninhabited_from(cx.tcx, substs, def.adt_kind(), cx.param_env)
+                            .contains(cx.tcx, cx.module)
+                    })
+                    .map(|v| Variant(v.def_id))
+                    .collect()
+            } else {
+                def.variants.iter().map(|v| Variant(v.def_id)).collect()
+            }
         }
         ty::Char => {
             vec![
@@ -1577,24 +1584,22 @@ fn all_constructors<'a, 'tcx>(
             // `#[non_exhaustive]` enums by returning a special unmatcheable constructor.
             vec![NonExhaustive]
         }
-        ty::Int(ity) => {
+        &ty::Int(ity) => {
             let bits = Integer::from_attr(&cx.tcx, SignedInt(ity)).size().bits() as u128;
             let min = 1u128 << (bits - 1);
             let max = min - 1;
             vec![make_range(min, max)]
         }
-        ty::Uint(uty) => {
+        &ty::Uint(uty) => {
             let size = Integer::from_attr(&cx.tcx, UnsignedInt(uty)).size();
             let max = truncate(u128::MAX, size);
             vec![make_range(0, max)]
         }
-        _ => {
-            if cx.is_uninhabited(pcx.ty) {
-                vec![]
-            } else {
-                vec![Single]
-            }
-        }
+        _ if cx.is_uninhabited(pcx.ty) => vec![],
+        ty::Adt(..) | ty::Tuple(..) => vec![Single],
+        ty::Ref(_, t, _) if !t.is_str() => vec![Single],
+        // This type is one for which we don't know how to list constructors, like `&str` or `f64`.
+        _ => vec![Unlistable],
     }
 }
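
As a rough picture of what `all_constructors` computes, here is a standalone sketch over a toy type description. `ToyType` and `ToyCtor` are invented stand-ins for `Ty<'tcx>` and `Constructor`; inhabitedness, `char`, arrays and references are deliberately left out.

```rust
enum ToyType {
    Bool,
    Uint { bits: u32 },
    Enum { variants: Vec<String>, non_exhaustive: bool },
    Str, // a type whose constructors cannot be listed
}

#[derive(Debug, PartialEq)]
enum ToyCtor {
    IntRange(u128, u128),
    Variant(String),
    NonExhaustive,
    Unlistable,
}

/// Every type maps to the complete set of constructors a `match` must cover.
fn all_constructors(ty: &ToyType) -> Vec<ToyCtor> {
    match ty {
        // `bool` is treated as the integer range `0..=1`.
        ToyType::Bool => vec![ToyCtor::IntRange(0, 1)],
        // Unsigned integers cover their whole value range (assumes `bits < 128`).
        ToyType::Uint { bits } => vec![ToyCtor::IntRange(0, (1u128 << *bits) - 1)],
        // A `#[non_exhaustive]` enum gets a single extra, unmatchable constructor.
        ToyType::Enum { non_exhaustive: true, .. } => vec![ToyCtor::NonExhaustive],
        ToyType::Enum { variants, .. } => {
            variants.iter().cloned().map(ToyCtor::Variant).collect()
        }
        // Types like `&str` or `f64`, for which we don't know how to list constructors.
        ToyType::Str => vec![ToyCtor::Unlistable],
    }
}

fn main() {
    assert_eq!(all_constructors(&ToyType::Bool), [ToyCtor::IntRange(0, 1)]);
    assert_eq!(all_constructors(&ToyType::Uint { bits: 8 }), [ToyCtor::IntRange(0, 255)]);
    assert_eq!(all_constructors(&ToyType::Str), [ToyCtor::Unlistable]);
    let dir = ToyType::Enum {
        variants: vec!["N".into(), "S".into(), "E".into(), "W".into()],
        non_exhaustive: false,
    };
    assert_eq!(all_constructors(&dir).len(), 4);
}
```
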
 
@@ -1706,40 +1711,6 @@ impl<'tcx> IntRange<'tcx> {
         }
     }
 
-    fn from_pat(
-        tcx: TyCtxt<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        pat: &Pat<'tcx>,
-    ) -> Option<IntRange<'tcx>> {
-        // This MUST be kept in sync with `pat_constructor`.
-        match *pat.kind {
-            PatKind::AscribeUserType { .. } => bug!(), // Handled by `expand_pattern`
-            PatKind::Or { .. } => bug!("Or-pattern should have been expanded earlier on."),
-
-            PatKind::Binding { .. }
-            | PatKind::Wild
-            | PatKind::Leaf { .. }
-            | PatKind::Deref { .. }
-            | PatKind::Variant { .. }
-            | PatKind::Array { .. }
-            | PatKind::Slice { .. } => None,
-
-            PatKind::Constant { value } => Self::from_const(tcx, param_env, value, pat.span),
-
-            PatKind::Range(PatRange { lo, hi, end }) => {
-                let ty = lo.ty;
-                Self::from_range(
-                    tcx,
-                    lo.eval_bits(tcx, param_env, lo.ty),
-                    hi.eval_bits(tcx, param_env, hi.ty),
-                    ty,
-                    &end,
-                    pat.span,
-                )
-            }
-        }
-    }
-
     // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it.
     fn signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> u128 {
         match *ty.kind() {
@@ -1751,35 +1722,6 @@ impl<'tcx> IntRange<'tcx> {
         }
     }
 
-    /// Returns a collection of ranges that spans the values covered by `ranges`, subtracted
-    /// by the values covered by `self`: i.e., `ranges \ self` (in set notation).
-    fn subtract_from(&self, ranges: Vec<IntRange<'tcx>>) -> Vec<IntRange<'tcx>> {
-        let mut remaining_ranges = vec![];
-        let ty = self.ty;
-        let span = self.span;
-        let (lo, hi) = self.boundaries();
-        for subrange in ranges {
-            let (subrange_lo, subrange_hi) = subrange.range.into_inner();
-            if lo > subrange_hi || subrange_lo > hi {
-                // The pattern doesn't intersect with the subrange at all,
-                // so the subrange remains untouched.
-                remaining_ranges.push(IntRange { range: subrange_lo..=subrange_hi, ty, span });
-            } else {
-                if lo > subrange_lo {
-                    // The pattern intersects an upper section of the
-                    // subrange, so a lower section will remain.
-                    remaining_ranges.push(IntRange { range: subrange_lo..=(lo - 1), ty, span });
-                }
-                if hi < subrange_hi {
-                    // The pattern intersects a lower section of the
-                    // subrange, so an upper section will remain.
-                    remaining_ranges.push(IntRange { range: (hi + 1)..=subrange_hi, ty, span });
-                }
-            }
-        }
-        remaining_ranges
-    }
-
     fn is_subrange(&self, other: &Self) -> bool {
         other.range.start() <= self.range.start() && self.range.end() <= other.range.end()
     }
@@ -1837,6 +1779,162 @@ impl<'tcx> IntRange<'tcx> {
         // This is a brand new pattern, so we don't reuse `self.span`.
         Pat { ty: self.ty, span: DUMMY_SP, kind: Box::new(kind) }
     }
+
+    /// For exhaustive integer matching, some constructors are grouped within other constructors
+    /// (namely integer typed values are grouped within ranges). However, when specialising these
+    /// constructors, we want to be specialising for the underlying constructors (the integers), not
+    /// the groups (the ranges). Thus we need to split the groups up. Splitting them up naïvely would
+    /// mean creating a separate constructor for every single value in the range, which is clearly
+    /// impractical. However, observe that for some ranges of integers, the specialisation will be
+    /// identical across all values in that range (i.e., there are equivalence classes of ranges of
+    /// constructors based on their `U(S(c, P), S(c, p))` outcome). These classes are grouped by
+    /// the patterns that apply to them (in the matrix `P`). We can split the range whenever the
+    /// patterns that apply to that range (specifically: the patterns that *intersect* with that range)
+    /// change.
+    /// Our solution, therefore, is to split the range constructor into subranges at every single point
+    /// the group of intersecting patterns changes (using the method described below).
+    /// And voilà! We're testing precisely those ranges that we need to, without any exhaustive matching
+    /// on actual integers. The nice thing about this is that the number of subranges is linear in the
+    /// number of rows in the matrix (i.e., the number of cases in the `match` statement), so we don't
+    /// need to be worried about matching over gargantuan ranges.
+    ///
+    /// Essentially, given the first column of a matrix representing ranges, looking like the following:
+    ///
+    /// |------|  |----------| |-------|    ||
+    ///    |-------| |-------|            |----| ||
+    ///       |---------|
+    ///
+    /// We split the ranges up into equivalence classes so the ranges are no longer overlapping:
+    ///
+    /// |--|--|||-||||--||---|||-------|  |-|||| ||
+    ///
+    /// The logic for determining how to split the ranges is fairly straightforward: we calculate
+    /// boundaries for each interval range, sort them, then create constructors for each new interval
+    /// between every pair of boundary points. (This essentially sums up to performing the intuitive
+    /// merging operation depicted above.)
+    fn split<'p>(
+        &self,
+        pcx: PatCtxt<'_, 'p, 'tcx>,
+        hir_id: Option<HirId>,
+    ) -> SmallVec<[Constructor<'tcx>; 1]> {
+        let ty = pcx.ty;
+
+        /// Represents a border between 2 integers. Because the intervals spanning borders
+        /// must be able to cover every integer, we need to be able to represent
+        /// 2^128 + 1 such borders.
+        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
+        enum Border {
+            JustBefore(u128),
+            AfterMax,
+        }
+
+        // A function for extracting the borders of an integer interval.
+        fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> {
+            let (lo, hi) = r.range.into_inner();
+            let from = Border::JustBefore(lo);
+            let to = match hi.checked_add(1) {
+                Some(m) => Border::JustBefore(m),
+                None => Border::AfterMax,
+            };
+            vec![from, to].into_iter()
+        }
+
+        // Collect the span and range of all the intersecting ranges to lint on likely
+        // incorrect range patterns. (#63987)
+        let mut overlaps = vec![];
+        let row_len = pcx.matrix.patterns.get(0).map(|r| r.len()).unwrap_or(0);
+        // `borders` is the set of borders between equivalence classes: each equivalence
+        // class lies between 2 borders.
+        let row_borders = pcx
+            .matrix
+            .head_ctors(pcx.cx)
+            .filter_map(|ctor| ctor.as_int_range())
+            .filter_map(|range| {
+                let intersection = self.intersection(pcx.cx.tcx, &range);
+                let should_lint = self.suspicious_intersection(&range);
+                if let (Some(range), 1, true) = (&intersection, row_len, should_lint) {
+                    // FIXME: for now, only check for overlapping ranges on simple range
+                    // patterns. Otherwise with the current logic the following is detected
+                    // as overlapping:
+                    //   match (10u8, true) {
+                    //    (0 ..= 125, false) => {}
+                    //    (126 ..= 255, false) => {}
+                    //    (0 ..= 255, true) => {}
+                    //  }
+                    overlaps.push(range.clone());
+                }
+                intersection
+            })
+            .flat_map(range_borders);
+        let self_borders = range_borders(self.clone());
+        let mut borders: Vec<_> = row_borders.chain(self_borders).collect();
+        borders.sort_unstable();
+
+        self.lint_overlapping_patterns(pcx.cx.tcx, hir_id, ty, overlaps);
+
+        // We're going to iterate through every adjacent pair of borders, making sure that
+        // each represents an interval of nonnegative length, and convert each such
+        // interval into a constructor.
+        borders
+            .array_windows()
+            .filter_map(|&pair| match pair {
+                [Border::JustBefore(n), Border::JustBefore(m)] => {
+                    if n < m {
+                        Some(n..=(m - 1))
+                    } else {
+                        None
+                    }
+                }
+                [Border::JustBefore(n), Border::AfterMax] => Some(n..=u128::MAX),
+                [Border::AfterMax, _] => None,
+            })
+            .map(|range| IntRange { range, ty, span: pcx.span })
+            .map(IntRange)
+            .collect()
+    }
+
+    fn lint_overlapping_patterns(
+        &self,
+        tcx: TyCtxt<'tcx>,
+        hir_id: Option<HirId>,
+        ty: Ty<'tcx>,
+        overlaps: Vec<IntRange<'tcx>>,
+    ) {
+        if let (true, Some(hir_id)) = (!overlaps.is_empty(), hir_id) {
+            tcx.struct_span_lint_hir(
+                lint::builtin::OVERLAPPING_PATTERNS,
+                hir_id,
+                self.span,
+                |lint| {
+                    let mut err = lint.build("multiple patterns covering the same range");
+                    err.span_label(self.span, "overlapping patterns");
+                    for int_range in overlaps {
+                        // Use the real type for user display of the ranges:
+                        err.span_label(
+                            int_range.span,
+                            &format!(
+                                "this range overlaps on `{}`",
+                                IntRange { range: int_range.range, ty, span: DUMMY_SP }.to_pat(tcx),
+                            ),
+                        );
+                    }
+                    err.emit();
+                },
+            );
+        }
+    }
+
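
For reference, the kind of user code this lint reports looks like the following: two range patterns that meet on exactly one value, which is usually an off-by-one mistake. The example is written for illustration and is not taken from the test suite.

```rust
fn classify(x: u8) -> &'static str {
    match x {
        0..=10 => "low",
        // warning: multiple patterns covering the same range
        //          (the two ranges overlap on `10`)
        10..=20 => "mid",
        _ => "high",
    }
}

fn main() {
    // `10` is taken by the first arm, which is rarely what was intended.
    assert_eq!(classify(10), "low");
}
```
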
+    /// See `Constructor::is_covered_by`
+    fn is_covered_by<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool {
+        if self.intersection(pcx.cx.tcx, other).is_some() {
+            // Constructor splitting should ensure that all intersections we encounter are actually
+            // inclusions.
+            assert!(self.is_subrange(other));
+            true
+        } else {
+            false
+        }
+    }
 }
 
 /// Ignore spans when comparing, they don't carry semantic information as they are only for lints.
@@ -1847,39 +1945,86 @@ impl<'tcx> std::cmp::PartialEq for IntRange<'tcx> {
 }
 
 // A struct to compute a set of constructors equivalent to `all_ctors \ used_ctors`.
+#[derive(Debug)]
 struct MissingConstructors<'tcx> {
-    all_ctors: Vec<Constructor<'tcx>>,
+    all_ctors: SmallVec<[Constructor<'tcx>; 1]>,
     used_ctors: Vec<Constructor<'tcx>>,
 }
 
 impl<'tcx> MissingConstructors<'tcx> {
-    fn new(all_ctors: Vec<Constructor<'tcx>>, used_ctors: Vec<Constructor<'tcx>>) -> Self {
-        MissingConstructors { all_ctors, used_ctors }
-    }
+    fn new<'p>(pcx: PatCtxt<'_, 'p, 'tcx>) -> Self {
+        let used_ctors: Vec<Constructor<'_>> =
+            pcx.matrix.head_ctors(pcx.cx).cloned().filter(|c| !c.is_wildcard()).collect();
+        // Since `all_ctors` never contains wildcards, this won't recurse further.
+        let all_ctors =
+            all_constructors(pcx).into_iter().flat_map(|ctor| ctor.split(pcx, None)).collect();
 
-    fn into_inner(self) -> (Vec<Constructor<'tcx>>, Vec<Constructor<'tcx>>) {
-        (self.all_ctors, self.used_ctors)
+        MissingConstructors { all_ctors, used_ctors }
     }
 
-    fn is_empty(&self) -> bool {
-        self.iter().next().is_none()
-    }
-    /// Whether this contains all the constructors for the given type or only a
-    /// subset.
-    fn all_ctors_are_missing(&self) -> bool {
-        self.used_ctors.is_empty()
+    fn is_empty<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>) -> bool {
+        self.iter(pcx).next().is_none()
     }
 
     /// Iterate over all_ctors \ used_ctors
-    fn iter<'a>(&'a self) -> impl Iterator<Item = Constructor<'tcx>> + Captures<'a> {
-        self.all_ctors.iter().flat_map(move |req_ctor| req_ctor.subtract_ctors(&self.used_ctors))
+    fn iter<'a, 'p>(
+        &'a self,
+        pcx: PatCtxt<'a, 'p, 'tcx>,
+    ) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'p> {
+        self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.used_ctors))
     }
-}
 
-impl<'tcx> fmt::Debug for MissingConstructors<'tcx> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let ctors: Vec<_> = self.iter().collect();
-        write!(f, "{:?}", ctors)
+    /// List the patterns corresponding to the missing constructors. In some cases, instead of
+    /// listing all constructors of a given type, we prefer to simply report a wildcard.
+    fn report_patterns<'p>(
+        &self,
+        pcx: PatCtxt<'_, 'p, 'tcx>,
+        is_top_level: bool,
+    ) -> SmallVec<[Pat<'tcx>; 1]> {
+        // There are 2 ways we can report a witness here.
+        // Commonly, we can report all the "free"
+        // constructors as witnesses, e.g., if we have:
+        //
+        // ```
+        //     enum Direction { N, S, E, W }
+        //     let Direction::N = ...;
+        // ```
+        //
+        // we can report 3 witnesses: `S`, `E`, and `W`.
+        //
+        // However, there is a case where we don't want
+        // to do this and instead report a single `_` witness:
+        // if the user didn't actually specify a constructor
+        // in this arm, e.g., in
+        //
+        // ```
+        //     let x: (Direction, Direction, bool) = ...;
+        //     let (_, _, false) = x;
+        // ```
+        //
+        // we don't want to show all 16 possible witnesses
+        // `(<direction-1>, <direction-2>, true)` - we are
+        // satisfied with `(_, _, true)`. In this case,
+        // `used_ctors` is empty.
+        // The exception is: if we are at the top-level, for example in an empty match, we
+        // sometimes prefer reporting the list of constructors instead of just `_`.
+        let report_when_all_missing = is_top_level && !IntRange::is_integral(pcx.ty);
+        if self.used_ctors.is_empty() && !report_when_all_missing {
+            // All constructors are unused. Report only a wildcard
+            // rather than each individual constructor.
+            smallvec![Pat::wildcard_from_ty(pcx.ty)]
+        } else {
+            // Construct for each missing constructor a "wild" version of this
+            // constructor, that matches everything that can be built with
+            // it. For example, if `ctor` is a `Constructor::Variant` for
+            // `Option::Some`, we get the pattern `Some(_)`.
+            self.iter(pcx)
+                .map(|missing_ctor| {
+                    let fields = Fields::wildcards(pcx, &missing_ctor);
+                    missing_ctor.apply(pcx, fields)
+                })
+                .collect()
+        }
     }
 }
 
@@ -1906,7 +2051,7 @@ impl<'tcx> fmt::Debug for MissingConstructors<'tcx> {
 /// has one it must not be inserted into the matrix. This shouldn't be
 /// relied on for soundness.
 crate fn is_useful<'p, 'tcx>(
-    cx: &mut MatchCheckCtxt<'p, 'tcx>,
+    cx: &MatchCheckCtxt<'p, 'tcx>,
     matrix: &Matrix<'p, 'tcx>,
     v: &PatStack<'p, 'tcx>,
     witness_preference: WitnessPreference,
@@ -1987,206 +2132,75 @@ crate fn is_useful<'p, 'tcx>(
 
     // FIXME(Nadrieril): Hack to work around type normalization issues (see #72476).
     let ty = matrix.heads().next().map(|r| r.ty).unwrap_or(v.head().ty);
-    let pcx = PatCtxt { ty, span: v.head().span };
-
-    debug!("is_useful_expand_first_col: pcx={:#?}, expanding {:#?}", pcx, v.head());
-
-    let ret = if let Some(constructor) = pat_constructor(cx.tcx, cx.param_env, v.head()) {
-        debug!("is_useful - expanding constructor: {:#?}", constructor);
-        split_grouped_constructors(
-            cx.tcx,
-            cx.param_env,
-            pcx,
-            vec![constructor],
-            matrix,
-            pcx.span,
-            Some(hir_id),
-        )
-        .into_iter()
-        .map(|c| {
-            is_useful_specialized(
-                cx,
-                matrix,
-                v,
-                c,
-                pcx.ty,
-                witness_preference,
-                hir_id,
-                is_under_guard,
-            )
-        })
-        .find(|result| result.is_useful())
-        .unwrap_or(NotUseful)
-    } else {
-        debug!("is_useful - expanding wildcard");
+    let pcx = PatCtxt { cx, matrix, ty, span: v.head().span };
 
-        let used_ctors: Vec<Constructor<'_>> =
-            matrix.heads().filter_map(|p| pat_constructor(cx.tcx, cx.param_env, p)).collect();
-        debug!("is_useful_used_ctors = {:#?}", used_ctors);
-        // `all_ctors` are all the constructors for the given type, which
-        // should all be represented (or caught with the wild pattern `_`).
-        let all_ctors = all_constructors(cx, pcx);
-        debug!("is_useful_all_ctors = {:#?}", all_ctors);
-
-        // `missing_ctors` is the set of constructors from the same type as the
-        // first column of `matrix` that are matched only by wildcard patterns
-        // from the first column.
-        //
-        // Therefore, if there is some pattern that is unmatched by `matrix`,
-        // it will still be unmatched if the first constructor is replaced by
-        // any of the constructors in `missing_ctors`
+    debug!("is_useful_expand_first_col: ty={:#?}, expanding {:#?}", pcx.ty, v.head());
 
-        // Missing constructors are those that are not matched by any non-wildcard patterns in the
-        // current column. We only fully construct them on-demand, because they're rarely used and
-        // can be big.
-        let missing_ctors = MissingConstructors::new(all_ctors, used_ctors);
-
-        debug!("is_useful_missing_ctors.empty()={:#?}", missing_ctors.is_empty(),);
-
-        if missing_ctors.is_empty() {
-            let (all_ctors, _) = missing_ctors.into_inner();
-            split_grouped_constructors(cx.tcx, cx.param_env, pcx, all_ctors, matrix, DUMMY_SP, None)
-                .into_iter()
-                .map(|c| {
-                    is_useful_specialized(
-                        cx,
-                        matrix,
-                        v,
-                        c,
-                        pcx.ty,
-                        witness_preference,
-                        hir_id,
-                        is_under_guard,
-                    )
-                })
-                .find(|result| result.is_useful())
-                .unwrap_or(NotUseful)
-        } else {
-            let matrix = matrix.specialize_wildcard();
-            let v = v.to_tail();
+    let ret = v
+        .head_ctor(cx)
+        .split(pcx, Some(hir_id))
+        .into_iter()
+        .map(|ctor| {
+            // We cache the result of `Fields::wildcards` because it is used a lot.
+            let ctor_wild_subpatterns = Fields::wildcards(pcx, &ctor);
+            let matrix = pcx.matrix.specialize_constructor(pcx, &ctor, &ctor_wild_subpatterns);
+            let v = v.pop_head_constructor(&ctor_wild_subpatterns);
             let usefulness =
-                is_useful(cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false);
-
-            // In this case, there's at least one "free"
-            // constructor that is only matched against by
-            // wildcard patterns.
-            //
-            // There are 2 ways we can report a witness here.
-            // Commonly, we can report all the "free"
-            // constructors as witnesses, e.g., if we have:
-            //
-            // ```
-            //     enum Direction { N, S, E, W }
-            //     let Direction::N = ...;
-            // ```
-            //
-            // we can report 3 witnesses: `S`, `E`, and `W`.
-            //
-            // However, there is a case where we don't want
-            // to do this and instead report a single `_` witness:
-            // if the user didn't actually specify a constructor
-            // in this arm, e.g., in
-            //
-            // ```
-            //     let x: (Direction, Direction, bool) = ...;
-            //     let (_, _, false) = x;
-            // ```
-            //
-            // we don't want to show all 16 possible witnesses
-            // `(<direction-1>, <direction-2>, true)` - we are
-            // satisfied with `(_, _, true)`. In this case,
-            // `used_ctors` is empty.
-            // The exception is: if we are at the top-level, for example in an empty match, we
-            // sometimes prefer reporting the list of constructors instead of just `_`.
-            let report_ctors_rather_than_wildcard = is_top_level && !IntRange::is_integral(pcx.ty);
-            if missing_ctors.all_ctors_are_missing() && !report_ctors_rather_than_wildcard {
-                // All constructors are unused. Add a wild pattern
-                // rather than each individual constructor.
-                usefulness.apply_wildcard(pcx.ty)
-            } else {
-                // Construct for each missing constructor a "wild" version of this
-                // constructor, that matches everything that can be built with
-                // it. For example, if `ctor` is a `Constructor::Variant` for
-                // `Option::Some`, we get the pattern `Some(_)`.
-                usefulness.apply_missing_ctors(cx, pcx.ty, &missing_ctors)
-            }
-        }
-    };
+                is_useful(pcx.cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false);
+            usefulness.apply_constructor(pcx, &ctor, &ctor_wild_subpatterns, is_top_level)
+        })
+        .find(|result| result.is_useful())
+        .unwrap_or(NotUseful);
     debug!("is_useful::returns({:#?}, {:#?}) = {:?}", matrix, v, ret);
     ret
 }
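
Stripped of witnesses, guards, fields and or-patterns, the recursion that `is_useful` now performs (split the head constructor, specialize the matrix and `v`, recurse) can be modelled on tuples of booleans. The toy below illustrates the shape of the algorithm only; it is not the compiler's implementation.

```rust
#[derive(Clone, Copy, PartialEq)]
enum Pat {
    True,
    False,
    Wild,
}

/// Would a new row `v` match any value that `matrix` does not already cover?
fn is_useful(matrix: &[Vec<Pat>], v: &[Pat]) -> bool {
    // Base case: no columns left. `v` is useful iff no existing row covers everything.
    if v.is_empty() {
        return matrix.is_empty();
    }
    match v[0] {
        // A wildcard head stands for every constructor of the type: try them all.
        Pat::Wild => [Pat::True, Pat::False]
            .iter()
            .any(|&ctor| specialize_and_recurse(matrix, v, ctor)),
        ctor => specialize_and_recurse(matrix, v, ctor),
    }
}

/// The `S(c, P)` / `S(c, q)` step: keep the rows compatible with `ctor` and
/// drop their head (every constructor here has arity 0).
fn specialize_and_recurse(matrix: &[Vec<Pat>], v: &[Pat], ctor: Pat) -> bool {
    let specialized: Vec<Vec<Pat>> = matrix
        .iter()
        .filter(|row| row[0] == ctor || row[0] == Pat::Wild)
        .map(|row| row[1..].to_vec())
        .collect();
    is_useful(&specialized, &v[1..])
}

fn main() {
    use Pat::*;
    // match (bool, bool) { (true, _) => .., (_, false) => .. }
    let matrix = vec![vec![True, Wild], vec![Wild, False]];
    // `(false, true)` is not covered, so a wildcard arm would be useful...
    assert!(is_useful(&matrix, &[Wild, Wild]));
    // ...while an arm for `(true, true)` would be unreachable.
    assert!(!is_useful(&matrix, &[True, True]));
}
```
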
 
-/// A shorthand for the `U(S(c, P), S(c, q))` operation from the paper. I.e., `is_useful` applied
-/// to the specialised version of both the pattern matrix `P` and the new pattern `q`.
-fn is_useful_specialized<'p, 'tcx>(
-    cx: &mut MatchCheckCtxt<'p, 'tcx>,
-    matrix: &Matrix<'p, 'tcx>,
-    v: &PatStack<'p, 'tcx>,
-    ctor: Constructor<'tcx>,
-    ty: Ty<'tcx>,
-    witness_preference: WitnessPreference,
-    hir_id: HirId,
-    is_under_guard: bool,
-) -> Usefulness<'tcx> {
-    debug!("is_useful_specialized({:#?}, {:#?}, {:?})", v, ctor, ty);
-
-    // We cache the result of `Fields::wildcards` because it is used a lot.
-    let ctor_wild_subpatterns = Fields::wildcards(cx, &ctor, ty);
-    let matrix = matrix.specialize_constructor(cx, &ctor, &ctor_wild_subpatterns);
-    v.specialize_constructor(cx, &ctor, &ctor_wild_subpatterns, true)
-        .map(|v| is_useful(cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false))
-        .map(|u| u.apply_constructor(cx, &ctor, ty, &ctor_wild_subpatterns))
-        .unwrap_or(NotUseful)
-}
-
 /// Determines the constructor that the given pattern can be specialized to.
-/// Returns `None` in case of a catch-all, which can't be specialized.
+/// Catch-alls (wildcards and bindings) map to the `Wildcard` constructor.
-fn pat_constructor<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    param_env: ty::ParamEnv<'tcx>,
-    pat: &Pat<'tcx>,
-) -> Option<Constructor<'tcx>> {
-    // This MUST be kept in sync with `IntRange::from_pat`.
+fn pat_constructor<'p, 'tcx>(
+    cx: &MatchCheckCtxt<'p, 'tcx>,
+    pat: &'p Pat<'tcx>,
+) -> Constructor<'tcx> {
     match *pat.kind {
         PatKind::AscribeUserType { .. } => bug!(), // Handled by `expand_pattern`
-        PatKind::Binding { .. } | PatKind::Wild => None,
-        PatKind::Leaf { .. } | PatKind::Deref { .. } => Some(Single),
+        PatKind::Binding { .. } | PatKind::Wild => Wildcard,
+        PatKind::Leaf { .. } | PatKind::Deref { .. } => Single,
         PatKind::Variant { adt_def, variant_index, .. } => {
-            Some(Variant(adt_def.variants[variant_index].def_id))
+            Variant(adt_def.variants[variant_index].def_id)
         }
         PatKind::Constant { value } => {
-            if let Some(int_range) = IntRange::from_const(tcx, param_env, value, pat.span) {
-                Some(IntRange(int_range))
+            if let Some(int_range) = IntRange::from_const(cx.tcx, cx.param_env, value, pat.span) {
+                IntRange(int_range)
             } else {
                 match value.ty.kind() {
-                    ty::Float(_) => Some(FloatRange(value, value, RangeEnd::Included)),
-                    ty::Ref(_, t, _) if t.is_str() => Some(Str(value)),
+                    ty::Float(_) => FloatRange(value, value, RangeEnd::Included),
+                    ty::Ref(_, t, _) if t.is_str() => Str(value),
                     // All constants that can be structurally matched have already been expanded
                     // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
                     // opaque.
-                    _ => Some(Opaque),
+                    _ => Opaque,
                 }
             }
         }
         PatKind::Range(PatRange { lo, hi, end }) => {
             let ty = lo.ty;
             if let Some(int_range) = IntRange::from_range(
-                tcx,
-                lo.eval_bits(tcx, param_env, lo.ty),
-                hi.eval_bits(tcx, param_env, hi.ty),
+                cx.tcx,
+                lo.eval_bits(cx.tcx, cx.param_env, lo.ty),
+                hi.eval_bits(cx.tcx, cx.param_env, hi.ty),
                 ty,
                 &end,
                 pat.span,
             ) {
-                Some(IntRange(int_range))
+                IntRange(int_range)
             } else {
-                Some(FloatRange(lo, hi, end))
+                FloatRange(lo, hi, end)
             }
         }
         PatKind::Array { ref prefix, ref slice, ref suffix }
         | PatKind::Slice { ref prefix, ref slice, ref suffix } => {
             let array_len = match pat.ty.kind() {
-                ty::Array(_, length) => Some(length.eval_usize(tcx, param_env)),
+                ty::Array(_, length) => Some(length.eval_usize(cx.tcx, cx.param_env)),
                 ty::Slice(_) => None,
                 _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty),
             };
@@ -2194,458 +2208,8 @@ fn pat_constructor<'tcx>(
             let suffix = suffix.len() as u64;
             let kind =
                 if slice.is_some() { VarLen(prefix, suffix) } else { FixedLen(prefix + suffix) };
-            Some(Slice(Slice { array_len, kind }))
+            Slice(Slice::new(array_len, kind))
         }
         PatKind::Or { .. } => bug!("Or-pattern should have been expanded earlier on."),
     }
 }
-
-/// For exhaustive integer matching, some constructors are grouped within other constructors
-/// (namely integer typed values are grouped within ranges). However, when specialising these
-/// constructors, we want to be specialising for the underlying constructors (the integers), not
-/// the groups (the ranges). Thus we need to split the groups up. Splitting them up naïvely would
-/// mean creating a separate constructor for every single value in the range, which is clearly
-/// impractical. However, observe that for some ranges of integers, the specialisation will be
-/// identical across all values in that range (i.e., there are equivalence classes of ranges of
-/// constructors based on their `is_useful_specialized` outcome). These classes are grouped by
-/// the patterns that apply to them (in the matrix `P`). We can split the range whenever the
-/// patterns that apply to that range (specifically: the patterns that *intersect* with that range)
-/// change.
-/// Our solution, therefore, is to split the range constructor into subranges at every single point
-/// the group of intersecting patterns changes (using the method described below).
-/// And voilà! We're testing precisely those ranges that we need to, without any exhaustive matching
-/// on actual integers. The nice thing about this is that the number of subranges is linear in the
-/// number of rows in the matrix (i.e., the number of cases in the `match` statement), so we don't
-/// need to be worried about matching over gargantuan ranges.
-///
-/// Essentially, given the first column of a matrix representing ranges, looking like the following:
-///
-/// |------|  |----------| |-------|    ||
-///    |-------| |-------|            |----| ||
-///       |---------|
-///
-/// We split the ranges up into equivalence classes so the ranges are no longer overlapping:
-///
-/// |--|--|||-||||--||---|||-------|  |-|||| ||
-///
-/// The logic for determining how to split the ranges is fairly straightforward: we calculate
-/// boundaries for each interval range, sort them, then create constructors for each new interval
-/// between every pair of boundary points. (This essentially sums up to performing the intuitive
-/// merging operation depicted above.)
-///
-/// `hir_id` is `None` when we're evaluating the wildcard pattern; in that case, do not lint for
-/// overlapping ranges.
-///
-/// This also splits variable-length slices into fixed-length slices.
-fn split_grouped_constructors<'p, 'tcx>(
-    tcx: TyCtxt<'tcx>,
-    param_env: ty::ParamEnv<'tcx>,
-    pcx: PatCtxt<'tcx>,
-    ctors: Vec<Constructor<'tcx>>,
-    matrix: &Matrix<'p, 'tcx>,
-    span: Span,
-    hir_id: Option<HirId>,
-) -> Vec<Constructor<'tcx>> {
-    let ty = pcx.ty;
-    let mut split_ctors = Vec::with_capacity(ctors.len());
-    debug!("split_grouped_constructors({:#?}, {:#?})", matrix, ctors);
-
-    for ctor in ctors.into_iter() {
-        match ctor {
-            IntRange(ctor_range) if ctor_range.treat_exhaustively(tcx) => {
-                // Fast-track if the range is trivial. In particular, don't do the overlapping
-                // ranges check.
-                if ctor_range.is_singleton() {
-                    split_ctors.push(IntRange(ctor_range));
-                    continue;
-                }
-
-                /// Represents a border between 2 integers. Because the intervals spanning borders
-                /// must be able to cover every integer, we need to be able to represent
-                /// 2^128 + 1 such borders.
-                #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
-                enum Border {
-                    JustBefore(u128),
-                    AfterMax,
-                }
-
-                // A function for extracting the borders of an integer interval.
-                fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> {
-                    let (lo, hi) = r.range.into_inner();
-                    let from = Border::JustBefore(lo);
-                    let to = match hi.checked_add(1) {
-                        Some(m) => Border::JustBefore(m),
-                        None => Border::AfterMax,
-                    };
-                    vec![from, to].into_iter()
-                }
-
-                // Collect the span and range of all the intersecting ranges to lint on likely
-                // incorrect range patterns. (#63987)
-                let mut overlaps = vec![];
-                // `borders` is the set of borders between equivalence classes: each equivalence
-                // class lies between 2 borders.
-                let row_borders = matrix
-                    .patterns
-                    .iter()
-                    .flat_map(|row| {
-                        IntRange::from_pat(tcx, param_env, row.head()).map(|r| (r, row.len()))
-                    })
-                    .flat_map(|(range, row_len)| {
-                        let intersection = ctor_range.intersection(tcx, &range);
-                        let should_lint = ctor_range.suspicious_intersection(&range);
-                        if let (Some(range), 1, true) = (&intersection, row_len, should_lint) {
-                            // FIXME: for now, only check for overlapping ranges on simple range
-                            // patterns. Otherwise with the current logic the following is detected
-                            // as overlapping:
-                            //   match (10u8, true) {
-                            //    (0 ..= 125, false) => {}
-                            //    (126 ..= 255, false) => {}
-                            //    (0 ..= 255, true) => {}
-                            //  }
-                            overlaps.push(range.clone());
-                        }
-                        intersection
-                    })
-                    .flat_map(range_borders);
-                let ctor_borders = range_borders(ctor_range.clone());
-                let mut borders: Vec<_> = row_borders.chain(ctor_borders).collect();
-                borders.sort_unstable();
-
-                lint_overlapping_patterns(tcx, hir_id, ctor_range, ty, overlaps);
-
-                // We're going to iterate through every adjacent pair of borders, making sure that
-                // each represents an interval of nonnegative length, and convert each such
-                // interval into a constructor.
-                split_ctors.extend(
-                    borders
-                        .array_windows()
-                        .filter_map(|&pair| match pair {
-                            [Border::JustBefore(n), Border::JustBefore(m)] => {
-                                if n < m {
-                                    Some(IntRange { range: n..=(m - 1), ty, span })
-                                } else {
-                                    None
-                                }
-                            }
-                            [Border::JustBefore(n), Border::AfterMax] => {
-                                Some(IntRange { range: n..=u128::MAX, ty, span })
-                            }
-                            [Border::AfterMax, _] => None,
-                        })
-                        .map(IntRange),
-                );
-            }
-            Slice(Slice { array_len, kind: VarLen(self_prefix, self_suffix) }) => {
-                // The exhaustiveness-checking paper does not include any details on
-                // checking variable-length slice patterns. However, they are matched
-                // by an infinite collection of fixed-length array patterns.
-                //
-                // Checking the infinite set directly would take an infinite amount
-                // of time. However, it turns out that for each finite set of
-                // patterns `P`, all sufficiently large array lengths are equivalent:
-                //
-                // Each slice `s` with a "sufficiently-large" length `l ≥ L` that applies
-                // to exactly the subset `Pₜ` of `P` can be transformed to a slice
-                // `sₘ` for each sufficiently-large length `m` that applies to exactly
-                // the same subset of `P`.
-                //
-                // Because of that, each witness for reachability-checking from one
-                // of the sufficiently-large lengths can be transformed to an
-                // equally-valid witness from any other length, so we only have
-                // to check slice lengths from the "minimal sufficiently-large length"
-                // and below.
-                //
-                // Note that the fact that there is a *single* `sₘ` for each `m`
-                // not depending on the specific pattern in `P` is important: if
-                // you look at the pair of patterns
-                //     `[true, ..]`
-                //     `[.., false]`
-                // Then any slice of length ≥1 that matches one of these two
-                // patterns can be trivially turned to a slice of any
-                // other length ≥1 that matches them, and vice-versa -
-                // but the slice of length 2 `[false, true]` that matches neither
-                // of these patterns can't be turned into a slice of length 1 that
-                // matches neither of these patterns, so we have to consider
-                // slices from length 2 there.
-                //
-                // Now, to see that that length exists and find it, observe that slice
-                // patterns are either "fixed-length" patterns (`[_, _, _]`) or
-                // "variable-length" patterns (`[_, .., _]`).
-                //
-                // For fixed-length patterns, all slices with lengths *longer* than
-                // the pattern's length have the same outcome (of not matching), so
-                // as long as `L` is greater than the pattern's length we can pick
-                // any `sₘ` from that length and get the same result.
-                //
-                // For variable-length patterns, the situation is more complicated,
-                // because as seen above the precise value of `sₘ` matters.
-                //
-                // However, for each variable-length pattern `p` with a prefix of length
-                // `plₚ` and suffix of length `slₚ`, only the first `plₚ` and the last
-                // `slₚ` elements are examined.
-                //
-                // Therefore, as long as `L` is positive (to avoid concerns about empty
-                // types), all elements after the maximum prefix length and before
-                // the maximum suffix length are not examined by any variable-length
-                // pattern, and therefore can be added/removed without affecting
-                // them - creating equivalent patterns from any sufficiently-large
-                // length.
-                //
-                // Of course, if fixed-length patterns exist, we must be sure
-                // that our length is large enough to miss them all, so
-                // we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`
-                //
-                // for example, with the above pair of patterns, all elements
-                // but the first and last can be added/removed, so any
-                // witness of length ≥2 (say, `[false, false, true]`) can be
-                // turned to a witness from any other length ≥2.
-
-                let mut max_prefix_len = self_prefix;
-                let mut max_suffix_len = self_suffix;
-                let mut max_fixed_len = 0;
-
-                let head_ctors =
-                    matrix.heads().filter_map(|pat| pat_constructor(tcx, param_env, pat));
-                for ctor in head_ctors {
-                    if let Slice(slice) = ctor {
-                        match slice.pattern_kind() {
-                            FixedLen(len) => {
-                                max_fixed_len = cmp::max(max_fixed_len, len);
-                            }
-                            VarLen(prefix, suffix) => {
-                                max_prefix_len = cmp::max(max_prefix_len, prefix);
-                                max_suffix_len = cmp::max(max_suffix_len, suffix);
-                            }
-                        }
-                    }
-                }
-
-                // For diagnostics, we keep the prefix and suffix lengths separate, so in the case
-                // where `max_fixed_len + 1` is the largest, we adapt `max_prefix_len` accordingly,
-                // so that `L = max_prefix_len + max_suffix_len`.
-                if max_fixed_len + 1 >= max_prefix_len + max_suffix_len {
-                    // The subtraction can't overflow thanks to the above check.
-                    // The new `max_prefix_len` is also guaranteed to be larger than its previous
-                    // value.
-                    max_prefix_len = max_fixed_len + 1 - max_suffix_len;
-                }
-
-                match array_len {
-                    Some(len) => {
-                        let kind = if max_prefix_len + max_suffix_len < len {
-                            VarLen(max_prefix_len, max_suffix_len)
-                        } else {
-                            FixedLen(len)
-                        };
-                        split_ctors.push(Slice(Slice { array_len, kind }));
-                    }
-                    None => {
-                        // `ctor` originally covered the range `(self_prefix +
-                        // self_suffix..infinity)`. We now split it into two: lengths smaller than
-                        // `max_prefix_len + max_suffix_len` are treated independently as
-                        // fixed-lengths slices, and lengths above are captured by a final VarLen
-                        // constructor.
-                        split_ctors.extend(
-                            (self_prefix + self_suffix..max_prefix_len + max_suffix_len)
-                                .map(|len| Slice(Slice { array_len, kind: FixedLen(len) })),
-                        );
-                        split_ctors.push(Slice(Slice {
-                            array_len,
-                            kind: VarLen(max_prefix_len, max_suffix_len),
-                        }));
-                    }
-                }
-            }
-            // Any other constructor can be used unchanged.
-            _ => split_ctors.push(ctor),
-        }
-    }
-
-    debug!("split_grouped_constructors(..)={:#?}", split_ctors);
-    split_ctors
-}
-
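
The slice-length argument in the removed comment above (whose variable-length handling now happens behind the `split` call on the head constructor in `is_useful`) can be condensed into a small standalone computation. `SliceKind` and `split_var_len` are simplified stand-ins for illustration.

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
enum SliceKind {
    FixedLen(u64),
    /// Prefix length and suffix length of a `[p0, .., s0]`-style pattern.
    VarLen(u64, u64),
}

/// Split a variable-length slice constructor for a slice of unknown length,
/// given the other slice patterns in the column.
fn split_var_len(self_prefix: u64, self_suffix: u64, column: &[SliceKind]) -> Vec<SliceKind> {
    let mut max_prefix = self_prefix;
    let mut max_suffix = self_suffix;
    let mut max_fixed = 0;
    for &kind in column {
        match kind {
            SliceKind::FixedLen(len) => max_fixed = max_fixed.max(len),
            SliceKind::VarLen(p, s) => {
                max_prefix = max_prefix.max(p);
                max_suffix = max_suffix.max(s);
            }
        }
    }
    // Make sure L = max_prefix + max_suffix also exceeds every fixed length.
    if max_fixed + 1 >= max_prefix + max_suffix {
        max_prefix = max_fixed + 1 - max_suffix;
    }
    // Lengths below L must be inspected one by one; L and above all behave alike.
    (self_prefix + self_suffix..max_prefix + max_suffix)
        .map(SliceKind::FixedLen)
        .chain(std::iter::once(SliceKind::VarLen(max_prefix, max_suffix)))
        .collect()
}

fn main() {
    // Column contains `[true, ..]` (prefix 1) and `[.., false]` (suffix 1);
    // splitting `[..]` gives lengths 0 and 1 individually, plus "2 or more".
    let split = split_var_len(0, 0, &[SliceKind::VarLen(1, 0), SliceKind::VarLen(0, 1)]);
    assert_eq!(
        split,
        [SliceKind::FixedLen(0), SliceKind::FixedLen(1), SliceKind::VarLen(1, 1)]
    );
}
```
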
-fn lint_overlapping_patterns<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    hir_id: Option<HirId>,
-    ctor_range: IntRange<'tcx>,
-    ty: Ty<'tcx>,
-    overlaps: Vec<IntRange<'tcx>>,
-) {
-    if let (true, Some(hir_id)) = (!overlaps.is_empty(), hir_id) {
-        tcx.struct_span_lint_hir(
-            lint::builtin::OVERLAPPING_PATTERNS,
-            hir_id,
-            ctor_range.span,
-            |lint| {
-                let mut err = lint.build("multiple patterns covering the same range");
-                err.span_label(ctor_range.span, "overlapping patterns");
-                for int_range in overlaps {
-                    // Use the real type for user display of the ranges:
-                    err.span_label(
-                        int_range.span,
-                        &format!(
-                            "this range overlaps on `{}`",
-                            IntRange { range: int_range.range, ty, span: DUMMY_SP }.to_pat(tcx),
-                        ),
-                    );
-                }
-                err.emit();
-            },
-        );
-    }
-}
-
-/// This is the main specialization step. It expands the pattern
-/// into `arity` patterns based on the constructor. For most patterns, the step is trivial,
-/// for instance tuple patterns are flattened and box patterns expand into their inner pattern.
-/// Returns `None` if the pattern does not have the given constructor.
-///
-/// OTOH, slice patterns with a subslice pattern (tail @ ..) can be expanded into multiple
-/// different patterns.
-/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
-/// fields filled with wild patterns.
-///
-/// This is roughly the inverse of `Constructor::apply`.
-fn specialize_one_pattern<'p, 'tcx>(
-    cx: &mut MatchCheckCtxt<'p, 'tcx>,
-    pat: &'p Pat<'tcx>,
-    constructor: &Constructor<'tcx>,
-    ctor_wild_subpatterns: &Fields<'p, 'tcx>,
-    is_its_own_ctor: bool, // Whether `ctor` is known to be derived from `pat`
-) -> Option<Fields<'p, 'tcx>> {
-    if let NonExhaustive = constructor {
-        // Only a wildcard pattern can match the special extra constructor.
-        if !pat.is_wildcard() {
-            return None;
-        }
-        return Some(Fields::empty());
-    }
-
-    if let Opaque = constructor {
-        // Only a wildcard pattern can match an opaque constant, unless we're specializing the
-        // value against its own constructor. That happens when we call
-        // `v.specialize_constructor(ctor)` with `ctor` obtained from `pat_constructor(v.head())`.
-        // For example, in the following match, when we are dealing with the third branch, we will
-        // specialize with an `Opaque` ctor. We want to ignore the second branch because opaque
-        // constants should not be inspected, but we don't want to ignore the current (third)
-        // branch, as that would cause us to always conclude that such a branch is unreachable.
-        // ```rust
-        // #[derive(PartialEq)]
-        // struct Foo(i32);
-        // impl Eq for Foo {}
-        // const FOO: Foo = Foo(42);
-        //
-        // match (Foo(0), true) {
-        //     (_, true) => {}
-        //     (FOO, true) => {}
-        //     (FOO, false) => {}
-        // }
-        // ```
-        if is_its_own_ctor || pat.is_wildcard() {
-            return Some(Fields::empty());
-        } else {
-            return None;
-        }
-    }
-
-    let result = match *pat.kind {
-        PatKind::AscribeUserType { .. } => bug!(), // Handled by `expand_pattern`
-
-        PatKind::Binding { .. } | PatKind::Wild => Some(ctor_wild_subpatterns.clone()),
-
-        PatKind::Variant { adt_def, variant_index, ref subpatterns, .. } => {
-            let variant = &adt_def.variants[variant_index];
-            if constructor != &Variant(variant.def_id) {
-                return None;
-            }
-            Some(ctor_wild_subpatterns.replace_with_fieldpats(subpatterns))
-        }
-
-        PatKind::Leaf { ref subpatterns } => {
-            Some(ctor_wild_subpatterns.replace_with_fieldpats(subpatterns))
-        }
-
-        PatKind::Deref { ref subpattern } => Some(Fields::from_single_pattern(subpattern)),
-
-        PatKind::Constant { .. } | PatKind::Range { .. } => {
-            match constructor {
-                IntRange(ctor) => {
-                    let pat = IntRange::from_pat(cx.tcx, cx.param_env, pat)?;
-                    ctor.intersection(cx.tcx, &pat)?;
-                    // Constructor splitting should ensure that all intersections we encounter
-                    // are actually inclusions.
-                    assert!(ctor.is_subrange(&pat));
-                }
-                FloatRange(ctor_from, ctor_to, ctor_end) => {
-                    let (pat_from, pat_to, pat_end, ty) = match *pat.kind {
-                        PatKind::Constant { value } => (value, value, RangeEnd::Included, value.ty),
-                        PatKind::Range(PatRange { lo, hi, end }) => (lo, hi, end, lo.ty),
-                        _ => unreachable!(), // This is ensured by the branch we're in
-                    };
-                    let to = compare_const_vals(cx.tcx, ctor_to, pat_to, cx.param_env, ty)?;
-                    let from = compare_const_vals(cx.tcx, ctor_from, pat_from, cx.param_env, ty)?;
-                    let intersects = (from == Ordering::Greater || from == Ordering::Equal)
-                        && (to == Ordering::Less
-                            || (pat_end == *ctor_end && to == Ordering::Equal));
-                    if !intersects {
-                        return None;
-                    }
-                }
-                Str(ctor_value) => {
-                    let pat_value = match *pat.kind {
-                        PatKind::Constant { value } => value,
-                        _ => span_bug!(
-                            pat.span,
-                            "unexpected range pattern {:?} for constant value ctor",
-                            pat
-                        ),
-                    };
-
-                    // FIXME: there's probably a more direct way of comparing for equality
-                    if compare_const_vals(cx.tcx, ctor_value, pat_value, cx.param_env, pat.ty)?
-                        != Ordering::Equal
-                    {
-                        return None;
-                    }
-                }
-                _ => {
-                    // If we reach here, we must be trying to inspect an opaque constant. Thus we skip
-                    // the row.
-                    return None;
-                }
-            }
-            Some(Fields::empty())
-        }
-
-        PatKind::Array { ref prefix, ref slice, ref suffix }
-        | PatKind::Slice { ref prefix, ref slice, ref suffix } => match *constructor {
-            Slice(_) => {
-                // Number of subpatterns for this pattern
-                let pat_len = prefix.len() + suffix.len();
-                // Number of subpatterns for this constructor
-                let arity = ctor_wild_subpatterns.len();
-
-                if (slice.is_none() && arity != pat_len) || pat_len > arity {
-                    return None;
-                }
-
-                // Replace the prefix and the suffix with the given patterns, leaving wildcards in
-                // the middle if there was a subslice pattern `..`.
-                let prefix = prefix.iter().enumerate();
-                let suffix = suffix.iter().enumerate().map(|(i, p)| (arity - suffix.len() + i, p));
-                Some(ctor_wild_subpatterns.replace_fields_indexed(prefix.chain(suffix)))
-            }
-            _ => span_bug!(pat.span, "unexpected ctor {:?} for slice pat", constructor),
-        },
-
-        PatKind::Or { .. } => bug!("Or-pattern should have been expanded earlier on."),
-    };
-    debug!(
-        "specialize({:#?}, {:#?}, {:#?}) = {:#?}",
-        pat, constructor, ctor_wild_subpatterns, result
-    );
-
-    result
-}
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 39e1256a578..f13a4329d3b 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -1,13 +1,14 @@
 use super::ty::AllowPlus;
-use super::{BlockMode, Parser, PathStyle, SemiColonMode, SeqSep, TokenExpectType, TokenType};
+use super::TokenType;
+use super::{BlockMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenExpectType};
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Lit, LitKind, TokenKind};
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
-    self as ast, AngleBracketedArgs, AttrVec, BinOpKind, BindingMode, Block, BlockCheckMode, Expr,
-    ExprKind, Item, ItemKind, Mutability, Param, Pat, PatKind, Path, PathSegment, QSelf, Ty,
-    TyKind,
+    self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode,
+    Block, BlockCheckMode, Expr, ExprKind, GenericArg, Item, ItemKind, Mutability, Param, Pat,
+    PatKind, Path, PathSegment, QSelf, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
@@ -19,7 +20,8 @@ use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
 
 use tracing::{debug, trace};
 
-const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
+const TURBOFISH_SUGGESTION_STR: &str =
+    "use `::<...>` instead of `<...>` to specify type or const arguments";
 
 /// Creates a placeholder argument.
 pub(super) fn dummy_arg(ident: Ident) -> Param {
@@ -658,7 +660,7 @@ impl<'a> Parser<'a> {
                                 Ok(_) => {
                                     e.span_suggestion_verbose(
                                         binop.span.shrink_to_lo(),
-                                        "use `::<...>` instead of `<...>` to specify type arguments",
+                                        TURBOFISH_SUGGESTION_STR,
                                         "::".to_string(),
                                         Applicability::MaybeIncorrect,
                                     );
@@ -813,7 +815,7 @@ impl<'a> Parser<'a> {
                 let suggest = |err: &mut DiagnosticBuilder<'_>| {
                     err.span_suggestion_verbose(
                         op.span.shrink_to_lo(),
-                        TURBOFISH,
+                        TURBOFISH_SUGGESTION_STR,
                         "::".to_string(),
                         Applicability::MaybeIncorrect,
                     );
@@ -887,7 +889,7 @@ impl<'a> Parser<'a> {
                         {
                             // All we know is that this is `foo < bar >` and *nothing* else. Try to
                             // be helpful, but don't attempt to recover.
-                            err.help(TURBOFISH);
+                            err.help(TURBOFISH_SUGGESTION_STR);
                             err.help("or use `(...)` if you meant to specify fn arguments");
                         }
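
(For orientation, a minimal sketch of the situation the reworded suggestion targets; the snippet compiles as written, and the commented-out line is the erroneous form.)

    fn main() {
        // Turbofish syntax: `::` before the angle brackets disambiguates generic
        // arguments from the `<` and `>` comparison operators.
        let v = (0..10).collect::<Vec<i32>>();
        // let w = (0..10).collect<Vec<i32>>(); // error; the help text now reads
        //                                      // "... to specify type or const arguments"
        println!("{:?}", v);
    }
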
 
@@ -1556,14 +1558,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
-        let token_str = super::token_descr(&self.token);
-        let msg = &format!("expected `;` or `{{`, found {}", token_str);
-        let mut err = self.struct_span_err(self.token.span, msg);
-        err.span_label(self.token.span, "expected `;` or `{`");
-        Err(err)
-    }
-
     pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
         if let token::DocComment(..) = self.token.kind {
             self.struct_span_err(
@@ -1780,4 +1774,142 @@ impl<'a> Parser<'a> {
             }
         }
     }
+
+    /// Handle encountering a symbol in a generic argument list that is not a `,` or `>`. In this
+    /// case, we emit an error and try to suggest enclosing a const argument in braces if it looks
+    /// like the user has forgotten them.
+    pub fn handle_ambiguous_unbraced_const_arg(
+        &mut self,
+        args: &mut Vec<AngleBracketedArg>,
+    ) -> PResult<'a, bool> {
+        // If we haven't encountered a closing `>`, then the argument is malformed.
+        // It's likely that the user has written a const expression without enclosing it
+        // in braces, so we try to recover here.
+        let arg = args.pop().unwrap();
+        // FIXME: for some reason, using `unexpected` or `expected_one_of_not_found` has
+        // adverse side-effects on subsequent errors and seems to advance the parser.
+        // We report this error here exclusively so that a `const` expression can be
+        // recovered from the current parser state, even if it is followed by more
+        // arguments after a comma.
+        let mut err = self.struct_span_err(
+            self.token.span,
+            &format!("expected one of `,` or `>`, found {}", super::token_descr(&self.token)),
+        );
+        err.span_label(self.token.span, "expected one of `,` or `>`");
+        match self.recover_const_arg(arg.span(), err) {
+            Ok(arg) => {
+                args.push(AngleBracketedArg::Arg(arg));
+                if self.eat(&token::Comma) {
+                    return Ok(true); // Continue
+                }
+            }
+            Err(mut err) => {
+                args.push(arg);
+                // We will emit a more generic error later.
+                err.delay_as_bug();
+            }
+        }
+        return Ok(false); // Don't continue.
+    }
+
+    /// Handle a generic const argument that had not been enclosed in braces, and suggest enclosing
+    /// it in braces. In this situation, unlike in `handle_ambiguous_unbraced_const_arg`, this is
+    /// almost certainly a const argument, so we always offer a suggestion.
+    pub fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
+        let start = self.token.span;
+        let expr = self.parse_expr_res(Restrictions::CONST_EXPR, None).map_err(|mut err| {
+            err.span_label(
+                start.shrink_to_lo(),
+                "while parsing a const generic argument starting here",
+            );
+            err
+        })?;
+        if !self.expr_is_valid_const_arg(&expr) {
+            self.struct_span_err(
+                expr.span,
+                "expressions must be enclosed in braces to be used as const generic \
+                    arguments",
+            )
+            .multipart_suggestion(
+                "enclose the `const` expression in braces",
+                vec![
+                    (expr.span.shrink_to_lo(), "{ ".to_string()),
+                    (expr.span.shrink_to_hi(), " }".to_string()),
+                ],
+                Applicability::MachineApplicable,
+            )
+            .emit();
+        }
+        Ok(expr)
+    }
+
+    /// Try to recover from possible generic const argument without `{` and `}`.
+    ///
+    /// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
+    /// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
+    /// if we think that the resulting expression would be well-formed.
+    pub fn recover_const_arg(
+        &mut self,
+        start: Span,
+        mut err: DiagnosticBuilder<'a>,
+    ) -> PResult<'a, GenericArg> {
+        let is_op = AssocOp::from_token(&self.token)
+            .and_then(|op| {
+                if let AssocOp::Greater
+                | AssocOp::Less
+                | AssocOp::ShiftRight
+                | AssocOp::GreaterEqual
+                // Don't recover from `foo::<bar = baz>`, because this could be an attempt to
+                // assign a value to a defaulted generic parameter.
+                | AssocOp::Assign
+                | AssocOp::AssignOp(_) = op
+                {
+                    None
+                } else {
+                    Some(op)
+                }
+            })
+            .is_some();
+        // This will be true when a trait object type `Foo +` or a path which was a `const fn` with
+        // type params has been parsed.
+        let was_op =
+            matches!(self.prev_token.kind, token::BinOp(token::Plus | token::Shr) | token::Gt);
+        if !is_op && !was_op {
+            // We perform these checks and early return to avoid taking a snapshot unnecessarily.
+            return Err(err);
+        }
+        let snapshot = self.clone();
+        if is_op {
+            self.bump();
+        }
+        match self.parse_expr_res(Restrictions::CONST_EXPR, None) {
+            Ok(expr) => {
+                if token::Comma == self.token.kind || self.token.kind.should_end_const_arg() {
+                    // Avoid the following output by checking that we consumed a full const arg:
+                    // help: expressions must be enclosed in braces to be used as const generic
+                    //       arguments
+                    //    |
+                    // LL |     let sr: Vec<{ (u32, _, _) = vec![] };
+                    //    |                 ^                      ^
+                    err.multipart_suggestion(
+                        "expressions must be enclosed in braces to be used as const generic \
+                         arguments",
+                        vec![
+                            (start.shrink_to_lo(), "{ ".to_string()),
+                            (expr.span.shrink_to_hi(), " }".to_string()),
+                        ],
+                        Applicability::MaybeIncorrect,
+                    );
+                    let value = self.mk_expr_err(start.to(expr.span));
+                    err.emit();
+                    return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
+                }
+            }
+            Err(mut err) => {
+                err.cancel();
+            }
+        }
+        *self = snapshot;
+        Err(err)
+    }
 }
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index c44e00f861d..c2a13d4b0de 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -359,6 +359,18 @@ impl<'a> Parser<'a> {
     /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
     fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
         let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) {
+            // When parsing const expressions, stop parsing when encountering `>`.
+            (
+                Some(
+                    AssocOp::ShiftRight
+                    | AssocOp::Greater
+                    | AssocOp::GreaterEqual
+                    | AssocOp::AssignOp(token::BinOpToken::Shr),
+                ),
+                _,
+            ) if self.restrictions.contains(Restrictions::CONST_EXPR) => {
+                return None;
+            }
             (Some(op), _) => (op, self.token.span),
             (None, Some((Ident { name: sym::and, span }, false))) => {
                 self.error_bad_logical_op("and", "&&", "conjunction");
@@ -819,7 +831,7 @@ impl<'a> Parser<'a> {
         self.struct_span_err(self.token.span, &format!("unexpected token: `{}`", actual)).emit();
     }
 
-    // We need and identifier or integer, but the next token is a float.
+    // We need an identifier or integer, but the next token is a float.
     // Break the float into components to extract the identifier or integer.
     // FIXME: With current `TokenCursor` it's hard to break tokens into more than 2
     // parts unless those parts are processed immediately. `TokenCursor` should either
@@ -1715,7 +1727,7 @@ impl<'a> Parser<'a> {
         let lo = self.prev_token.span;
         let pat = self.parse_top_pat(GateOr::No)?;
         self.expect(&token::Eq)?;
-        let expr = self.with_res(Restrictions::NO_STRUCT_LITERAL, |this| {
+        let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
             this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
         })?;
         let span = lo.to(expr.span);
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index e57a2e42b5d..39d4875f37b 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1538,7 +1538,7 @@ impl<'a> Parser<'a> {
         generics.where_clause = self.parse_where_clause()?; // `where T: Ord`
 
         let mut sig_hi = self.prev_token.span;
-        let body = self.parse_fn_body(attrs, &mut sig_hi)?; // `;` or `{ ... }`.
+        let body = self.parse_fn_body(attrs, &ident, &mut sig_hi)?; // `;` or `{ ... }`.
         let fn_sig_span = sig_lo.to(sig_hi);
         Ok((ident, FnSig { header, decl, span: fn_sig_span }, generics, body))
     }
@@ -1549,12 +1549,12 @@ impl<'a> Parser<'a> {
     fn parse_fn_body(
         &mut self,
         attrs: &mut Vec<Attribute>,
+        ident: &Ident,
         sig_hi: &mut Span,
     ) -> PResult<'a, Option<P<Block>>> {
-        let (inner_attrs, body) = if self.check(&token::Semi) {
+        let (inner_attrs, body) = if self.eat(&token::Semi) {
             // Include the trailing semicolon in the span of the signature
-            *sig_hi = self.token.span;
-            self.bump(); // `;`
+            *sig_hi = self.prev_token.span;
             (Vec::new(), None)
         } else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() {
             self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))?
@@ -1574,7 +1574,21 @@ impl<'a> Parser<'a> {
                 .emit();
             (Vec::new(), Some(self.mk_block_err(span)))
         } else {
-            return self.expected_semi_or_open_brace();
+            if let Err(mut err) =
+                self.expected_one_of_not_found(&[], &[token::Semi, token::OpenDelim(token::Brace)])
+            {
+                if self.token.kind == token::CloseDelim(token::Brace) {
+                    // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
+                    // the AST for typechecking.
+                    err.span_label(ident.span, "while parsing this `fn`");
+                    err.emit();
+                    (Vec::new(), None)
+                } else {
+                    return Err(err);
+                }
+            } else {
+                unreachable!()
+            }
         };
         attrs.extend(inner_attrs);
         Ok(body)
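
(A hypothetical reproduction of the input this recovery is aimed at; the offending line is left commented out so the snippet itself compiles.)

    struct S;

    impl S {
        // Uncommenting the next line used to stop parsing with "expected `;` or `{`".
        // With this change the error additionally carries a "while parsing this `fn`"
        // label and the enclosing `impl` is kept in the AST for typechecking.
        // fn missing_body()
        fn present(&self) {}
    }

    fn main() {
        S.present();
    }
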
@@ -1652,10 +1666,19 @@ impl<'a> Parser<'a> {
         req_name: ReqName,
         ret_allow_plus: AllowPlus,
     ) -> PResult<'a, P<FnDecl>> {
-        Ok(P(FnDecl {
-            inputs: self.parse_fn_params(req_name)?,
-            output: self.parse_ret_ty(ret_allow_plus, RecoverQPath::Yes)?,
-        }))
+        let inputs = self.parse_fn_params(req_name)?;
+        let output = self.parse_ret_ty(ret_allow_plus, RecoverQPath::Yes)?;
+
+        if let ast::FnRetTy::Ty(ty) = &output {
+            if let TyKind::Path(_, Path { segments, .. }) = &ty.kind {
+                if let [.., last] = &segments[..] {
+                    // Detect and recover `fn foo() -> Vec<i32>> {}`
+                    self.check_trailing_angle_brackets(last, &[&token::OpenDelim(token::Brace)]);
+                }
+            }
+        }
+
+        Ok(P(FnDecl { inputs, output }))
     }
 
     /// Parses the parameter list of a function, including the `(` and `)` delimiters.
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 175dd3fa53a..d99fcb0c4a1 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -36,6 +36,7 @@ bitflags::bitflags! {
     struct Restrictions: u8 {
         const STMT_EXPR         = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
+        const CONST_EXPR        = 1 << 2;
     }
 }
 
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 66ce015d02e..06760547eba 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -397,6 +397,13 @@ impl<'a> Parser<'a> {
         while let Some(arg) = self.parse_angle_arg()? {
             args.push(arg);
             if !self.eat(&token::Comma) {
+                if !self.token.kind.should_end_const_arg() {
+                    if self.handle_ambiguous_unbraced_const_arg(&mut args)? {
+                        // We've managed to (partially) recover, so continue trying to parse
+                        // arguments.
+                        continue;
+                    }
+                }
                 break;
             }
         }
@@ -476,41 +483,50 @@ impl<'a> Parser<'a> {
         Ok(self.mk_ty(span, ast::TyKind::Err))
     }
 
+    /// We do not permit arbitrary expressions as const arguments. They must be one of:
+    /// - An expression surrounded in `{}`.
+    /// - A literal.
+    /// - A numeric literal prefixed by `-`.
+    pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool {
+        match &expr.kind {
+            ast::ExprKind::Block(_, _) | ast::ExprKind::Lit(_) => true,
+            ast::ExprKind::Unary(ast::UnOp::Neg, expr) => match &expr.kind {
+                ast::ExprKind::Lit(_) => true,
+                _ => false,
+            },
+            _ => false,
+        }
+    }
+
     /// Parse a generic argument in a path segment.
     /// This does not include constraints, e.g., `Item = u8`, which is handled in `parse_angle_arg`.
     fn parse_generic_arg(&mut self) -> PResult<'a, Option<GenericArg>> {
+        let start = self.token.span;
         let arg = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
             // Parse lifetime argument.
             GenericArg::Lifetime(self.expect_lifetime())
         } else if self.check_const_arg() {
             // Parse const argument.
-            let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
+            let value = if let token::OpenDelim(token::Brace) = self.token.kind {
                 self.parse_block_expr(
                     None,
                     self.token.span,
                     BlockCheckMode::Default,
                     ast::AttrVec::new(),
                 )?
-            } else if self.token.is_ident() {
-                // FIXME(const_generics): to distinguish between idents for types and consts,
-                // we should introduce a GenericArg::Ident in the AST and distinguish when
-                // lowering to the HIR. For now, idents for const args are not permitted.
-                if self.token.is_bool_lit() {
-                    self.parse_literal_maybe_minus()?
-                } else {
-                    let span = self.token.span;
-                    let msg = "identifiers may currently not be used for const generics";
-                    self.struct_span_err(span, msg).emit();
-                    let block = self.mk_block_err(span);
-                    self.mk_expr(span, ast::ExprKind::Block(block, None), ast::AttrVec::new())
-                }
             } else {
-                self.parse_literal_maybe_minus()?
+                self.handle_unambiguous_unbraced_const_arg()?
             };
-            GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value: expr })
+            GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value })
         } else if self.check_type() {
             // Parse type argument.
-            GenericArg::Type(self.parse_ty()?)
+            match self.parse_ty() {
+                Ok(ty) => GenericArg::Type(ty),
+                Err(err) => {
+                    // Try to recover from possible `const` arg without braces.
+                    return self.recover_const_arg(start, err).map(Some);
+                }
+            }
         } else {
             return Ok(None);
         };
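
(A quick reference for the shapes `expr_is_valid_const_arg` accepts without braces, assuming const generics support; `wrap` and `K` are hypothetical names. A numeric literal prefixed by `-` is likewise accepted unbraced.)

    fn wrap<const N: i32>() -> i32 {
        N
    }

    fn main() {
        const K: i32 = 7;
        let a = wrap::<3>();         // a bare literal needs no braces
        let b = wrap::<{ -K }>();    // anything more complex must be enclosed in braces
        let c = wrap::<{ K + 1 }>();
        assert_eq!((a, b, c), (3, -7, 8));
    }
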
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index d42a786a18f..7a6ebca4e15 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -265,7 +265,19 @@ impl<'a> Parser<'a> {
     /// Parses an array (`[TYPE; EXPR]`) or slice (`[TYPE]`) type.
     /// The opening `[` bracket is already eaten.
     fn parse_array_or_slice_ty(&mut self) -> PResult<'a, TyKind> {
-        let elt_ty = self.parse_ty()?;
+        let elt_ty = match self.parse_ty() {
+            Ok(ty) => ty,
+            Err(mut err)
+                if self.look_ahead(1, |t| t.kind == token::CloseDelim(token::Bracket))
+                    | self.look_ahead(1, |t| t.kind == token::Semi) =>
+            {
+                // Recover from `[LIT; EXPR]` and `[LIT]`
+                self.bump();
+                err.emit();
+                self.mk_ty(self.prev_token.span, TyKind::Err)
+            }
+            Err(err) => return Err(err),
+        };
         let ty = if self.eat(&token::Semi) {
             TyKind::Array(elt_ty, self.parse_anon_const_expr()?)
         } else {
diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs
index 7517ab66170..d323aebe597 100644
--- a/compiler/rustc_resolve/src/late.rs
+++ b/compiler/rustc_resolve/src/late.rs
@@ -57,6 +57,12 @@ enum PatternSource {
     FnParam,
 }
 
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum IsRepeatExpr {
+    No,
+    Yes,
+}
+
 impl PatternSource {
     fn descr(self) -> &'static str {
         match self {
@@ -437,10 +443,8 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
         self.resolve_block(block);
     }
     fn visit_anon_const(&mut self, constant: &'ast AnonConst) {
-        debug!("visit_anon_const {:?}", constant);
-        self.with_constant_rib(constant.value.is_potential_trivial_const_param(), |this| {
-            visit::walk_anon_const(this, constant);
-        });
+        // We deal with repeat expressions explicitly in `resolve_expr`.
+        self.resolve_anon_const(constant, IsRepeatExpr::No);
     }
     fn visit_expr(&mut self, expr: &'ast Expr) {
         self.resolve_expr(expr, None);
@@ -647,7 +651,11 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
                         if !check_ns(TypeNS) && check_ns(ValueNS) {
                             // This must be equivalent to `visit_anon_const`, but we cannot call it
                             // directly due to visitor lifetimes so we have to copy-paste some code.
-                            self.with_constant_rib(true, |this| {
+                            //
+                            // Note that we might not be inside of a repeat expression here,
+                            // but considering that `IsRepeatExpr` is only relevant for
+                            // non-trivial constants, this doesn't matter.
+                            self.with_constant_rib(IsRepeatExpr::No, true, |this| {
                                 this.smart_resolve_path(
                                     ty.id,
                                     qself.as_ref(),
@@ -980,9 +988,11 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
                                             //
                                             // Type parameters can already be used and as associated consts are
                                             // not used as part of the type system, this is far less surprising.
-                                            this.with_constant_rib(true, |this| {
-                                                this.visit_expr(expr)
-                                            });
+                                            this.with_constant_rib(
+                                                IsRepeatExpr::No,
+                                                true,
+                                                |this| this.visit_expr(expr),
+                                            );
                                         }
                                     }
                                     AssocItemKind::Fn(_, _, generics, _) => {
@@ -1023,7 +1033,9 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
                 self.with_item_rib(HasGenericParams::No, |this| {
                     this.visit_ty(ty);
                     if let Some(expr) = expr {
-                        this.with_constant_rib(expr.is_potential_trivial_const_param(), |this| {
+                        // We already forbid generic params because of the above item rib,
+                        // so it doesn't matter whether this is a trivial constant.
+                        this.with_constant_rib(IsRepeatExpr::No, true, |this| {
                             this.visit_expr(expr)
                         });
                     }
@@ -1122,12 +1134,29 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
         self.with_rib(ValueNS, kind, |this| this.with_rib(TypeNS, kind, f))
     }
 
-    fn with_constant_rib(&mut self, trivial: bool, f: impl FnOnce(&mut Self)) {
-        debug!("with_constant_rib");
-        self.with_rib(ValueNS, ConstantItemRibKind(trivial), |this| {
-            this.with_rib(TypeNS, ConstantItemRibKind(trivial), |this| {
-                this.with_label_rib(ConstantItemRibKind(trivial), f);
-            })
+    // HACK(min_const_generics,const_evaluatable_unchecked): We
+    // want to keep allowing `[0; std::mem::size_of::<*mut T>()]`
+    // with a future compat lint for now. We do this by adding an
+    // additional special case for repeat expressions.
+    //
+    // Note that we intentionally still forbid `[0; N + 1]` during
+    // name resolution so that we don't extend the future
+    // compat lint to new cases.
+    fn with_constant_rib(
+        &mut self,
+        is_repeat: IsRepeatExpr,
+        is_trivial: bool,
+        f: impl FnOnce(&mut Self),
+    ) {
+        debug!("with_constant_rib: is_repeat={:?} is_trivial={}", is_repeat, is_trivial);
+        self.with_rib(ValueNS, ConstantItemRibKind(is_trivial), |this| {
+            this.with_rib(
+                TypeNS,
+                ConstantItemRibKind(is_repeat == IsRepeatExpr::Yes || is_trivial),
+                |this| {
+                    this.with_label_rib(ConstantItemRibKind(is_trivial), f);
+                },
+            )
         });
     }
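
(A hedged illustration of the repeat-expression carve-out described in the comment above; `T` is a hypothetical type parameter, and the snippet compiles with a future-compatibility warning rather than an error.)

    use std::mem;

    fn zeroed_pointer_bytes<T>() -> Vec<u8> {
        // Accepted, but flagged by the `const_evaluatable_unchecked` future-compat lint,
        // because the repeat length mentions a type parameter.
        let buf = [0u8; mem::size_of::<*mut T>()];
        // By contrast, `[0u8; N + 1]` with a `const N: usize` parameter is still rejected
        // during name resolution, so the lint does not grow new cases.
        buf.to_vec()
    }

    fn main() {
        assert_eq!(zeroed_pointer_bytes::<i64>().len(), mem::size_of::<*mut i64>());
    }
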
 
@@ -1272,9 +1301,17 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
                                             //
                                             // Type parameters can already be used and as associated consts are
                                             // not used as part of the type system, this is far less surprising.
-                                            this.with_constant_rib(true, |this| {
-                                                visit::walk_assoc_item(this, item, AssocCtxt::Impl)
-                                            });
+                                            this.with_constant_rib(
+                                                IsRepeatExpr::No,
+                                                true,
+                                                |this| {
+                                                    visit::walk_assoc_item(
+                                                        this,
+                                                        item,
+                                                        AssocCtxt::Impl,
+                                                    )
+                                                },
+                                            );
                                         }
                                         AssocItemKind::Fn(_, _, generics, _) => {
                                             // We also need a new scope for the impl item type parameters.
@@ -2199,6 +2236,17 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
         debug!("(resolving block) leaving block");
     }
 
+    fn resolve_anon_const(&mut self, constant: &'ast AnonConst, is_repeat: IsRepeatExpr) {
+        debug!("resolve_anon_const {:?} is_repeat: {:?}", constant, is_repeat);
+        self.with_constant_rib(
+            is_repeat,
+            constant.value.is_potential_trivial_const_param(),
+            |this| {
+                visit::walk_anon_const(this, constant);
+            },
+        );
+    }
+
     fn resolve_expr(&mut self, expr: &'ast Expr, parent: Option<&'ast Expr>) {
         // First, record candidate traits for this expression if it could
         // result in the invocation of a method call.
@@ -2322,6 +2370,10 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
             ExprKind::Async(..) | ExprKind::Closure(..) => {
                 self.with_label_rib(ClosureOrAsyncRibKind, |this| visit::walk_expr(this, expr));
             }
+            ExprKind::Repeat(ref elem, ref ct) => {
+                self.visit_expr(elem);
+                self.resolve_anon_const(ct, IsRepeatExpr::Yes);
+            }
             _ => {
                 visit::walk_expr(self, expr);
             }
diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs
index c24b383f3b8..cafceff4f29 100644
--- a/compiler/rustc_resolve/src/late/diagnostics.rs
+++ b/compiler/rustc_resolve/src/late/diagnostics.rs
@@ -917,54 +917,71 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
                 self.suggest_using_enum_variant(err, source, def_id, span);
             }
             (Res::Def(DefKind::Struct, def_id), _) if ns == ValueNS => {
-                if let Some((ctor_def, ctor_vis, fields)) =
-                    self.r.struct_constructors.get(&def_id).cloned()
+                let (ctor_def, ctor_vis, fields) =
+                    if let Some(struct_ctor) = self.r.struct_constructors.get(&def_id).cloned() {
+                        struct_ctor
+                    } else {
+                        bad_struct_syntax_suggestion(def_id);
+                        return true;
+                    };
+
+                let is_accessible = self.r.is_accessible_from(ctor_vis, self.parent_scope.module);
+                if !is_expected(ctor_def) || is_accessible {
+                    return true;
+                }
+
+                let field_spans = match source {
+                    // e.g. `if let Enum::TupleVariant(field1, field2) = _`
+                    PathSource::TupleStruct(_, pattern_spans) => {
+                        err.set_primary_message(
+                            "cannot match against a tuple struct which contains private fields",
+                        );
+
+                        // Use spans of the tuple struct pattern.
+                        Some(Vec::from(pattern_spans))
+                    }
+                    // e.g. `let _ = Enum::TupleVariant(field1, field2);`
+                    _ if source.is_call() => {
+                        err.set_primary_message(
+                            "cannot initialize a tuple struct which contains private fields",
+                        );
+
+                        // Use spans of the tuple struct definition.
+                        self.r
+                            .field_names
+                            .get(&def_id)
+                            .map(|fields| fields.iter().map(|f| f.span).collect::<Vec<_>>())
+                    }
+                    _ => None,
+                };
+
+                if let Some(spans) =
+                    field_spans.filter(|spans| spans.len() > 0 && fields.len() == spans.len())
                 {
-                    let accessible_ctor =
-                        self.r.is_accessible_from(ctor_vis, self.parent_scope.module);
-                    if is_expected(ctor_def) && !accessible_ctor {
-                        let mut better_diag = false;
-                        if let PathSource::TupleStruct(_, pattern_spans) = source {
-                            if pattern_spans.len() > 0 && fields.len() == pattern_spans.len() {
-                                let non_visible_spans: Vec<Span> = fields
-                                    .iter()
-                                    .zip(pattern_spans.iter())
-                                    .filter_map(|(vis, span)| {
-                                        match self
-                                            .r
-                                            .is_accessible_from(*vis, self.parent_scope.module)
-                                        {
-                                            true => None,
-                                            false => Some(*span),
-                                        }
-                                    })
-                                    .collect();
-                                // Extra check to be sure
-                                if non_visible_spans.len() > 0 {
-                                    let mut m: rustc_span::MultiSpan =
-                                        non_visible_spans.clone().into();
-                                    non_visible_spans.into_iter().for_each(|s| {
-                                        m.push_span_label(s, "private field".to_string())
-                                    });
-                                    err.span_note(
-                                        m,
-                                        "constructor is not visible here due to private fields",
-                                    );
-                                    better_diag = true;
-                                }
-                            }
-                        }
+                    let non_visible_spans: Vec<Span> = fields
+                        .iter()
+                        .zip(spans.iter())
+                        .filter(|(vis, _)| {
+                            !self.r.is_accessible_from(**vis, self.parent_scope.module)
+                        })
+                        .map(|(_, span)| *span)
+                        .collect();
 
-                        if !better_diag {
-                            err.span_label(
-                                span,
-                                "constructor is not visible here due to private fields".to_string(),
-                            );
-                        }
+                    if non_visible_spans.len() > 0 {
+                        let mut m: rustc_span::MultiSpan = non_visible_spans.clone().into();
+                        non_visible_spans
+                            .into_iter()
+                            .for_each(|s| m.push_span_label(s, "private field".to_string()));
+                        err.span_note(m, "constructor is not visible here due to private fields");
                     }
-                } else {
-                    bad_struct_syntax_suggestion(def_id);
+
+                    return true;
                 }
+
+                err.span_label(
+                    span,
+                    "constructor is not visible here due to private fields".to_string(),
+                );
             }
             (
                 Res::Def(
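
(A compiling sketch of the two cases the reworked diagnostic distinguishes; the offending lines are left as comments, and `shapes`/`Point` are hypothetical names.)

    mod shapes {
        pub struct Point(i32, i32); // tuple fields are private outside this module

        impl Point {
            pub fn new(x: i32, y: i32) -> Point {
                Point(x, y)
            }
            pub fn x(&self) -> i32 {
                self.0
            }
        }
    }

    fn main() {
        let p = shapes::Point::new(1, 2);
        // let q = shapes::Point(1, 2);
        //     error: cannot initialize a tuple struct which contains private fields
        // let shapes::Point(x, _y) = p;
        //     error: cannot match against a tuple struct which contains private fields
        assert_eq!(p.x(), 1);
    }
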
diff --git a/compiler/rustc_session/src/filesearch.rs b/compiler/rustc_session/src/filesearch.rs
index 12a268d5b1d..55ee4e52082 100644
--- a/compiler/rustc_session/src/filesearch.rs
+++ b/compiler/rustc_session/src/filesearch.rs
@@ -153,14 +153,14 @@ fn find_libdir(sysroot: &Path) -> Cow<'static, str> {
     const SECONDARY_LIB_DIR: &str = "lib";
 
     match option_env!("CFG_LIBDIR_RELATIVE") {
-        Some(libdir) if libdir != "lib" => libdir.into(),
-        _ => {
+        None | Some("lib") => {
             if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() {
                 PRIMARY_LIB_DIR.into()
             } else {
                 SECONDARY_LIB_DIR.into()
             }
         }
+        Some(libdir) => libdir.into(),
     }
 }
 
diff --git a/compiler/rustc_session/src/lint/builtin.rs b/compiler/rustc_session/src/lint/builtin.rs
index ab56a0a5667..b8826a548b8 100644
--- a/compiler/rustc_session/src/lint/builtin.rs
+++ b/compiler/rustc_session/src/lint/builtin.rs
@@ -2648,6 +2648,36 @@ declare_lint! {
 }
 
 declare_lint! {
+    /// The `function_item_references` lint detects function references that are
+    /// formatted with [`fmt::Pointer`] or transmuted.
+    ///
+    /// [`fmt::Pointer`]: https://doc.rust-lang.org/std/fmt/trait.Pointer.html
+    ///
+    /// ### Example
+    ///
+    /// ```rust
+    /// fn foo() { }
+    ///
+    /// fn main() {
+    ///     println!("{:p}", &foo);
+    /// }
+    /// ```
+    ///
+    /// {{produces}}
+    ///
+    /// ### Explanation
+    ///
+    /// Taking a reference to a function may be mistaken for a way to obtain a
+    /// pointer to that function. This can give unexpected results when
+    /// formatting the reference as a pointer or transmuting it. This lint is
+    /// issued when function references are formatted as pointers, passed as
+    /// arguments bound by [`fmt::Pointer`] or transmuted.
+    pub FUNCTION_ITEM_REFERENCES,
+    Warn,
+    "suggest casting to a function pointer when attempting to take references to function items",
+}
+
+declare_lint! {
     /// The `uninhabited_static` lint detects uninhabited statics.
     ///
     /// ### Example
@@ -2762,6 +2792,7 @@ declare_lint_pass! {
         CONST_EVALUATABLE_UNCHECKED,
         INEFFECTIVE_UNSTABLE_TRAIT_IMPL,
         UNINHABITED_STATIC,
+        FUNCTION_ITEM_REFERENCES,
     ]
 }
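
(A small sketch of the cast the new lint nudges toward, complementing the example in the lint documentation above.)

    fn foo() {}

    fn main() {
        // `println!("{:p}", &foo)` prints the address of a reference to the zero-sized
        // function item, which is rarely what was intended. Casting to a function
        // pointer prints the address of the function itself:
        println!("{:p}", foo as fn());
    }
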
 
diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs
index 627adcceb3f..750f2e19ee2 100644
--- a/compiler/rustc_session/src/options.rs
+++ b/compiler/rustc_session/src/options.rs
@@ -717,7 +717,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
     // This list is in alphabetical order.
     //
     // If you add a new option, please update:
-    // - src/librustc_interface/tests.rs
+    // - compiler/rustc_interface/src/tests.rs
     // - src/doc/rustc/src/codegen-options/index.md
 
     ar: String = (String::new(), parse_string, [UNTRACKED],
@@ -814,7 +814,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
     // This list is in alphabetical order.
     //
     // If you add a new option, please update:
-    // - src/librustc_interface/tests.rs
+    // - compiler/rustc_interface/src/tests.rs
     // - src/doc/rustc/src/codegen-options/index.md
 }
 
@@ -825,7 +825,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
     // This list is in alphabetical order.
     //
     // If you add a new option, please update:
-    // - src/librustc_interface/tests.rs
+    // - compiler/rustc_interface/src/tests.rs
 
     allow_features: Option<Vec<String>> = (None, parse_opt_comma_list, [TRACKED],
         "only allow the listed language features to be enabled in code (space separated)"),
@@ -904,6 +904,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
         "force all crates to be `rustc_private` unstable (default: no)"),
     fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
         "set the optimization fuel quota for a crate"),
+    function_sections: Option<bool> = (None, parse_opt_bool, [TRACKED],
+        "whether each function should go in its own section"),
     graphviz_dark_mode: bool = (false, parse_bool, [UNTRACKED],
         "use dark-themed colors in graphviz output (default: no)"),
     graphviz_font: String = ("Courier, monospace".to_string(), parse_string, [UNTRACKED],
diff --git a/compiler/rustc_span/src/caching_source_map_view.rs b/compiler/rustc_span/src/caching_source_map_view.rs
index 68b0bd1a574..15dd00fb483 100644
--- a/compiler/rustc_span/src/caching_source_map_view.rs
+++ b/compiler/rustc_span/src/caching_source_map_view.rs
@@ -1,13 +1,25 @@
 use crate::source_map::SourceMap;
 use crate::{BytePos, SourceFile};
 use rustc_data_structures::sync::Lrc;
+use std::ops::Range;
 
 #[derive(Clone)]
 struct CacheEntry {
     time_stamp: usize,
     line_number: usize,
-    line_start: BytePos,
-    line_end: BytePos,
+    // The line's byte position range in the `SourceMap`. This range will fail to contain a valid
+    // position in certain edge cases. Spans often start/end one past something, and when that
+    // something is the last character of a file (this can happen when a file doesn't end in a
+    // newline, for example), we'd still like for the position to be considered within the last
+    // line. However, it isn't according to the exclusive upper bound of this range. We cannot
+    // change the upper bound to be inclusive, because for most lines, the upper bound is the same
+    // as the lower bound of the next line, so there would be an ambiguity.
+    //
+    // Since the containment aspect of this range is only used to see whether or not the cache
+    // entry contains a position, the only ramification of the above is that we will get cache
+    // misses for these rare positions. A line lookup for the position via `SourceMap::lookup_line`
+    // after a cache miss will produce the last line number, as desired.
+    line: Range<BytePos>,
     file: Lrc<SourceFile>,
     file_index: usize,
 }
@@ -26,8 +38,7 @@ impl<'sm> CachingSourceMapView<'sm> {
         let entry = CacheEntry {
             time_stamp: 0,
             line_number: 0,
-            line_start: BytePos(0),
-            line_end: BytePos(0),
+            line: BytePos(0)..BytePos(0),
             file: first_file,
             file_index: 0,
         };
@@ -47,13 +58,13 @@ impl<'sm> CachingSourceMapView<'sm> {
 
         // Check if the position is in one of the cached lines
         for cache_entry in self.line_cache.iter_mut() {
-            if pos >= cache_entry.line_start && pos < cache_entry.line_end {
+            if cache_entry.line.contains(&pos) {
                 cache_entry.time_stamp = self.time_stamp;
 
                 return Some((
                     cache_entry.file.clone(),
                     cache_entry.line_number,
-                    pos - cache_entry.line_start,
+                    pos - cache_entry.line.start,
                 ));
             }
         }
@@ -69,13 +80,13 @@ impl<'sm> CachingSourceMapView<'sm> {
         let cache_entry = &mut self.line_cache[oldest];
 
         // If the entry doesn't point to the correct file, fix it up
-        if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos {
+        if !file_contains(&cache_entry.file, pos) {
             let file_valid;
             if self.source_map.files().len() > 0 {
                 let file_index = self.source_map.lookup_source_file_idx(pos);
                 let file = self.source_map.files()[file_index].clone();
 
-                if pos >= file.start_pos && pos < file.end_pos {
+                if file_contains(&file, pos) {
                     cache_entry.file = file;
                     cache_entry.file_index = file_index;
                     file_valid = true;
@@ -95,10 +106,19 @@ impl<'sm> CachingSourceMapView<'sm> {
         let line_bounds = cache_entry.file.line_bounds(line_index);
 
         cache_entry.line_number = line_index + 1;
-        cache_entry.line_start = line_bounds.0;
-        cache_entry.line_end = line_bounds.1;
+        cache_entry.line = line_bounds;
         cache_entry.time_stamp = self.time_stamp;
 
-        Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line_start))
+        Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line.start))
     }
 }
+
+#[inline]
+fn file_contains(file: &SourceFile, pos: BytePos) -> bool {
+    // `SourceMap::lookup_source_file_idx` and `SourceFile::contains` both consider the position
+    // one past the end of a file to belong to it. Normally, that's what we want. But for the
+    // purposes of converting a byte position to a line and column number, we can't come up with a
+    // line and column number if the file is empty, because an empty file doesn't contain any
+    // lines. So for our purposes, we don't consider empty files to contain any byte position.
+    file.contains(pos) && !file.is_empty()
+}
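
(A minimal illustration of the half-open containment issue described in the `CacheEntry` comment, using a plain `Range<u32>` in place of `Range<BytePos>`.)

    fn main() {
        // Suppose the last line of a file without a trailing newline spans bytes 10..20.
        let last_line = 10u32..20;
        // A span ending one past the final character yields position 20, which the
        // exclusive upper bound does not contain; the cache then misses and falls back
        // to `SourceMap::lookup_line`, which still reports the last line.
        assert!(last_line.contains(&19));
        assert!(!last_line.contains(&20));
    }
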
diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs
index d036c078049..0e3027273ab 100644
--- a/compiler/rustc_span/src/lib.rs
+++ b/compiler/rustc_span/src/lib.rs
@@ -52,7 +52,7 @@ use std::cell::RefCell;
 use std::cmp::{self, Ordering};
 use std::fmt;
 use std::hash::Hash;
-use std::ops::{Add, Sub};
+use std::ops::{Add, Range, Sub};
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
 
@@ -738,14 +738,14 @@ impl<D: Decoder> Decodable<D> for Span {
 }
 
 /// Calls the provided closure, using the provided `SourceMap` to format
-/// any spans that are debug-printed during the closure'e exectuino.
+/// any spans that are debug-printed during the closure's execution.
 ///
 /// Normally, the global `TyCtxt` is used to retrieve the `SourceMap`
 /// (see `rustc_interface::callbacks::span_debug1). However, some parts
 /// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before
 /// a `TyCtxt` is available. In this case, we fall back to
 /// the `SourceMap` provided to this function. If that is not available,
-/// we fall back to printing the raw `Span` field values
+/// we fall back to printing the raw `Span` field values.
 pub fn with_source_map<T, F: FnOnce() -> T>(source_map: Lrc<SourceMap>, f: F) -> T {
     SESSION_GLOBALS.with(|session_globals| {
         *session_globals.source_map.borrow_mut() = Some(source_map);
@@ -1426,24 +1426,33 @@ impl SourceFile {
         if line_index >= 0 { Some(line_index as usize) } else { None }
     }
 
-    pub fn line_bounds(&self, line_index: usize) -> (BytePos, BytePos) {
-        if self.start_pos == self.end_pos {
-            return (self.start_pos, self.end_pos);
+    pub fn line_bounds(&self, line_index: usize) -> Range<BytePos> {
+        if self.is_empty() {
+            return self.start_pos..self.end_pos;
         }
 
         assert!(line_index < self.lines.len());
         if line_index == (self.lines.len() - 1) {
-            (self.lines[line_index], self.end_pos)
+            self.lines[line_index]..self.end_pos
         } else {
-            (self.lines[line_index], self.lines[line_index + 1])
+            self.lines[line_index]..self.lines[line_index + 1]
         }
     }
 
+    /// Returns whether or not the file contains the given `SourceMap` byte
+    /// position. The position one past the end of the file is considered to be
+    /// contained by the file. This implies that files for which `is_empty`
+    /// returns true still contain one byte position according to this function.
     #[inline]
     pub fn contains(&self, byte_pos: BytePos) -> bool {
         byte_pos >= self.start_pos && byte_pos <= self.end_pos
     }
 
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.start_pos == self.end_pos
+    }
+
     /// Calculates the original byte position relative to the start of the file
     /// based on the given byte position.
     pub fn original_relative_byte_pos(&self, pos: BytePos) -> BytePos {
@@ -1925,9 +1934,7 @@ impl<CTX: HashStableContext> HashStable<CTX> for ExpnId {
             return;
         }
 
-        TAG_NOT_ROOT.hash_stable(ctx, hasher);
         let index = self.as_u32() as usize;
-
         let res = CACHE.with(|cache| cache.borrow().get(index).copied().flatten());
 
         if let Some(res) = res {
@@ -1936,6 +1943,7 @@ impl<CTX: HashStableContext> HashStable<CTX> for ExpnId {
             let new_len = index + 1;
 
             let mut sub_hasher = StableHasher::new();
+            TAG_NOT_ROOT.hash_stable(ctx, &mut sub_hasher);
             self.expn_data().hash_stable(ctx, &mut sub_hasher);
             let sub_hash: Fingerprint = sub_hasher.finish();
 
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index 080afdcd2c0..1a6c45b6c80 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -127,6 +127,7 @@ symbols! {
         ArgumentV1,
         Arguments,
         C,
+        CString,
         Center,
         Clone,
         Copy,
@@ -261,6 +262,7 @@ symbols! {
         arm_target_feature,
         array,
         arrays,
+        as_ptr,
         as_str,
         asm,
         assert,
@@ -310,6 +312,7 @@ symbols! {
         breakpoint,
         bridge,
         bswap,
+        c_str,
         c_variadic,
         call,
         call_mut,
@@ -397,6 +400,7 @@ symbols! {
         crate_type,
         crate_visibility_modifier,
         crt_dash_static: "crt-static",
+        cstring_type,
         ctlz,
         ctlz_nonzero,
         ctpop,
@@ -478,6 +482,7 @@ symbols! {
         existential_type,
         exp2f32,
         exp2f64,
+        expect,
         expected,
         expf32,
         expf64,
@@ -501,6 +506,7 @@ symbols! {
         fadd_fast,
         fdiv_fast,
         feature,
+        ffi,
         ffi_const,
         ffi_pure,
         ffi_returns_twice,
@@ -796,6 +802,8 @@ symbols! {
         plugin_registrar,
         plugins,
         pointer,
+        pointer_trait,
+        pointer_trait_fmt,
         poll,
         position,
         post_dash_lto: "post-lto",
@@ -1161,6 +1169,7 @@ symbols! {
         unsafe_cell,
         unsafe_no_drop_flag,
         unsize,
+        unsized_fn_params,
         unsized_locals,
         unsized_tuple_coercion,
         unstable,
@@ -1168,6 +1177,7 @@ symbols! {
         unused_qualifications,
         unwind,
         unwind_attributes,
+        unwrap,
         unwrap_or,
         use_extern_macros,
         use_nested_groups,
diff --git a/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs b/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs
index c79b2624f8c..5f6d8ac751e 100644
--- a/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs
+++ b/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs
@@ -85,8 +85,10 @@ pub fn is_const_evaluatable<'cx, 'tcx>(
                         } else if leaf.has_param_types_or_consts() {
                             failure_kind = cmp::min(failure_kind, FailureKind::MentionsParam);
                         }
+
+                        false
                     }
-                    Node::Binop(_, _, _) | Node::UnaryOp(_, _) | Node::FunctionCall(_, _) => (),
+                    Node::Binop(_, _, _) | Node::UnaryOp(_, _) | Node::FunctionCall(_, _) => false,
                 });
 
                 match failure_kind {
@@ -194,12 +196,12 @@ pub fn is_const_evaluatable<'cx, 'tcx>(
 ///
 /// This is only able to represent a subset of `MIR`,
 /// and should not leak any information about desugarings.
-#[derive(Clone, Copy)]
+#[derive(Debug, Clone, Copy)]
 pub struct AbstractConst<'tcx> {
     // FIXME: Consider adding something like `IndexSlice`
     // and use this here.
-    inner: &'tcx [Node<'tcx>],
-    substs: SubstsRef<'tcx>,
+    pub inner: &'tcx [Node<'tcx>],
+    pub substs: SubstsRef<'tcx>,
 }
 
 impl AbstractConst<'tcx> {
@@ -209,9 +211,21 @@ impl AbstractConst<'tcx> {
         substs: SubstsRef<'tcx>,
     ) -> Result<Option<AbstractConst<'tcx>>, ErrorReported> {
         let inner = tcx.mir_abstract_const_opt_const_arg(def)?;
+        debug!("AbstractConst::new({:?}) = {:?}", def, inner);
         Ok(inner.map(|inner| AbstractConst { inner, substs }))
     }
 
+    pub fn from_const(
+        tcx: TyCtxt<'tcx>,
+        ct: &ty::Const<'tcx>,
+    ) -> Result<Option<AbstractConst<'tcx>>, ErrorReported> {
+        match ct.val {
+            ty::ConstKind::Unevaluated(def, substs, None) => AbstractConst::new(tcx, def, substs),
+            ty::ConstKind::Error(_) => Err(ErrorReported),
+            _ => Ok(None),
+        }
+    }
+
     #[inline]
     pub fn subtree(self, node: NodeId) -> AbstractConst<'tcx> {
         AbstractConst { inner: &self.inner[..=node.index()], substs: self.substs }
@@ -550,31 +564,32 @@ pub(super) fn try_unify_abstract_consts<'tcx>(
     // on `ErrorReported`.
 }
 
-fn walk_abstract_const<'tcx, F>(tcx: TyCtxt<'tcx>, ct: AbstractConst<'tcx>, mut f: F)
+// FIXME: Use `std::ops::ControlFlow` instead of `bool` here.
+pub fn walk_abstract_const<'tcx, F>(tcx: TyCtxt<'tcx>, ct: AbstractConst<'tcx>, mut f: F) -> bool
 where
-    F: FnMut(Node<'tcx>),
+    F: FnMut(Node<'tcx>) -> bool,
 {
-    recurse(tcx, ct, &mut f);
-    fn recurse<'tcx>(tcx: TyCtxt<'tcx>, ct: AbstractConst<'tcx>, f: &mut dyn FnMut(Node<'tcx>)) {
+    fn recurse<'tcx>(
+        tcx: TyCtxt<'tcx>,
+        ct: AbstractConst<'tcx>,
+        f: &mut dyn FnMut(Node<'tcx>) -> bool,
+    ) -> bool {
         let root = ct.root();
-        f(root);
-        match root {
-            Node::Leaf(_) => (),
-            Node::Binop(_, l, r) => {
-                recurse(tcx, ct.subtree(l), f);
-                recurse(tcx, ct.subtree(r), f);
-            }
-            Node::UnaryOp(_, v) => {
-                recurse(tcx, ct.subtree(v), f);
-            }
-            Node::FunctionCall(func, args) => {
-                recurse(tcx, ct.subtree(func), f);
-                for &arg in args {
-                    recurse(tcx, ct.subtree(arg), f);
+        f(root)
+            || match root {
+                Node::Leaf(_) => false,
+                Node::Binop(_, l, r) => {
+                    recurse(tcx, ct.subtree(l), f) || recurse(tcx, ct.subtree(r), f)
+                }
+                Node::UnaryOp(_, v) => recurse(tcx, ct.subtree(v), f),
+                Node::FunctionCall(func, args) => {
+                    recurse(tcx, ct.subtree(func), f)
+                        || args.iter().any(|&arg| recurse(tcx, ct.subtree(arg), f))
                 }
             }
-        }
     }
+
+    recurse(tcx, ct, &mut f)
 }
 
 /// Tries to unify two abstract constants using structural equality.
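
The rewritten `walk_abstract_const` threads a `bool` back out of the recursion so the callback can cut the walk short, with `||` providing the short-circuiting; the FIXME notes that `std::ops::ControlFlow` would express the same idea more explicitly. A self-contained sketch of that pattern on a simplified node tree (the `Expr` type and `walk` helper are illustrative stand-ins, not rustc's types):

    // Simplified stand-in for the abstract-const node tree.
    enum Expr {
        Leaf(i64),
        Binop(Box<Expr>, Box<Expr>),
    }

    // Returns `true` as soon as the callback does, skipping the rest of the tree;
    // `||` gives the same short-circuiting the rewritten walker relies on.
    fn walk(e: &Expr, f: &mut dyn FnMut(&Expr) -> bool) -> bool {
        f(e) || match e {
            Expr::Leaf(_) => false,
            Expr::Binop(l, r) => walk(l, f) || walk(r, f),
        }
    }

    fn contains_negative_leaf(e: &Expr) -> bool {
        walk(e, &mut |node| matches!(node, Expr::Leaf(n) if *n < 0))
    }
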
diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
index fa837e04db3..c0881befe24 100644
--- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
+++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
@@ -1845,9 +1845,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
                     err.note("all function arguments must have a statically known size");
                 }
                 if tcx.sess.opts.unstable_features.is_nightly_build()
-                    && !self.tcx.features().unsized_locals
+                    && !self.tcx.features().unsized_fn_params
                 {
-                    err.help("unsized locals are gated as an unstable feature");
+                    err.help("unsized fn params are gated as an unstable feature");
                 }
             }
             ObligationCauseCode::SizedReturnType => {
diff --git a/compiler/rustc_trait_selection/src/traits/object_safety.rs b/compiler/rustc_trait_selection/src/traits/object_safety.rs
index d1647e686a8..c52fd0b5786 100644
--- a/compiler/rustc_trait_selection/src/traits/object_safety.rs
+++ b/compiler/rustc_trait_selection/src/traits/object_safety.rs
@@ -11,6 +11,7 @@
 use super::elaborate_predicates;
 
 use crate::infer::TyCtxtInferExt;
+use crate::traits::const_evaluatable::{self, AbstractConst};
 use crate::traits::query::evaluate_obligation::InferCtxtExt;
 use crate::traits::{self, Obligation, ObligationCause};
 use rustc_errors::FatalError;
@@ -249,7 +250,7 @@ fn predicates_reference_self(
     predicates
         .predicates
         .iter()
-        .map(|(predicate, sp)| (predicate.subst_supertrait(tcx, &trait_ref), *sp))
+        .map(|&(predicate, sp)| (predicate.subst_supertrait(tcx, &trait_ref), sp))
         .filter_map(|predicate| predicate_references_self(tcx, predicate))
         .collect()
 }
@@ -260,7 +261,7 @@ fn bounds_reference_self(tcx: TyCtxt<'_>, trait_def_id: DefId) -> SmallVec<[Span
         .in_definition_order()
         .filter(|item| item.kind == ty::AssocKind::Type)
         .flat_map(|item| tcx.explicit_item_bounds(item.def_id))
-        .map(|(predicate, sp)| (predicate.subst_supertrait(tcx, &trait_ref), *sp))
+        .map(|&(predicate, sp)| (predicate.subst_supertrait(tcx, &trait_ref), sp))
         .filter_map(|predicate| predicate_references_self(tcx, predicate))
         .collect()
 }
@@ -415,7 +416,7 @@ fn virtual_call_violation_for_method<'tcx>(
         ));
     }
 
-    for (i, input_ty) in sig.skip_binder().inputs()[1..].iter().enumerate() {
+    for (i, &input_ty) in sig.skip_binder().inputs()[1..].iter().enumerate() {
         if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) {
             return Some(MethodViolationCode::ReferencesSelfInput(i));
         }
@@ -438,10 +439,7 @@ fn virtual_call_violation_for_method<'tcx>(
         // so outlives predicates will always hold.
         .cloned()
         .filter(|(p, _)| p.to_opt_type_outlives().is_none())
-        .collect::<Vec<_>>()
-        // Do a shallow visit so that `contains_illegal_self_type_reference`
-        // may apply it's custom visiting.
-        .visit_tys_shallow(|t| contains_illegal_self_type_reference(tcx, trait_def_id, t))
+        .any(|pred| contains_illegal_self_type_reference(tcx, trait_def_id, pred))
     {
         return Some(MethodViolationCode::WhereClauseReferencesSelf);
     }
@@ -463,10 +461,17 @@ fn virtual_call_violation_for_method<'tcx>(
 
             let param_env = tcx.param_env(method.def_id);
 
-            let abi_of_ty = |ty: Ty<'tcx>| -> &Abi {
+            let abi_of_ty = |ty: Ty<'tcx>| -> Option<&Abi> {
                 match tcx.layout_of(param_env.and(ty)) {
-                    Ok(layout) => &layout.abi,
-                    Err(err) => bug!("error: {}\n while computing layout for type {:?}", err, ty),
+                    Ok(layout) => Some(&layout.abi),
+                    Err(err) => {
+                        // #78372
+                        tcx.sess.delay_span_bug(
+                            tcx.def_span(method.def_id),
+                            &format!("error: {}\n while computing layout for type {:?}", err, ty),
+                        );
+                        None
+                    }
                 }
             };
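
Returning `Option<&Abi>` and reporting layout failures through `delay_span_bug` (the `#78372` reference above) turns a hard ICE into a delayed diagnostic, and the callers below fall through to their existing "unexpected ABI" handling. The same shape in miniature, with `eprintln!` standing in for rustc's diagnostics and all names illustrative:

    #[derive(Debug)]
    enum Abi {
        Scalar,
        ScalarPair,
    }

    // Let layout failures flow back to the caller instead of aborting in the helper.
    fn abi_of(layout: Result<Abi, String>) -> Option<Abi> {
        match layout {
            Ok(abi) => Some(abi),
            Err(err) => {
                eprintln!("delayed bug: {}", err); // rustc calls `delay_span_bug` here
                None
            }
        }
    }

    fn check_receiver(layout: Result<Abi, String>) {
        match abi_of(layout) {
            Some(Abi::Scalar) => {} // the expected receiver ABI
            other => eprintln!("receiver should have a Scalar ABI; found {:?}", other),
        }
    }

    fn main() {
        check_receiver(Ok(Abi::Scalar));
        check_receiver(Ok(Abi::ScalarPair));
        check_receiver(Err("layout error".to_string()));
    }
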
 
@@ -475,7 +480,7 @@ fn virtual_call_violation_for_method<'tcx>(
                 receiver_for_self_ty(tcx, receiver_ty, tcx.mk_unit(), method.def_id);
 
             match abi_of_ty(unit_receiver_ty) {
-                &Abi::Scalar(..) => (),
+                Some(Abi::Scalar(..)) => (),
                 abi => {
                     tcx.sess.delay_span_bug(
                         tcx.def_span(method.def_id),
@@ -495,13 +500,12 @@ fn virtual_call_violation_for_method<'tcx>(
                 receiver_for_self_ty(tcx, receiver_ty, trait_object_ty, method.def_id);
 
             match abi_of_ty(trait_object_receiver) {
-                &Abi::ScalarPair(..) => (),
+                Some(Abi::ScalarPair(..)) => (),
                 abi => {
                     tcx.sess.delay_span_bug(
                         tcx.def_span(method.def_id),
                         &format!(
-                            "receiver when `Self = {}` should have a ScalarPair ABI; \
-                                 found {:?}",
+                            "receiver when `Self = {}` should have a ScalarPair ABI; found {:?}",
                             trait_object_ty, abi
                         ),
                     );
@@ -715,10 +719,10 @@ fn receiver_is_dispatchable<'tcx>(
     })
 }
 
-fn contains_illegal_self_type_reference<'tcx>(
+fn contains_illegal_self_type_reference<'tcx, T: TypeFoldable<'tcx>>(
     tcx: TyCtxt<'tcx>,
     trait_def_id: DefId,
-    ty: Ty<'tcx>,
+    value: T,
 ) -> bool {
     // This is somewhat subtle. In general, we want to forbid
     // references to `Self` in the argument and return types,
@@ -761,7 +765,6 @@ fn contains_illegal_self_type_reference<'tcx>(
 
     struct IllegalSelfTypeVisitor<'tcx> {
         tcx: TyCtxt<'tcx>,
-        self_ty: Ty<'tcx>,
         trait_def_id: DefId,
         supertraits: Option<Vec<ty::PolyTraitRef<'tcx>>>,
     }
@@ -769,7 +772,7 @@ fn contains_illegal_self_type_reference<'tcx>(
     impl<'tcx> TypeVisitor<'tcx> for IllegalSelfTypeVisitor<'tcx> {
         fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {
             match t.kind() {
-                ty::Param(_) => t == self.self_ty,
+                ty::Param(_) => t == self.tcx.types.self_param,
                 ty::Projection(ref data) => {
                     // This is a projected type `<Foo as SomeTrait>::X`.
 
@@ -802,22 +805,62 @@ fn contains_illegal_self_type_reference<'tcx>(
             }
         }
 
-        fn visit_const(&mut self, _c: &ty::Const<'tcx>) -> bool {
-            // FIXME(#72219) Look into the unevaluated constants for object safety violations.
-            // Do not walk substitutions of unevaluated consts, as they contain `Self`, even
-            // though the const expression doesn't necessary use it. Currently type variables
-            // inside array length expressions are forbidden, so they can't break the above
-            // rules.
-            false
+        fn visit_const(&mut self, ct: &ty::Const<'tcx>) -> bool {
+            // First check if the type of this constant references `Self`.
+            if self.visit_ty(ct.ty) {
+                return true;
+            }
+
+            // Constants can only influence object safety if they reference `Self`.
+            // This is only possible for unevaluated constants, so we walk these here.
+            //
+            // If `AbstractConst::new` returned an error we already failed compilation
+            // so we don't have to emit an additional error here.
+            //
+            // We currently recurse into abstract consts here but do not recurse in
+            // `is_const_evaluatable`. This means that the object safety check is more
+            // liberal than the const eval check.
+            //
+            // This shouldn't really matter though as we can't really use any
+            // constants which are not considered const evaluatable.
+            use rustc_middle::mir::abstract_const::Node;
+            if let Ok(Some(ct)) = AbstractConst::from_const(self.tcx, ct) {
+                const_evaluatable::walk_abstract_const(self.tcx, ct, |node| match node {
+                    Node::Leaf(leaf) => {
+                        let leaf = leaf.subst(self.tcx, ct.substs);
+                        self.visit_const(leaf)
+                    }
+                    Node::Binop(..) | Node::UnaryOp(..) | Node::FunctionCall(_, _) => false,
+                })
+            } else {
+                false
+            }
+        }
+
+        fn visit_predicate(&mut self, pred: ty::Predicate<'tcx>) -> bool {
+            if let ty::PredicateAtom::ConstEvaluatable(def, substs) = pred.skip_binders() {
+                // FIXME(const_evaluatable_checked): We should probably deduplicate the logic for
+                // `AbstractConst`s here, it might make sense to change `ConstEvaluatable` to
+                // take a `ty::Const` instead.
+                use rustc_middle::mir::abstract_const::Node;
+                if let Ok(Some(ct)) = AbstractConst::new(self.tcx, def, substs) {
+                    const_evaluatable::walk_abstract_const(self.tcx, ct, |node| match node {
+                        Node::Leaf(leaf) => {
+                            let leaf = leaf.subst(self.tcx, ct.substs);
+                            self.visit_const(leaf)
+                        }
+                        Node::Binop(..) | Node::UnaryOp(..) | Node::FunctionCall(_, _) => false,
+                    })
+                } else {
+                    false
+                }
+            } else {
+                pred.super_visit_with(self)
+            }
         }
     }
 
-    ty.visit_with(&mut IllegalSelfTypeVisitor {
-        tcx,
-        self_ty: tcx.types.self_param,
-        trait_def_id,
-        supertraits: None,
-    })
+    value.visit_with(&mut IllegalSelfTypeVisitor { tcx, trait_def_id, supertraits: None })
 }
 
 pub fn provide(providers: &mut ty::query::Providers) {
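
Making `contains_illegal_self_type_reference` generic over `TypeFoldable` lets the same visitor run on whole predicates and, via `walk_abstract_const`, on unevaluated constants, so a `Self` hidden inside a const expression now counts against object safety. The rule being enforced is the long-standing one; a minimal stable-Rust illustration of it (trait and function names are made up):

    trait Duplicate {
        // The signature mentions `Self` outside the receiver, so the trait is not
        // object safe.
        fn duplicate(&self) -> Self;
    }

    // fn use_dyn(x: &dyn Duplicate) {}
    // ^ error[E0038]: the trait `Duplicate` cannot be made into an object
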
diff --git a/compiler/rustc_typeck/src/check/check.rs b/compiler/rustc_typeck/src/check/check.rs
index 8f2537404c5..6dd8a143ec0 100644
--- a/compiler/rustc_typeck/src/check/check.rs
+++ b/compiler/rustc_typeck/src/check/check.rs
@@ -131,7 +131,7 @@ pub(super) fn check_fn<'a, 'tcx>(
         // The check for a non-trivial pattern is a hack to avoid duplicate warnings
         // for simple cases like `fn foo(x: Trait)`,
         // where we would error once on the parameter as a whole, and once on the binding `x`.
-        if param.pat.simple_ident().is_none() && !tcx.features().unsized_locals {
+        if param.pat.simple_ident().is_none() && !tcx.features().unsized_fn_params {
             fcx.require_type_is_sized(param_ty, param.pat.span, traits::SizedArgumentType(ty_span));
         }
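
With the gate split, `check_fn` keys the sized-argument requirement on `unsized_fn_params` rather than the broader `unsized_locals`. On stable Rust the requirement itself is unchanged; a quick illustration of what it rules out and the usual workaround:

    // On stable Rust a by-value fn parameter must be `Sized`, so unsized data is
    // taken by reference (or boxed) instead:
    fn takes_str(s: &str) -> usize {
        s.len()
    }

    // fn takes_str_by_value(s: str) -> usize { s.len() }
    // ^ error[E0277]: the size for values of type `str` cannot be known at compilation time
    //   (permitted on nightly behind the `unsized_fn_params` gate checked above)
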
 
diff --git a/compiler/rustc_typeck/src/check/expr.rs b/compiler/rustc_typeck/src/check/expr.rs
index 03e448a00cc..324aa1a66a6 100644
--- a/compiler/rustc_typeck/src/check/expr.rs
+++ b/compiler/rustc_typeck/src/check/expr.rs
@@ -42,7 +42,7 @@ use rustc_middle::ty::{AdtKind, Visibility};
 use rustc_span::hygiene::DesugaringKind;
 use rustc_span::source_map::Span;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_trait_selection::traits::{self, ObligationCauseCode, SelectionContext};
+use rustc_trait_selection::traits::{self, ObligationCauseCode};
 
 use std::fmt::Display;
 
@@ -476,7 +476,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 
         if let ty::FnDef(..) = ty.kind() {
             let fn_sig = ty.fn_sig(tcx);
-            if !tcx.features().unsized_locals {
+            if !tcx.features().unsized_fn_params {
                 // We want to remove some Sized bounds from std functions,
                 // but don't want to expose the removal to stable Rust.
                 // i.e., we don't want to allow
@@ -627,7 +627,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 assert!(expr_opt.is_none() || self.tcx.sess.has_errors());
             }
 
-            ctxt.may_break = true;
+            // If we encountered a `break`, then (no surprise) it may be possible to break from the
+            // loop... unless the value being returned from the loop diverges itself, e.g.
+            // `break return 5` or `break loop {}`.
+            ctxt.may_break |= !self.diverges.get().is_always();
 
             // the type of a `break` is always `!`, since it diverges
             tcx.types.never
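
As the new comment says, a `break` whose value itself diverges never hands a usable value back to the loop, so the enclosing loop can still be treated as diverging. A small sketch of code shaped like that (`pick` is an illustrative name; the `allow` only silences the unreachable-code lint on the dead `break`):

    #[allow(unreachable_code)]
    fn pick() -> String {
        // The only `break` carries `return ...`, which has type `!`; the loop never
        // yields a value of its own and coerces to the function's return type.
        loop {
            break return "done".to_string();
        }
    }
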
@@ -1580,51 +1583,34 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         err: &mut DiagnosticBuilder<'_>,
         field_ident: Ident,
         base: &'tcx hir::Expr<'tcx>,
-        expr: &'tcx hir::Expr<'tcx>,
-        def_id: DefId,
+        ty: Ty<'tcx>,
     ) {
-        let param_env = self.tcx().param_env(def_id);
-        let future_trait = self.tcx.require_lang_item(LangItem::Future, None);
-        // Future::Output
-        let item_def_id =
-            self.tcx.associated_items(future_trait).in_definition_order().next().unwrap().def_id;
-
-        let projection_ty = self.tcx.projection_ty_from_predicates((def_id, item_def_id));
-        debug!("suggest_await_on_field_access: projection_ty={:?}", projection_ty);
-
-        let cause = self.misc(expr.span);
-        let mut selcx = SelectionContext::new(&self.infcx);
-
-        let mut obligations = vec![];
-        if let Some(projection_ty) = projection_ty {
-            let normalized_ty = rustc_trait_selection::traits::normalize_projection_type(
-                &mut selcx,
-                param_env,
-                projection_ty,
-                cause,
-                0,
-                &mut obligations,
-            );
-            debug!(
-                "suggest_await_on_field_access: normalized_ty={:?}, ty_kind={:?}",
-                self.resolve_vars_if_possible(&normalized_ty),
-                normalized_ty.kind(),
-            );
-            if let ty::Adt(def, _) = normalized_ty.kind() {
-                // no field access on enum type
-                if !def.is_enum() {
-                    if def.non_enum_variant().fields.iter().any(|field| field.ident == field_ident)
-                    {
-                        err.span_suggestion_verbose(
-                            base.span.shrink_to_hi(),
-                            "consider awaiting before field access",
-                            ".await".to_string(),
-                            Applicability::MaybeIncorrect,
-                        );
-                    }
+        let output_ty = match self.infcx.get_impl_future_output_ty(ty) {
+            Some(output_ty) => self.resolve_vars_if_possible(&output_ty),
+            _ => return,
+        };
+        let mut add_label = true;
+        if let ty::Adt(def, _) = output_ty.kind() {
+            // no field access on enum type
+            if !def.is_enum() {
+                if def.non_enum_variant().fields.iter().any(|field| field.ident == field_ident) {
+                    add_label = false;
+                    err.span_label(
+                        field_ident.span,
+                        "field not available in `impl Future`, but it is available in its `Output`",
+                    );
+                    err.span_suggestion_verbose(
+                        base.span.shrink_to_hi(),
+                        "consider `await`ing on the `Future` and access the field of its `Output`",
+                        ".await".to_string(),
+                        Applicability::MaybeIncorrect,
+                    );
                 }
             }
         }
+        if add_label {
+            err.span_label(field_ident.span, &format!("field not found in `{}`", ty));
+        }
     }
 
     fn ban_nonexisting_field(
@@ -1653,8 +1639,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             ty::Param(param_ty) => {
                 self.point_at_param_definition(&mut err, param_ty);
             }
-            ty::Opaque(def_id, _) => {
-                self.suggest_await_on_field_access(&mut err, field, base, expr, def_id);
+            ty::Opaque(_, _) => {
+                self.suggest_await_on_field_access(&mut err, field, base, expr_t.peel_refs());
             }
             _ => {}
         }
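
The field-access suggestion now goes through `get_impl_future_output_ty` instead of hand-normalizing `Future::Output`, and it labels the field span in addition to suggesting `.await`. The user-facing situation it targets looks roughly like this (types and names are illustrative):

    struct Info {
        id: u32,
    }

    async fn fetch() -> Info {
        Info { id: 1 }
    }

    async fn caller() -> u32 {
        // Writing `fetch().id` here hits the new label
        //   "field not available in `impl Future`, but it is available in its `Output`"
        // along with the suggestion to insert `.await` before the field access:
        fetch().await.id
    }
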
diff --git a/compiler/rustc_typeck/src/check/gather_locals.rs b/compiler/rustc_typeck/src/check/gather_locals.rs
index 1d505cfa698..af552389de0 100644
--- a/compiler/rustc_typeck/src/check/gather_locals.rs
+++ b/compiler/rustc_typeck/src/check/gather_locals.rs
@@ -6,15 +6,20 @@ use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKi
 use rustc_middle::ty::Ty;
 use rustc_span::Span;
 use rustc_trait_selection::traits;
+use std::mem;
 
 pub(super) struct GatherLocalsVisitor<'a, 'tcx> {
     fcx: &'a FnCtxt<'a, 'tcx>,
     parent_id: hir::HirId,
+    // Parameters are special cases of patterns, but we want to handle them as
+    // *distinct* cases, so track when we are hitting a pattern *within* an fn
+    // parameter.
+    outermost_fn_param_pat: bool,
 }
 
 impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
     pub(super) fn new(fcx: &'a FnCtxt<'a, 'tcx>, parent_id: hir::HirId) -> Self {
-        Self { fcx, parent_id }
+        Self { fcx, parent_id, outermost_fn_param_pat: false }
     }
 
     fn assign(&mut self, span: Span, nid: hir::HirId, ty_opt: Option<LocalTy<'tcx>>) -> Ty<'tcx> {
@@ -88,13 +93,29 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
         intravisit::walk_local(self, local);
     }
 
+    fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
+        let old_outermost_fn_param_pat = mem::replace(&mut self.outermost_fn_param_pat, true);
+        intravisit::walk_param(self, param);
+        self.outermost_fn_param_pat = old_outermost_fn_param_pat;
+    }
+
     // Add pattern bindings.
     fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
         if let PatKind::Binding(_, _, ident, _) = p.kind {
             let var_ty = self.assign(p.span, p.hir_id, None);
 
-            if !self.fcx.tcx.features().unsized_locals {
-                self.fcx.require_type_is_sized(var_ty, p.span, traits::VariableType(p.hir_id));
+            if self.outermost_fn_param_pat {
+                if !self.fcx.tcx.features().unsized_fn_params {
+                    self.fcx.require_type_is_sized(
+                        var_ty,
+                        p.span,
+                        traits::SizedArgumentType(Some(p.span)),
+                    );
+                }
+            } else {
+                if !self.fcx.tcx.features().unsized_locals {
+                    self.fcx.require_type_is_sized(var_ty, p.span, traits::VariableType(p.hir_id));
+                }
             }
 
             debug!(
@@ -104,7 +125,9 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
                 var_ty
             );
         }
+        let old_outermost_fn_param_pat = mem::replace(&mut self.outermost_fn_param_pat, false);
         intravisit::walk_pat(self, p);
+        self.outermost_fn_param_pat = old_outermost_fn_param_pat;
     }
 
     // Don't descend into the bodies of nested closures.
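
`GatherLocalsVisitor` now saves and restores the `outermost_fn_param_pat` flag with `mem::replace`, so only the top-level pattern of a parameter is gated on `unsized_fn_params` while bindings nested inside it fall back to the `unsized_locals` rules. The same save/restore idiom in a self-contained form (the `Pat` and `Visitor` types are illustrative, not rustc's):

    use std::mem;

    struct Pat {
        subpatterns: Vec<Pat>,
    }

    struct Visitor {
        outermost_fn_param_pat: bool,
    }

    impl Visitor {
        fn visit_param(&mut self, pat: &Pat) {
            // Entering a fn parameter: its outermost pattern is a param binding.
            let old = mem::replace(&mut self.outermost_fn_param_pat, true);
            self.visit_pat(pat);
            self.outermost_fn_param_pat = old;
        }

        fn visit_pat(&mut self, pat: &Pat) {
            if self.outermost_fn_param_pat {
                // here: gate on `unsized_fn_params`
            } else {
                // here: gate on `unsized_locals`
            }
            // Subpatterns of a parameter are ordinary locals again, so clear the
            // flag before recursing and restore it afterwards.
            let old = mem::replace(&mut self.outermost_fn_param_pat, false);
            for sub in &pat.subpatterns {
                self.visit_pat(sub);
            }
            self.outermost_fn_param_pat = old;
        }
    }
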
diff --git a/compiler/rustc_typeck/src/check/method/suggest.rs b/compiler/rustc_typeck/src/check/method/suggest.rs
index 6d2ffadc20c..46afe4892db 100644
--- a/compiler/rustc_typeck/src/check/method/suggest.rs
+++ b/compiler/rustc_typeck/src/check/method/suggest.rs
@@ -21,7 +21,6 @@ use rustc_span::symbol::{kw, sym, Ident};
 use rustc_span::{source_map, FileName, Span};
 use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
 use rustc_trait_selection::traits::Obligation;
-use rustc_trait_selection::traits::SelectionContext;
 
 use std::cmp::Ordering;
 
@@ -870,46 +869,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         call: &hir::Expr<'_>,
         span: Span,
     ) {
-        if let ty::Opaque(def_id, _) = *ty.kind() {
-            let future_trait = self.tcx.require_lang_item(LangItem::Future, None);
-            // Future::Output
-            let item_def_id = self
-                .tcx
-                .associated_items(future_trait)
-                .in_definition_order()
-                .next()
-                .unwrap()
-                .def_id;
-
-            let projection_ty = self.tcx.projection_ty_from_predicates((def_id, item_def_id));
-            let cause = self.misc(span);
-            let mut selcx = SelectionContext::new(&self.infcx);
-            let mut obligations = vec![];
-            if let Some(projection_ty) = projection_ty {
-                let normalized_ty = rustc_trait_selection::traits::normalize_projection_type(
-                    &mut selcx,
-                    self.param_env,
-                    projection_ty,
-                    cause,
-                    0,
-                    &mut obligations,
-                );
-                debug!(
-                    "suggest_await_before_method: normalized_ty={:?}, ty_kind={:?}",
-                    self.resolve_vars_if_possible(&normalized_ty),
-                    normalized_ty.kind(),
-                );
-                let method_exists = self.method_exists(item_name, normalized_ty, call.hir_id, true);
-                debug!("suggest_await_before_method: is_method_exist={}", method_exists);
-                if method_exists {
-                    err.span_suggestion_verbose(
-                        span.shrink_to_lo(),
-                        "consider awaiting before this method call",
-                        "await.".to_string(),
-                        Applicability::MaybeIncorrect,
-                    );
-                }
-            }
+        let output_ty = match self.infcx.get_impl_future_output_ty(ty) {
+            Some(output_ty) => self.resolve_vars_if_possible(&output_ty),
+            _ => return,
+        };
+        let method_exists = self.method_exists(item_name, output_ty, call.hir_id, true);
+        debug!("suggest_await_before_method: is_method_exist={}", method_exists);
+        if method_exists {
+            err.span_suggestion_verbose(
+                span.shrink_to_lo(),
+                "consider `await`ing on the `Future` and calling the method on its `Output`",
+                "await.".to_string(),
+                Applicability::MaybeIncorrect,
+            );
         }
     }
 
diff --git a/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs b/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs
index be77d049cae..ce157f809ef 100644
--- a/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs
+++ b/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs
@@ -2,8 +2,9 @@ use rustc_errors::struct_span_err;
 use rustc_hir as hir;
 use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use rustc_hir::itemlikevisit::ItemLikeVisitor;
-use rustc_middle::ty::TyCtxt;
+use rustc_middle::ty::{self, TyCtxt};
 use rustc_trait_selection::traits::{self, SkipLeakCheck};
+use smallvec::SmallVec;
 
 pub fn crate_inherent_impls_overlap_check(tcx: TyCtxt<'_>, crate_num: CrateNum) {
     assert_eq!(crate_num, LOCAL_CRATE);
@@ -18,9 +19,18 @@ struct InherentOverlapChecker<'tcx> {
 impl InherentOverlapChecker<'tcx> {
     /// Checks whether any associated items in impls 1 and 2 share the same identifier and
     /// namespace.
-    fn impls_have_common_items(&self, impl1: DefId, impl2: DefId) -> bool {
-        let impl_items1 = self.tcx.associated_items(impl1);
-        let impl_items2 = self.tcx.associated_items(impl2);
+    fn impls_have_common_items(
+        &self,
+        impl_items1: &ty::AssociatedItems<'_>,
+        impl_items2: &ty::AssociatedItems<'_>,
+    ) -> bool {
+        let mut impl_items1 = &impl_items1;
+        let mut impl_items2 = &impl_items2;
+
+        // Performance optimization: iterate over the smaller list
+        if impl_items1.len() > impl_items2.len() {
+            std::mem::swap(&mut impl_items1, &mut impl_items2);
+        }
 
         for item1 in impl_items1.in_definition_order() {
             let collision = impl_items2.filter_by_name_unhygienic(item1.ident.name).any(|item2| {
@@ -113,9 +123,20 @@ impl ItemLikeVisitor<'v> for InherentOverlapChecker<'tcx> {
                 let ty_def_id = self.tcx.hir().local_def_id(item.hir_id);
                 let impls = self.tcx.inherent_impls(ty_def_id);
 
-                for (i, &impl1_def_id) in impls.iter().enumerate() {
-                    for &impl2_def_id in &impls[(i + 1)..] {
-                        if self.impls_have_common_items(impl1_def_id, impl2_def_id) {
+                // If there is only one inherent impl block,
+                // there is nothing to check it against for overlap.
+                if impls.len() <= 1 {
+                    return;
+                }
+
+                let impls_items = impls
+                    .iter()
+                    .map(|impl_def_id| (impl_def_id, self.tcx.associated_items(*impl_def_id)))
+                    .collect::<SmallVec<[_; 8]>>();
+
+                for (i, &(&impl1_def_id, impl_items1)) in impls_items.iter().enumerate() {
+                    for &(&impl2_def_id, impl_items2) in &impls_items[(i + 1)..] {
+                        if self.impls_have_common_items(impl_items1, impl_items2) {
                             self.check_for_overlapping_inherent_impls(impl1_def_id, impl2_def_id);
                         }
                     }
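
Precomputing each impl block's associated items once into a `SmallVec` and probing the smaller of the two item lists makes the pairwise check roughly `min(len1, len2)` lookups per pair, and the early return skips types with a single impl entirely. A stand-alone sketch of the same shape using plain std collections (names illustrative, not the rustc types):

    use std::collections::HashSet;

    fn have_common_items(a: &HashSet<String>, b: &HashSet<String>) -> bool {
        // Probe the smaller set against the larger one.
        let (small, large) = if a.len() <= b.len() { (a, b) } else { (b, a) };
        small.iter().any(|name| large.contains(name))
    }

    fn overlapping_pairs(impl_items: &[HashSet<String>]) -> Vec<(usize, usize)> {
        // With at most one impl block there is nothing to compare.
        if impl_items.len() <= 1 {
            return Vec::new();
        }
        let mut pairs = Vec::new();
        for (i, items1) in impl_items.iter().enumerate() {
            for (j, items2) in impl_items.iter().enumerate().skip(i + 1) {
                if have_common_items(items1, items2) {
                    pairs.push((i, j));
                }
            }
        }
        pairs
    }
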
diff --git a/compiler/rustc_typeck/src/collect.rs b/compiler/rustc_typeck/src/collect.rs
index afc95eb4718..136867d78f5 100644
--- a/compiler/rustc_typeck/src/collect.rs
+++ b/compiler/rustc_typeck/src/collect.rs
@@ -2090,25 +2090,25 @@ fn const_evaluatable_predicates_of<'tcx>(
     if let hir::Node::Item(item) = node {
         if let hir::ItemKind::Impl { ref of_trait, ref self_ty, .. } = item.kind {
             if let Some(of_trait) = of_trait {
-                warn!("const_evaluatable_predicates_of({:?}): visit impl trait_ref", def_id);
+                debug!("const_evaluatable_predicates_of({:?}): visit impl trait_ref", def_id);
                 collector.visit_trait_ref(of_trait);
             }
 
-            warn!("const_evaluatable_predicates_of({:?}): visit_self_ty", def_id);
+            debug!("const_evaluatable_predicates_of({:?}): visit_self_ty", def_id);
             collector.visit_ty(self_ty);
         }
     }
 
     if let Some(generics) = node.generics() {
-        warn!("const_evaluatable_predicates_of({:?}): visit_generics", def_id);
+        debug!("const_evaluatable_predicates_of({:?}): visit_generics", def_id);
         collector.visit_generics(generics);
     }
 
     if let Some(fn_sig) = tcx.hir().fn_sig_by_hir_id(hir_id) {
-        warn!("const_evaluatable_predicates_of({:?}): visit_fn_decl", def_id);
+        debug!("const_evaluatable_predicates_of({:?}): visit_fn_decl", def_id);
         collector.visit_fn_decl(fn_sig.decl);
     }
-    warn!("const_evaluatable_predicates_of({:?}) = {:?}", def_id, collector.preds);
+    debug!("const_evaluatable_predicates_of({:?}) = {:?}", def_id, collector.preds);
 
     collector.preds
 }