Diffstat (limited to 'compiler/rustc_pattern_analysis/src')
-rw-r--r--  compiler/rustc_pattern_analysis/src/constructor.rs   1198
-rw-r--r--  compiler/rustc_pattern_analysis/src/errors.rs          143
-rw-r--r--  compiler/rustc_pattern_analysis/src/lib.rs             126
-rw-r--r--  compiler/rustc_pattern_analysis/src/lints.rs           109
-rw-r--r--  compiler/rustc_pattern_analysis/src/pat.rs             317
-rw-r--r--  compiler/rustc_pattern_analysis/src/pat_column.rs       90
-rw-r--r--  compiler/rustc_pattern_analysis/src/rustc.rs          1098
-rw-r--r--  compiler/rustc_pattern_analysis/src/rustc/print.rs     193
-rw-r--r--  compiler/rustc_pattern_analysis/src/usefulness.rs     1883
9 files changed, 5157 insertions, 0 deletions
diff --git a/compiler/rustc_pattern_analysis/src/constructor.rs b/compiler/rustc_pattern_analysis/src/constructor.rs
new file mode 100644
index 00000000000..3a2a75a638f
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/constructor.rs
@@ -0,0 +1,1198 @@
+//! As explained in [`crate::usefulness`], values and patterns are made from constructors applied to
+//! fields. This file defines a `Constructor` enum and various operations to manipulate them.
+//!
+//! There are two important bits of core logic in this file: constructor inclusion and constructor
+//! splitting. Constructor inclusion, i.e. whether a constructor is included in/covered by another,
+//! is straightforward and defined in [`Constructor::is_covered_by`].
+//!
+//! Constructor splitting is mentioned in [`crate::usefulness`] but not detailed. We describe it
+//! precisely here.
+//!
+//!
+//!
+//! # Constructor grouping and splitting
+//!
+//! As explained in the corresponding section in [`crate::usefulness`], to make usefulness tractable
+//! we need to group together constructors that have the same effect when they are used to
+//! specialize the matrix.
+//!
+//! Example:
+//! ```compile_fail,E0004
+//! match (0, false) {
+//!     (0 ..=100, true) => {}
+//!     (50..=150, false) => {}
+//!     (0 ..=200, _) => {}
+//! }
+//! ```
+//!
+//! In this example we can restrict specialization to 6 cases: `..0`, `0..50`, `50..=100`,
+//! `101..=150`, `151..=200` and `201..`.
+//!
+//! In [`crate::usefulness`], we had said that `specialize` only takes value-only constructors. We
+//! now relax this restriction: we allow `specialize` to take constructors like `0..50` as long as
+//! we're careful to only do that with constructors that make sense. For example, `specialize(0..50,
+//! (0..=100, true))` is sensible, but `specialize(50..=200, (0..=100, true))` is not.
+//!
+//! Constructor splitting looks at the constructors in the first column of the matrix and constructs
+//! such a sensible set of constructors. Formally, we want to find a smallest disjoint set of
+//! constructors:
+//! - Whose union covers the whole type, and
+//! - That have no non-trivial intersection with any of the constructors in the column (i.e. they're
+//!     each either disjoint with or covered by any given column constructor).
+//!
+//! We compute this in two steps: first [`PatCx::ctors_for_ty`] determines the
+//! set of all possible constructors for the type. Then [`ConstructorSet::split`] looks at the
+//! column of constructors and splits the set into groups accordingly. The precise invariants of
+//! [`ConstructorSet::split`] are described in [`SplitConstructorSet`].
+//!
+//! Constructor splitting has two interesting special cases: integer range splitting (see
+//! [`IntRange::split`]) and slice splitting (see [`Slice::split`]).
+//!
+//!
+//!
+//! # The `Missing` constructor
+//!
+//! We detail a special case of constructor splitting that is a bit subtle. Take the following:
+//!
+//! ```
+//! enum Direction { North, South, East, West }
+//! # let wind = (Direction::North, 0u8);
+//! match wind {
+//!     (Direction::North, 50..) => {}
+//!     (_, _) => {}
+//! }
+//! ```
+//!
+//! Here we expect constructor splitting to output two cases: `North`, and "everything else". This
+//! "everything else" is represented by [`Constructor::Missing`]. Unlike other constructors, it's a
+//! bit contextual: to know the exact list of constructors it represents we have to look at the
+//! column. In practice however we don't need to, because by construction it only matches rows that
+//! have wildcards. This is how this constructor is special: the only constructor that covers it is
+//! `Wildcard`.
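+//!
+//! For example, in the `wind` match above, splitting the first column yields `North` (present) and
+//! `Missing`, which here stands for the unlisted variants `South`, `East` and `West`.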
+//!
+//! The only place where we care about which constructors `Missing` represents is in diagnostics
+//! (see `crate::usefulness::WitnessMatrix::apply_constructor`).
+//!
+//! We choose whether to specialize with `Missing` in
+//! `crate::usefulness::compute_exhaustiveness_and_usefulness`.
+//!
+//!
+//!
+//! ## Empty types, empty constructors, and the `exhaustive_patterns` feature
+//!
+//! An empty type is a type that has no valid value, like `!`, `enum Void {}`, or `Result<!, !>`.
+//! They require careful handling.
+//!
+//! First, for soundness reasons related to the possible existence of invalid values, by default we
+//! don't treat empty types as empty. We force them to be matched with wildcards. Except if the
+//! `exhaustive_patterns` feature is turned on, in which case we do treat them as empty. And also
+//! except if the type has no constructors (like `enum Void {}` but not like `Result<!, !>`), we
+//! specifically allow `match void {}` to be exhaustive. There are additionally considerations of
+//! place validity that are handled in `crate::usefulness`. Yes this is a bit tricky.
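+//!
+//! For example (an illustrative snippet), the "no constructors" case is what makes an empty match
+//! on an empty enum exhaustive, with no feature gate required:
+//!
+//! ```
+//! enum Void {}
+//! fn explode(v: Void) -> u32 {
+//!     match v {} // exhaustive: `Void` has no constructors at all
+//! }
+//! ```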
+//!
+//! The second thing is that regardless of the above, it is always allowed to use all the
+//! constructors of a type. For example, all of the following are ok:
+//!
+//! ```rust,ignore(example)
+//! # #![feature(never_type)]
+//! # #![feature(exhaustive_patterns)]
+//! fn foo(x: Option<!>) {
+//!   match x {
+//!     None => {}
+//!     Some(_) => {}
+//!   }
+//! }
+//! fn bar(x: &[!]) -> u32 {
+//!   match x {
+//!     [] => 1,
+//!     [_] => 2,
+//!     [_, _] => 3,
+//!   }
+//! }
+//! ```
+//!
+//! Moreover, take the following:
+//!
+//! ```rust
+//! # #![feature(never_type)]
+//! # #![feature(exhaustive_patterns)]
+//! # let x = None::<!>;
+//! match x {
+//!   None => {}
+//! }
+//! ```
+//!
+//! On a normal type, we would identify `Some` as missing and tell the user. If `x: Option<!>`
+//! however (and `exhaustive_patterns` is on), it's ok to omit `Some`. When listing the constructors
+//! of a type, we must therefore track which can be omitted.
+//!
+//! Let's call "empty" a constructor that matches no valid value for the type, like `Some` for the
+//! type `Option<!>`. What this all means is that `ConstructorSet` must know which constructors are
+//! empty. The difference between empty and nonempty constructors is that empty constructors need
+//! not be present for the match to be exhaustive.
+//!
+//! A final remark: empty constructors of arity 0 break specialization, so we must avoid them. The
+//! reason is that if we specialize by them, nothing remains to witness the emptiness; the rest of
+//! the algorithm can't distinguish them from a nonempty constructor. The only known case where this
+//! could happen is the `[..]` pattern on `[!; N]` with `N > 0` so we must take care to not emit it.
+//!
+//! This is all handled by [`PatCx::ctors_for_ty`] and
+//! [`ConstructorSet::split`]. The invariants of [`SplitConstructorSet`] are also of interest.
+//!
+//!
+//! ## Unions
+//!
+//! Unions allow us to match a value via several overlapping representations at the same time. For
+//! example, the following is exhaustive because when seeing the value as a boolean we handled all
+//! possible cases (other cases such as `n == 3` would trigger UB).
+//!
+//! ```rust
+//! # fn main() {
+//! union U8AsBool {
+//!     n: u8,
+//!     b: bool,
+//! }
+//! let x = U8AsBool { n: 1 };
+//! unsafe {
+//!     match x {
+//!         U8AsBool { n: 2 } => {}
+//!         U8AsBool { b: true } => {}
+//!         U8AsBool { b: false } => {}
+//!     }
+//! }
+//! # }
+//! ```
+//!
+//! Pattern-matching has no knowledge that e.g. `false as u8 == 0`, so the values we consider in the
+//! algorithm look like `U8AsBool { b: true, n: 2 }`. In other words, for the most part a union is
+//! treated like a struct with the same fields. The difference lies in how we construct witnesses of
+//! non-exhaustiveness.
+//!
+//!
+//! ## Opaque patterns
+//!
+//! Some patterns, such as constants that are not allowed to be matched structurally, cannot be
+//! inspected, which we handle with `Constructor::Opaque`. Since we know nothing of these patterns,
+//! we assume they never cover each other. In order to respect the invariants of
+//! [`SplitConstructorSet`], we give each `Opaque` constructor a unique id so we can recognize it.
+
+use std::cmp::{self, max, min, Ordering};
+use std::fmt;
+use std::iter::once;
+
+use rustc_apfloat::ieee::{DoubleS, HalfS, IeeeFloat, QuadS, SingleS};
+use rustc_index::bit_set::{BitSet, GrowableBitSet};
+use rustc_index::IndexVec;
+use smallvec::SmallVec;
+
+use self::Constructor::*;
+use self::MaybeInfiniteInt::*;
+use self::SliceKind::*;
+use crate::PatCx;
+
+/// Whether we have seen a constructor in the column or not.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum Presence {
+    Unseen,
+    Seen,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum RangeEnd {
+    Included,
+    Excluded,
+}
+
+impl fmt::Display for RangeEnd {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            RangeEnd::Included => "..=",
+            RangeEnd::Excluded => "..",
+        })
+    }
+}
+
+/// A possibly infinite integer. Values are encoded such that the ordering on `u128` matches the
+/// natural order on the original type. For example, `-128i8` is encoded as `0` and `127i8` as
+/// `255`. See `signed_bias` for details.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub enum MaybeInfiniteInt {
+    NegInfinity,
+    /// Encoded value. DO NOT CONSTRUCT BY HAND; use `new_finite_{int,uint}`.
+    #[non_exhaustive]
+    Finite(u128),
+    PosInfinity,
+}
+
+impl MaybeInfiniteInt {
+    pub fn new_finite_uint(bits: u128) -> Self {
+        Finite(bits)
+    }
+    pub fn new_finite_int(bits: u128, size: u64) -> Self {
+        // Perform a shift if the underlying types are signed, which makes the interval arithmetic
+        // type-independent.
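+        // E.g. for `i8` (size 8): `bias = 0x80`, so `-128i8` (bits `0x80`) encodes to `0` and
+        // `127i8` (bits `0x7f`) encodes to `0xff = 255`, matching the natural signed order.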
+        let bias = 1u128 << (size - 1);
+        Finite(bits ^ bias)
+    }
+
+    pub fn as_finite_uint(self) -> Option<u128> {
+        match self {
+            Finite(bits) => Some(bits),
+            _ => None,
+        }
+    }
+    pub fn as_finite_int(self, size: u64) -> Option<u128> {
+        // We decode the shift.
+        match self {
+            Finite(bits) => {
+                let bias = 1u128 << (size - 1);
+                Some(bits ^ bias)
+            }
+            _ => None,
+        }
+    }
+
+    /// Note: this will not turn a finite value into an infinite one or vice-versa.
+    pub fn minus_one(self) -> Option<Self> {
+        match self {
+            Finite(n) => n.checked_sub(1).map(Finite),
+            x => Some(x),
+        }
+    }
+    /// Note: this will turn `u128::MAX` into `PosInfinity`. This means `plus_one` and `minus_one`
+    /// are not strictly inverses, but that poses no problem in our use of them.
+    pub fn plus_one(self) -> Option<Self> {
+        match self {
+            Finite(n) => match n.checked_add(1) {
+                Some(m) => Some(Finite(m)),
+                None => Some(PosInfinity),
+            },
+            x => Some(x),
+        }
+    }
+}
+
+/// An exclusive interval, used for precise integer exhaustiveness checking. `IntRange`s always
+/// store a contiguous range.
+///
+/// `IntRange` is never used to encode an empty range or a "range" that wraps around the (offset)
+/// space: i.e., `range.lo < range.hi`.
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub struct IntRange {
+    pub lo: MaybeInfiniteInt, // Must not be `PosInfinity`.
+    pub hi: MaybeInfiniteInt, // Must not be `NegInfinity`.
+}
+
+impl IntRange {
+    /// Best effort; will not know that e.g. `255u8..` is a singleton.
+    pub fn is_singleton(&self) -> bool {
+        // Since `lo` and `hi` can't be the same `Infinity`, and `plus_one` only turns a finite
+        // value (namely `u128::MAX`) into `PosInfinity`, this correctly only detects ranges that
+        // contain exactly one `Finite(x)`.
+        self.lo.plus_one() == Some(self.hi)
+    }
+
+    /// Construct a singleton range.
+    /// `x` must be a `Finite(_)` value.
+    #[inline]
+    pub fn from_singleton(x: MaybeInfiniteInt) -> IntRange {
+        // `unwrap()` is ok on a finite value
+        IntRange { lo: x, hi: x.plus_one().unwrap() }
+    }
+
+    /// Construct a range with these boundaries.
+    /// `lo` must not be `PosInfinity`. `hi` must not be `NegInfinity`.
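+    /// E.g. `from_range(Finite(0), Finite(255), RangeEnd::Included)` is the encoded range `0..256`.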
+    #[inline]
+    pub fn from_range(lo: MaybeInfiniteInt, mut hi: MaybeInfiniteInt, end: RangeEnd) -> IntRange {
+        if end == RangeEnd::Included {
+            hi = hi.plus_one().unwrap();
+        }
+        if lo >= hi {
+            // This should have been caught earlier by E0030.
+            panic!("malformed range pattern: {lo:?}..{hi:?}");
+        }
+        IntRange { lo, hi }
+    }
+
+    fn is_subrange(&self, other: &Self) -> bool {
+        other.lo <= self.lo && self.hi <= other.hi
+    }
+
+    fn intersection(&self, other: &Self) -> Option<Self> {
+        if self.lo < other.hi && other.lo < self.hi {
+            Some(IntRange { lo: max(self.lo, other.lo), hi: min(self.hi, other.hi) })
+        } else {
+            None
+        }
+    }
+
+    /// Partition a range of integers into disjoint subranges. This does constructor splitting for
+    /// integer ranges as explained at the top of the file.
+    ///
+    /// This returns an output that covers `self`. The output is split so that the only
+    /// intersections between an output range and a column range are inclusions. No output range
+    /// straddles the boundary of one of the inputs.
+    ///
+    /// Additionally, we track for each output range whether it is covered by one of the column ranges or not.
+    ///
+    /// The following input:
+    /// ```text
+    ///   (--------------------------) // `self`
+    /// (------) (----------)    (-)
+    ///     (------) (--------)
+    /// ```
+    /// is first intersected with `self`:
+    /// ```text
+    ///   (--------------------------) // `self`
+    ///   (----) (----------)    (-)
+    ///     (------) (--------)
+    /// ```
+    /// and then iterated over as follows:
+    /// ```text
+    ///   (-(--)-(-)-(------)-)--(-)-
+    /// ```
+    /// where each sequence of dashes is an output range, and dashes outside parentheses are marked
+    /// as `Presence::Unseen`.
+    ///
+    /// ## `isize`/`usize`
+    ///
+    /// Whereas a wildcard of type `i32` stands for the range `i32::MIN..=i32::MAX`, a `usize`
+    /// wildcard stands for `0..PosInfinity` and a `isize` wildcard stands for
+    /// `NegInfinity..PosInfinity`. In other words, as far as `IntRange` is concerned, there are
+    /// values before `isize::MIN` and after `usize::MAX`/`isize::MAX`.
+    /// This is to prevent e.g. `0..(u32::MAX as usize)` from being exhaustive on one architecture
+    /// but not others. This was decided in <https://github.com/rust-lang/rfcs/pull/2591>.
+    ///
+    /// These infinities affect splitting subtly: it is possible to get `NegInfinity..0` and
+    /// `usize::MAX+1..PosInfinity` in the output. Diagnostics must be careful to handle these
+    /// fictitious ranges sensibly.
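+    ///
+    /// As a small illustration, working directly in the encoded exclusive representation:
+    /// splitting `0..11` against the column ranges `0..6` and `3..8` yields `(Seen, 0..3)`,
+    /// `(Seen, 3..6)`, `(Seen, 6..8)` and `(Unseen, 8..11)`.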
+    fn split(
+        &self,
+        column_ranges: impl Iterator<Item = IntRange>,
+    ) -> impl Iterator<Item = (Presence, IntRange)> {
+        // The boundaries of ranges in `column_ranges` intersected with `self`.
+        // We do parenthesis matching for input ranges. A boundary counts as +1 if it starts
+        // a range and -1 if it ends it. When the count is > 0 between two boundaries, we
+        // are within an input range.
+        let mut boundaries: Vec<(MaybeInfiniteInt, isize)> = column_ranges
+            .filter_map(|r| self.intersection(&r))
+            .flat_map(|r| [(r.lo, 1), (r.hi, -1)])
+            .collect();
+        // We sort by boundary, and for each boundary we sort the "closing parentheses" first. The
+        // order of +1/-1 for the same boundary value is actually irrelevant, because we only look at
+        // the accumulated count between distinct boundary values.
+        boundaries.sort_unstable();
+
+        // Accumulate parenthesis counts.
+        let mut paren_counter = 0isize;
+        // Gather pairs of adjacent boundaries.
+        let mut prev_bdy = self.lo;
+        boundaries
+            .into_iter()
+            // End with the end of the range. The count is ignored.
+            .chain(once((self.hi, 0)))
+            // List pairs of adjacent boundaries and the count between them.
+            .map(move |(bdy, delta)| {
+                // `delta` affects the count as we cross `bdy`, so the relevant count between
+                // `prev_bdy` and `bdy` is untouched by `delta`.
+                let ret = (prev_bdy, paren_counter, bdy);
+                prev_bdy = bdy;
+                paren_counter += delta;
+                ret
+            })
+            // Skip empty ranges.
+            .filter(|&(prev_bdy, _, bdy)| prev_bdy != bdy)
+            // Convert back to ranges.
+            .map(move |(prev_bdy, paren_count, bdy)| {
+                use Presence::*;
+                let presence = if paren_count > 0 { Seen } else { Unseen };
+                let range = IntRange { lo: prev_bdy, hi: bdy };
+                (presence, range)
+            })
+    }
+}
+
+/// Note: this will render signed ranges incorrectly. To render properly, convert to a pattern
+/// first.
+impl fmt::Debug for IntRange {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.is_singleton() {
+            // Only finite ranges can be singletons.
+            let Finite(lo) = self.lo else { unreachable!() };
+            write!(f, "{lo}")?;
+        } else {
+            if let Finite(lo) = self.lo {
+                write!(f, "{lo}")?;
+            }
+            write!(f, "{}", RangeEnd::Excluded)?;
+            if let Finite(hi) = self.hi {
+                write!(f, "{hi}")?;
+            }
+        }
+        Ok(())
+    }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum SliceKind {
+    /// Patterns of length `n` (`[x, y]`).
+    FixedLen(usize),
+    /// Patterns using the `..` notation (`[x, .., y]`).
+    /// Captures any array constructor of `length >= i + j`.
+    /// In the case where `array_len` is `Some(_)`,
+    /// this indicates that we only care about the first `i` and the last `j` values of the array,
+    /// and everything in between is a wildcard `_`.
+    VarLen(usize, usize),
+}
+
+impl SliceKind {
+    pub fn arity(self) -> usize {
+        match self {
+            FixedLen(length) => length,
+            VarLen(prefix, suffix) => prefix + suffix,
+        }
+    }
+
+    /// Whether this pattern includes patterns of length `other_len`.
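+    /// E.g. `FixedLen(3)` covers only length 3, while `VarLen(2, 1)` covers every length `>= 3`.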
+    fn covers_length(self, other_len: usize) -> bool {
+        match self {
+            FixedLen(len) => len == other_len,
+            VarLen(prefix, suffix) => prefix + suffix <= other_len,
+        }
+    }
+}
+
+/// A constructor for array and slice patterns.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct Slice {
+    /// `None` if the matched value is a slice, `Some(n)` if it is an array of size `n`.
+    pub(crate) array_len: Option<usize>,
+    /// The kind of pattern it is: fixed-length `[x, y]` or variable length `[x, .., y]`.
+    pub(crate) kind: SliceKind,
+}
+
+impl Slice {
+    pub fn new(array_len: Option<usize>, kind: SliceKind) -> Self {
+        let kind = match (array_len, kind) {
+            // If the middle `..` has length 0, we effectively have a fixed-length pattern.
+            (Some(len), VarLen(prefix, suffix)) if prefix + suffix == len => FixedLen(len),
+            (Some(len), VarLen(prefix, suffix)) if prefix + suffix > len => panic!(
+                "Slice pattern of length {} longer than its array length {len}",
+                prefix + suffix
+            ),
+            _ => kind,
+        };
+        Slice { array_len, kind }
+    }
+
+    pub fn arity(self) -> usize {
+        self.kind.arity()
+    }
+
+    /// See `Constructor::is_covered_by`
+    fn is_covered_by(self, other: Self) -> bool {
+        other.kind.covers_length(self.arity())
+    }
+
+    /// This computes constructor splitting for variable-length slices, as explained at the top of
+    /// the file.
+    ///
+    /// A slice pattern `[x, .., y]` behaves like the infinite or-pattern `[x, y] | [x, _, y] | [x,
+    /// _, _, y] | etc`. The corresponding value constructors are fixed-length array constructors of
+    /// corresponding lengths. We obviously can't list this infinitude of constructors.
+    /// Thankfully, it turns out that for each finite set of slice patterns, all sufficiently large
+    /// array lengths are equivalent.
+    ///
+    /// Let's look at an example, where we are trying to split the last pattern:
+    /// ```
+    /// # fn foo(x: &[bool]) {
+    /// match x {
+    ///     [true, true, ..] => {}
+    ///     [.., false, false] => {}
+    ///     [..] => {}
+    /// }
+    /// # }
+    /// ```
+    /// Here are the results of specialization for the first few lengths:
+    /// ```
+    /// # fn foo(x: &[bool]) { match x {
+    /// // length 0
+    /// [] => {}
+    /// // length 1
+    /// [_] => {}
+    /// // length 2
+    /// [true, true] => {}
+    /// [false, false] => {}
+    /// [_, _] => {}
+    /// // length 3
+    /// [true, true,  _    ] => {}
+    /// [_,    false, false] => {}
+    /// [_,    _,     _    ] => {}
+    /// // length 4
+    /// [true, true, _,     _    ] => {}
+    /// [_,    _,    false, false] => {}
+    /// [_,    _,    _,     _    ] => {}
+    /// // length 5
+    /// [true, true, _, _,     _    ] => {}
+    /// [_,    _,    _, false, false] => {}
+    /// [_,    _,    _, _,     _    ] => {}
+    /// # _ => {}
+    /// # }}
+    /// ```
+    ///
+    /// We see that above length 4, we are simply inserting columns full of wildcards in the middle.
+    /// This means that specialization and witness computation with slices of length `l >= 4` will
+    /// give equivalent results regardless of `l`. This applies to any set of slice patterns: there
+    /// will be a length `L` above which all lengths behave the same. This is exactly what we need
+    /// for constructor splitting.
+    ///
+    /// A variable-length slice pattern covers all lengths from its arity up to infinity. As we just
+    /// saw, we can split this in two: lengths below `L` are treated individually with a
+    /// fixed-length slice each; lengths above `L` are grouped into a single variable-length slice
+    /// constructor.
+    ///
+    /// For each variable-length slice pattern `p` with a prefix of length `plₚ` and suffix of
+    /// length `slₚ`, only the first `plₚ` and the last `slₚ` elements are examined. Therefore, as
+    /// long as `L` is positive (to avoid concerns about empty types), all elements after the
+    /// maximum prefix length and before the maximum suffix length are not examined by any
+    /// variable-length pattern, and therefore can be ignored. This gives us a way to compute `L`.
+    ///
+    /// Additionally, if fixed-length patterns exist, we must pick an `L` large enough to miss them,
+    /// so we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`.
+    /// `max_slice` below will be made to have this arity `L`.
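+    ///
+    /// In the example above, `max(PREFIX_LEN) = max(SUFFIX_LEN) = 2` and there are no fixed-length
+    /// patterns, so `L = 4`: splitting `[..]` produces fixed-length slices of lengths 0 through 3
+    /// plus a `VarLen(2, 2)` slice covering every greater length.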
+    ///
+    /// If `self` is fixed-length, it is returned as-is.
+    ///
+    /// Additionally, we track for each output slice whether it is covered by one of the column slices or not.
+    fn split(
+        self,
+        column_slices: impl Iterator<Item = Slice>,
+    ) -> impl Iterator<Item = (Presence, Slice)> {
+        // Range of lengths below `L`.
+        let smaller_lengths;
+        let arity = self.arity();
+        let mut max_slice = self.kind;
+        // Tracks the smallest variable-length slice we've seen. Any slice arity above it is
+        // therefore `Presence::Seen` in the column.
+        let mut min_var_len = usize::MAX;
+        // Tracks the fixed-length slices we've seen, to mark them as `Presence::Seen`.
+        let mut seen_fixed_lens = GrowableBitSet::new_empty();
+        match &mut max_slice {
+            VarLen(max_prefix_len, max_suffix_len) => {
+                // A length larger than any fixed-length slice encountered.
+                // We start at 1 in case the subtype is empty because in that case the zero-length
+                // slice must be treated separately from the rest.
+                let mut fixed_len_upper_bound = 1;
+                // We grow `max_slice` to be larger than all slices encountered, as described above.
+                // `L` is `max_slice.arity()`. For diagnostics, we keep the prefix and suffix
+                // lengths separate.
+                for slice in column_slices {
+                    match slice.kind {
+                        FixedLen(len) => {
+                            fixed_len_upper_bound = cmp::max(fixed_len_upper_bound, len + 1);
+                            seen_fixed_lens.insert(len);
+                        }
+                        VarLen(prefix, suffix) => {
+                            *max_prefix_len = cmp::max(*max_prefix_len, prefix);
+                            *max_suffix_len = cmp::max(*max_suffix_len, suffix);
+                            min_var_len = cmp::min(min_var_len, prefix + suffix);
+                        }
+                    }
+                }
+                // If `fixed_len_upper_bound >= L`, we set `L` to `fixed_len_upper_bound`.
+                if let Some(delta) =
+                    fixed_len_upper_bound.checked_sub(*max_prefix_len + *max_suffix_len)
+                {
+                    *max_prefix_len += delta
+                }
+
+                // We cap the arity of `max_slice` at the array size.
+                match self.array_len {
+                    Some(len) if max_slice.arity() >= len => max_slice = FixedLen(len),
+                    _ => {}
+                }
+
+                smaller_lengths = match self.array_len {
+                    // The only admissible fixed-length slice is one of the array size. Whether `max_slice`
+                    // is fixed-length or variable-length, it will be the only relevant slice to output
+                    // here.
+                    Some(_) => 0..0, // empty range
+                    // We need to cover all arities in the range `(arity..infinity)`. We split that
+                    // range into two: lengths smaller than `max_slice.arity()` are treated
+                    // independently as fixed-lengths slices, and lengths above are captured by
+                    // `max_slice`.
+                    None => self.arity()..max_slice.arity(),
+                };
+            }
+            FixedLen(_) => {
+                // No need to split here. We only track presence.
+                for slice in column_slices {
+                    match slice.kind {
+                        FixedLen(len) => {
+                            if len == arity {
+                                seen_fixed_lens.insert(len);
+                            }
+                        }
+                        VarLen(prefix, suffix) => {
+                            min_var_len = cmp::min(min_var_len, prefix + suffix);
+                        }
+                    }
+                }
+                smaller_lengths = 0..0;
+            }
+        };
+
+        smaller_lengths.map(FixedLen).chain(once(max_slice)).map(move |kind| {
+            let arity = kind.arity();
+            let seen = if min_var_len <= arity || seen_fixed_lens.contains(arity) {
+                Presence::Seen
+            } else {
+                Presence::Unseen
+            };
+            (seen, Slice::new(self.array_len, kind))
+        })
+    }
+}
+
+/// A globally unique id to distinguish `Opaque` patterns.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct OpaqueId(u32);
+
+impl OpaqueId {
+    pub fn new() -> Self {
+        use std::sync::atomic::{AtomicU32, Ordering};
+        static OPAQUE_ID: AtomicU32 = AtomicU32::new(0);
+        OpaqueId(OPAQUE_ID.fetch_add(1, Ordering::SeqCst))
+    }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// the constructor. See also `Fields`.
+///
+/// `pat_constructor` retrieves the constructor corresponding to a pattern.
+/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a
+/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
+/// `Fields`.
+#[derive(Debug)]
+pub enum Constructor<Cx: PatCx> {
+    /// Tuples and structs.
+    Struct,
+    /// Enum variants.
+    Variant(Cx::VariantIdx),
+    /// References
+    Ref,
+    /// Array and slice patterns.
+    Slice(Slice),
+    /// Union field accesses.
+    UnionField,
+    /// Booleans
+    Bool(bool),
+    /// Ranges of integer literal values (`2`, `2..=5` or `2..5`).
+    IntRange(IntRange),
+    /// Ranges of floating-point literal values (`2.0..=5.2`).
+    F16Range(IeeeFloat<HalfS>, IeeeFloat<HalfS>, RangeEnd),
+    F32Range(IeeeFloat<SingleS>, IeeeFloat<SingleS>, RangeEnd),
+    F64Range(IeeeFloat<DoubleS>, IeeeFloat<DoubleS>, RangeEnd),
+    F128Range(IeeeFloat<QuadS>, IeeeFloat<QuadS>, RangeEnd),
+    /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
+    Str(Cx::StrLit),
+    /// Constants that must not be matched structurally. They are treated as black boxes for the
+    /// purposes of exhaustiveness: we must not inspect them, and they don't count towards making a
+    /// match exhaustive.
+    /// Carries an id that must be unique within a match. We need this to ensure the invariants of
+    /// [`SplitConstructorSet`].
+    Opaque(OpaqueId),
+    /// Or-pattern.
+    Or,
+    /// Wildcard pattern.
+    Wildcard,
+    /// Never pattern. Only used in `WitnessPat`. An actual never pattern should be lowered as
+    /// `Wildcard`.
+    Never,
+    /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used
+    /// for those types for which we cannot list constructors explicitly, like `f64` and `str`. Only
+    /// used in `WitnessPat`.
+    NonExhaustive,
+    /// Fake extra constructor for variants that should not be mentioned in diagnostics. We use this
+    /// for variants behind an unstable gate as well as `#[doc(hidden)]` ones. Only used in
+    /// `WitnessPat`.
+    Hidden,
+    /// Fake extra constructor for constructors that are not seen in the matrix, as explained at the
+    /// top of the file. Only used for specialization.
+    Missing,
+    /// Fake extra constructor that indicates an empty field that is private. When we encounter one
+    /// we skip the column entirely so we don't observe its emptiness. Only used for specialization.
+    PrivateUninhabited,
+}
+
+impl<Cx: PatCx> Clone for Constructor<Cx> {
+    fn clone(&self) -> Self {
+        match self {
+            Constructor::Struct => Constructor::Struct,
+            Constructor::Variant(idx) => Constructor::Variant(*idx),
+            Constructor::Ref => Constructor::Ref,
+            Constructor::Slice(slice) => Constructor::Slice(*slice),
+            Constructor::UnionField => Constructor::UnionField,
+            Constructor::Bool(b) => Constructor::Bool(*b),
+            Constructor::IntRange(range) => Constructor::IntRange(*range),
+            Constructor::F16Range(lo, hi, end) => Constructor::F16Range(lo.clone(), *hi, *end),
+            Constructor::F32Range(lo, hi, end) => Constructor::F32Range(lo.clone(), *hi, *end),
+            Constructor::F64Range(lo, hi, end) => Constructor::F64Range(lo.clone(), *hi, *end),
+            Constructor::F128Range(lo, hi, end) => Constructor::F128Range(lo.clone(), *hi, *end),
+            Constructor::Str(value) => Constructor::Str(value.clone()),
+            Constructor::Opaque(inner) => Constructor::Opaque(inner.clone()),
+            Constructor::Or => Constructor::Or,
+            Constructor::Never => Constructor::Never,
+            Constructor::Wildcard => Constructor::Wildcard,
+            Constructor::NonExhaustive => Constructor::NonExhaustive,
+            Constructor::Hidden => Constructor::Hidden,
+            Constructor::Missing => Constructor::Missing,
+            Constructor::PrivateUninhabited => Constructor::PrivateUninhabited,
+        }
+    }
+}
+
+impl<Cx: PatCx> Constructor<Cx> {
+    pub(crate) fn is_non_exhaustive(&self) -> bool {
+        matches!(self, NonExhaustive)
+    }
+
+    pub(crate) fn as_variant(&self) -> Option<Cx::VariantIdx> {
+        match self {
+            Variant(i) => Some(*i),
+            _ => None,
+        }
+    }
+    fn as_bool(&self) -> Option<bool> {
+        match self {
+            Bool(b) => Some(*b),
+            _ => None,
+        }
+    }
+    pub(crate) fn as_int_range(&self) -> Option<&IntRange> {
+        match self {
+            IntRange(range) => Some(range),
+            _ => None,
+        }
+    }
+    fn as_slice(&self) -> Option<Slice> {
+        match self {
+            Slice(slice) => Some(*slice),
+            _ => None,
+        }
+    }
+
+    /// The number of fields for this constructor. This must be kept in sync with
+    /// `Fields::wildcards`.
+    pub(crate) fn arity(&self, cx: &Cx, ty: &Cx::Ty) -> usize {
+        cx.ctor_arity(self, ty)
+    }
+
+    /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
+    /// For the simple cases, this is simply checking for equality. For the "grouped" constructors,
+    /// this checks for inclusion.
+    // We inline because this has a single call site in `Matrix::specialize_constructor`.
+    #[inline]
+    pub(crate) fn is_covered_by(&self, cx: &Cx, other: &Self) -> Result<bool, Cx::Error> {
+        Ok(match (self, other) {
+            (Wildcard, _) => {
+                return Err(cx.bug(format_args!(
+                    "Constructor splitting should not have returned `Wildcard`"
+                )));
+            }
+            // Wildcards cover anything
+            (_, Wildcard) => true,
+            // `PrivateUninhabited` skips everything.
+            (PrivateUninhabited, _) => true,
+            // Only a wildcard pattern can match these special constructors.
+            (Missing { .. } | NonExhaustive | Hidden, _) => false,
+
+            (Struct, Struct) => true,
+            (Ref, Ref) => true,
+            (UnionField, UnionField) => true,
+            (Variant(self_id), Variant(other_id)) => self_id == other_id,
+            (Bool(self_b), Bool(other_b)) => self_b == other_b,
+
+            (IntRange(self_range), IntRange(other_range)) => self_range.is_subrange(other_range),
+            (F16Range(self_from, self_to, self_end), F16Range(other_from, other_to, other_end)) => {
+                self_from.ge(other_from)
+                    && match self_to.partial_cmp(other_to) {
+                        Some(Ordering::Less) => true,
+                        Some(Ordering::Equal) => other_end == self_end,
+                        _ => false,
+                    }
+            }
+            (F32Range(self_from, self_to, self_end), F32Range(other_from, other_to, other_end)) => {
+                self_from.ge(other_from)
+                    && match self_to.partial_cmp(other_to) {
+                        Some(Ordering::Less) => true,
+                        Some(Ordering::Equal) => other_end == self_end,
+                        _ => false,
+                    }
+            }
+            (F64Range(self_from, self_to, self_end), F64Range(other_from, other_to, other_end)) => {
+                self_from.ge(other_from)
+                    && match self_to.partial_cmp(other_to) {
+                        Some(Ordering::Less) => true,
+                        Some(Ordering::Equal) => other_end == self_end,
+                        _ => false,
+                    }
+            }
+            (
+                F128Range(self_from, self_to, self_end),
+                F128Range(other_from, other_to, other_end),
+            ) => {
+                self_from.ge(other_from)
+                    && match self_to.partial_cmp(other_to) {
+                        Some(Ordering::Less) => true,
+                        Some(Ordering::Equal) => other_end == self_end,
+                        _ => false,
+                    }
+            }
+            (Str(self_val), Str(other_val)) => {
+                // FIXME Once valtrees are available we can directly use the bytes
+                // in the `Str` variant of the valtree for the comparison here.
+                self_val == other_val
+            }
+            (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice),
+
+            // Opaque constructors don't interact with anything unless they come from the
+            // syntactically identical pattern.
+            (Opaque(self_id), Opaque(other_id)) => self_id == other_id,
+            (Opaque(..), _) | (_, Opaque(..)) => false,
+
+            _ => {
+                return Err(cx.bug(format_args!(
+                    "trying to compare incompatible constructors {self:?} and {other:?}"
+                )));
+            }
+        })
+    }
+
+    pub(crate) fn fmt_fields(
+        &self,
+        f: &mut fmt::Formatter<'_>,
+        ty: &Cx::Ty,
+        mut fields: impl Iterator<Item = impl fmt::Debug>,
+    ) -> fmt::Result {
+        let mut first = true;
+        let mut start_or_continue = |s| {
+            if first {
+                first = false;
+                ""
+            } else {
+                s
+            }
+        };
+        let mut start_or_comma = || start_or_continue(", ");
+
+        match self {
+            Struct | Variant(_) | UnionField => {
+                Cx::write_variant_name(f, self, ty)?;
+                // Without `cx`, we can't know which field corresponds to which, so we can't
+                // get the names of the fields. Instead we just display everything as a tuple
+                // struct, which should be good enough.
+                write!(f, "(")?;
+                for p in fields {
+                    write!(f, "{}{:?}", start_or_comma(), p)?;
+                }
+                write!(f, ")")?;
+            }
+            // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+            // be careful to detect strings here. However a string literal pattern will never
+            // be reported as a non-exhaustiveness witness, so we can ignore this issue.
+            Ref => {
+                write!(f, "&{:?}", fields.next().unwrap())?;
+            }
+            Slice(slice) => {
+                write!(f, "[")?;
+                match slice.kind {
+                    SliceKind::FixedLen(_) => {
+                        for p in fields {
+                            write!(f, "{}{:?}", start_or_comma(), p)?;
+                        }
+                    }
+                    SliceKind::VarLen(prefix_len, _) => {
+                        for p in fields.by_ref().take(prefix_len) {
+                            write!(f, "{}{:?}", start_or_comma(), p)?;
+                        }
+                        write!(f, "{}..", start_or_comma())?;
+                        for p in fields {
+                            write!(f, "{}{:?}", start_or_comma(), p)?;
+                        }
+                    }
+                }
+                write!(f, "]")?;
+            }
+            Bool(b) => write!(f, "{b}")?,
+            // Best-effort, will render signed ranges incorrectly
+            IntRange(range) => write!(f, "{range:?}")?,
+            F16Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
+            F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
+            F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
+            F128Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
+            Str(value) => write!(f, "{value:?}")?,
+            Opaque(..) => write!(f, "<constant pattern>")?,
+            Or => {
+                for pat in fields {
+                    write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
+                }
+            }
+            Never => write!(f, "!")?,
+            Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
+                write!(f, "_ : {:?}", ty)?
+            }
+        }
+        Ok(())
+    }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum VariantVisibility {
+    /// Variant that doesn't fit the other cases, i.e. most variants.
+    Visible,
+    /// Variant behind an unstable gate or with the `#[doc(hidden)]` attribute. It will not be
+    /// mentioned in diagnostics unless the user mentioned it first.
+    Hidden,
+    /// Variant that matches no value. E.g. `Some::<Option<!>>` if the `exhaustive_patterns` feature
+    /// is enabled. Like `Hidden`, it will not be mentioned in diagnostics unless the user mentioned
+    /// it first.
+    Empty,
+}
+
+/// Describes the set of all constructors for a type. For details, in particular about the emptiness
+/// of constructors, see the top of the file.
+///
+/// In terms of division of responsibility, [`ConstructorSet::split`] handles all of the
+/// `exhaustive_patterns` feature.
+#[derive(Debug)]
+pub enum ConstructorSet<Cx: PatCx> {
+    /// The type is a tuple or struct. `empty` tracks whether the type is empty.
+    Struct { empty: bool },
+    /// This type has the following list of constructors. If `variants` is empty and
+    /// `non_exhaustive` is false, don't use this; use `NoConstructors` instead.
+    Variants { variants: IndexVec<Cx::VariantIdx, VariantVisibility>, non_exhaustive: bool },
+    /// The type is `&T`.
+    Ref,
+    /// The type is a union.
+    Union,
+    /// Booleans.
+    Bool,
+    /// The type is spanned by integer values. The range or ranges give the set of allowed values.
+    /// The second range is only useful for `char`.
+    Integers { range_1: IntRange, range_2: Option<IntRange> },
+    /// The type is matched by slices. `array_len` is the compile-time length of the array, if
+    /// known. If `subtype_is_empty`, all constructors are empty except possibly the zero-length
+    /// slice `[]`.
+    Slice { array_len: Option<usize>, subtype_is_empty: bool },
+    /// The constructors cannot be listed, and the type cannot be matched exhaustively. E.g. `str`,
+    /// floats.
+    Unlistable,
+    /// The type has no constructors (not even empty ones). This is `!` and empty enums.
+    NoConstructors,
+}
+
+/// Describes the result of analyzing the constructors in a column of a match.
+///
+/// `present` is morally the set of constructors present in the column, and `missing` is the set of
+/// constructors that exist in the type but are not present in the column.
+///
+/// More formally, if we discard wildcards from the column, this respects the following constraints:
+/// 1. the union of `present`, `missing` and `missing_empty` covers all the constructors of the type
+/// 2. each constructor in `present` is covered by something in the column
+/// 3. no constructor in `missing` or `missing_empty` is covered by anything in the column
+/// 4. each constructor in the column is equal to the union of one or more constructors in `present`
+/// 5. `missing` does not contain empty constructors (see discussion about emptiness at the top of
+///    the file);
+/// 6. `missing_empty` contains only empty constructors
+/// 7. constructors in `present`, `missing` and `missing_empty` are split for the column; in other
+///    words, they are either fully included in or fully disjoint from each constructor in the
+///    column. In yet other words, there are no non-trivial intersections like between `0..10` and
+///    `5..15`.
+///
+/// We must be particularly careful with weird constructors like `Opaque`: they're not formally part
+/// of the `ConstructorSet` for the type, yet if we forgot to include them in `present` we would be
+/// ignoring any row with `Opaque`s in the algorithm. Hence the importance of point 4.
+#[derive(Debug)]
+pub struct SplitConstructorSet<Cx: PatCx> {
+    pub present: SmallVec<[Constructor<Cx>; 1]>,
+    pub missing: Vec<Constructor<Cx>>,
+    pub missing_empty: Vec<Constructor<Cx>>,
+}
+
+impl<Cx: PatCx> ConstructorSet<Cx> {
+    /// This analyzes a column of constructors to 1/ determine which constructors of the type (if
+    /// any) are missing; 2/ split constructors to handle non-trivial intersections e.g. on ranges
+    /// or slices. This can get subtle; see [`SplitConstructorSet`] for details of this operation
+    /// and its invariants.
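+    ///
+    /// For example, for `Option<u32>` with a column containing `Some(_)` and `_`, the wildcard is
+    /// discarded and we get `present = [Some]`, `missing = [None]` and `missing_empty = []`.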
+    pub fn split<'a>(
+        &self,
+        ctors: impl Iterator<Item = &'a Constructor<Cx>> + Clone,
+    ) -> SplitConstructorSet<Cx>
+    where
+        Cx: 'a,
+    {
+        let mut present: SmallVec<[_; 1]> = SmallVec::new();
+        // Empty constructors found missing.
+        let mut missing_empty = Vec::new();
+        // Nonempty constructors found missing.
+        let mut missing = Vec::new();
+        // Constructors in `ctors`, except wildcards and opaques.
+        let mut seen = Vec::new();
+        for ctor in ctors.cloned() {
+            match ctor {
+                Opaque(..) => present.push(ctor),
+                Wildcard => {} // discard wildcards
+                _ => seen.push(ctor),
+            }
+        }
+
+        match self {
+            ConstructorSet::Struct { empty } => {
+                if !seen.is_empty() {
+                    present.push(Struct);
+                } else if *empty {
+                    missing_empty.push(Struct);
+                } else {
+                    missing.push(Struct);
+                }
+            }
+            ConstructorSet::Ref => {
+                if !seen.is_empty() {
+                    present.push(Ref);
+                } else {
+                    missing.push(Ref);
+                }
+            }
+            ConstructorSet::Union => {
+                if !seen.is_empty() {
+                    present.push(UnionField);
+                } else {
+                    missing.push(UnionField);
+                }
+            }
+            ConstructorSet::Variants { variants, non_exhaustive } => {
+                let mut seen_set = BitSet::new_empty(variants.len());
+                for idx in seen.iter().filter_map(|c| c.as_variant()) {
+                    seen_set.insert(idx);
+                }
+                let mut skipped_a_hidden_variant = false;
+
+                for (idx, visibility) in variants.iter_enumerated() {
+                    let ctor = Variant(idx);
+                    if seen_set.contains(idx) {
+                        present.push(ctor);
+                    } else {
+                        // We only put visible variants directly into `missing`.
+                        match visibility {
+                            VariantVisibility::Visible => missing.push(ctor),
+                            VariantVisibility::Hidden => skipped_a_hidden_variant = true,
+                            VariantVisibility::Empty => missing_empty.push(ctor),
+                        }
+                    }
+                }
+
+                if skipped_a_hidden_variant {
+                    missing.push(Hidden);
+                }
+                if *non_exhaustive {
+                    missing.push(NonExhaustive);
+                }
+            }
+            ConstructorSet::Bool => {
+                let mut seen_false = false;
+                let mut seen_true = false;
+                for b in seen.iter().filter_map(|ctor| ctor.as_bool()) {
+                    if b {
+                        seen_true = true;
+                    } else {
+                        seen_false = true;
+                    }
+                }
+                if seen_false {
+                    present.push(Bool(false));
+                } else {
+                    missing.push(Bool(false));
+                }
+                if seen_true {
+                    present.push(Bool(true));
+                } else {
+                    missing.push(Bool(true));
+                }
+            }
+            ConstructorSet::Integers { range_1, range_2 } => {
+                let seen_ranges: Vec<_> =
+                    seen.iter().filter_map(|ctor| ctor.as_int_range()).copied().collect();
+                for (seen, splitted_range) in range_1.split(seen_ranges.iter().cloned()) {
+                    match seen {
+                        Presence::Unseen => missing.push(IntRange(splitted_range)),
+                        Presence::Seen => present.push(IntRange(splitted_range)),
+                    }
+                }
+                if let Some(range_2) = range_2 {
+                    for (seen, splitted_range) in range_2.split(seen_ranges.into_iter()) {
+                        match seen {
+                            Presence::Unseen => missing.push(IntRange(splitted_range)),
+                            Presence::Seen => present.push(IntRange(splitted_range)),
+                        }
+                    }
+                }
+            }
+            ConstructorSet::Slice { array_len, subtype_is_empty } => {
+                let seen_slices = seen.iter().filter_map(|c| c.as_slice());
+                let base_slice = Slice::new(*array_len, VarLen(0, 0));
+                for (seen, splitted_slice) in base_slice.split(seen_slices) {
+                    let ctor = Slice(splitted_slice);
+                    match seen {
+                        Presence::Seen => present.push(ctor),
+                        Presence::Unseen => {
+                            if *subtype_is_empty && splitted_slice.arity() != 0 {
+                                // We have subpatterns of an empty type, so the constructor is
+                                // empty.
+                                missing_empty.push(ctor);
+                            } else {
+                                missing.push(ctor);
+                            }
+                        }
+                    }
+                }
+            }
+            ConstructorSet::Unlistable => {
+                // Since we can't list constructors, we take the ones in the column. This might list
+                // some constructors several times but there's not much we can do.
+                present.extend(seen);
+                missing.push(NonExhaustive);
+            }
+            ConstructorSet::NoConstructors => {
+                // In a `MaybeInvalid` place even an empty pattern may be reachable. We therefore
+                // add a dummy empty constructor here, which will be ignored if the place is
+                // `ValidOnly`.
+                missing_empty.push(Never);
+            }
+        }
+
+        SplitConstructorSet { present, missing, missing_empty }
+    }
+
+    /// Whether this set only contains empty constructors.
+    pub(crate) fn all_empty(&self) -> bool {
+        match self {
+            ConstructorSet::Bool
+            | ConstructorSet::Integers { .. }
+            | ConstructorSet::Ref
+            | ConstructorSet::Union
+            | ConstructorSet::Unlistable => false,
+            ConstructorSet::NoConstructors => true,
+            ConstructorSet::Struct { empty } => *empty,
+            ConstructorSet::Variants { variants, non_exhaustive } => {
+                !*non_exhaustive
+                    && variants
+                        .iter()
+                        .all(|visibility| matches!(visibility, VariantVisibility::Empty))
+            }
+            ConstructorSet::Slice { array_len, subtype_is_empty } => {
+                *subtype_is_empty && matches!(array_len, Some(1..))
+            }
+        }
+    }
+}
diff --git a/compiler/rustc_pattern_analysis/src/errors.rs b/compiler/rustc_pattern_analysis/src/errors.rs
new file mode 100644
index 00000000000..1f7852e5190
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/errors.rs
@@ -0,0 +1,143 @@
+use rustc_errors::{Diag, EmissionGuarantee, SubdiagMessageOp, Subdiagnostic};
+use rustc_macros::{LintDiagnostic, Subdiagnostic};
+use rustc_middle::ty::Ty;
+use rustc_span::Span;
+
+use crate::rustc::{RustcPatCtxt, WitnessPat};
+
+#[derive(Subdiagnostic)]
+#[label(pattern_analysis_uncovered)]
+pub struct Uncovered {
+    #[primary_span]
+    span: Span,
+    count: usize,
+    witness_1: String, // a printed pattern
+    witness_2: String, // a printed pattern
+    witness_3: String, // a printed pattern
+    remainder: usize,
+}
+
+impl Uncovered {
+    pub fn new<'p, 'tcx>(
+        span: Span,
+        cx: &RustcPatCtxt<'p, 'tcx>,
+        witnesses: Vec<WitnessPat<'p, 'tcx>>,
+    ) -> Self
+    where
+        'tcx: 'p,
+    {
+        let witness_1 = cx.print_witness_pat(witnesses.get(0).unwrap());
+        Self {
+            span,
+            count: witnesses.len(),
+            // Substitute dummy values if `witnesses` has fewer than 3 elements.
+            // These will never be read.
+            witness_2: witnesses.get(1).map(|w| cx.print_witness_pat(w)).unwrap_or_default(),
+            witness_3: witnesses.get(2).map(|w| cx.print_witness_pat(w)).unwrap_or_default(),
+            witness_1,
+            remainder: witnesses.len().saturating_sub(3),
+        }
+    }
+}
+
+#[derive(LintDiagnostic)]
+#[diag(pattern_analysis_overlapping_range_endpoints)]
+#[note]
+pub struct OverlappingRangeEndpoints {
+    #[label]
+    pub range: Span,
+    #[subdiagnostic]
+    pub overlap: Vec<Overlap>,
+}
+
+pub struct Overlap {
+    pub span: Span,
+    pub range: String, // a printed pattern
+}
+
+impl Subdiagnostic for Overlap {
+    fn add_to_diag_with<G: EmissionGuarantee, F: SubdiagMessageOp<G>>(
+        self,
+        diag: &mut Diag<'_, G>,
+        _: &F,
+    ) {
+        let Overlap { span, range } = self;
+
+        // FIXME(mejrs) unfortunately `#[derive(LintDiagnostic)]`
+        // does not support `#[subdiagnostic(eager)]`...
+        let message = format!("this range overlaps on `{range}`...");
+        diag.span_label(span, message);
+    }
+}
+
+#[derive(LintDiagnostic)]
+#[diag(pattern_analysis_excluside_range_missing_max)]
+pub struct ExclusiveRangeMissingMax {
+    #[label]
+    #[suggestion(code = "{suggestion}", applicability = "maybe-incorrect")]
+    /// This is an exclusive range that looks like `lo..max` (i.e. doesn't match `max`).
+    pub first_range: Span,
+    /// Suggest `lo..=max` instead.
+    pub suggestion: String,
+    pub max: String, // a printed pattern
+}
+
+#[derive(LintDiagnostic)]
+#[diag(pattern_analysis_excluside_range_missing_gap)]
+pub struct ExclusiveRangeMissingGap {
+    #[label]
+    #[suggestion(code = "{suggestion}", applicability = "maybe-incorrect")]
+    /// This is an exclusive range that looks like `lo..gap` (i.e. doesn't match `gap`).
+    pub first_range: Span,
+    pub gap: String, // a printed pattern
+    /// Suggest `lo..=gap` instead.
+    pub suggestion: String,
+    #[subdiagnostic]
+    /// All these ranges skipped over `gap` which we think is probably a mistake.
+    pub gap_with: Vec<GappedRange>,
+}
+
+pub struct GappedRange {
+    pub span: Span,
+    pub gap: String,         // a printed pattern
+    pub first_range: String, // a printed pattern
+}
+
+impl Subdiagnostic for GappedRange {
+    fn add_to_diag_with<G: EmissionGuarantee, F: SubdiagMessageOp<G>>(
+        self,
+        diag: &mut Diag<'_, G>,
+        _: &F,
+    ) {
+        let GappedRange { span, gap, first_range } = self;
+
+        // FIXME(mejrs) unfortunately `#[derive(LintDiagnostic)]`
+        // does not support `#[subdiagnostic(eager)]`...
+        let message = format!(
+            "this could appear to continue range `{first_range}`, but `{gap}` isn't matched by \
+            either of them"
+        );
+        diag.span_label(span, message);
+    }
+}
+
+#[derive(LintDiagnostic)]
+#[diag(pattern_analysis_non_exhaustive_omitted_pattern)]
+#[help]
+#[note]
+pub(crate) struct NonExhaustiveOmittedPattern<'tcx> {
+    pub scrut_ty: Ty<'tcx>,
+    #[subdiagnostic]
+    pub uncovered: Uncovered,
+}
+
+#[derive(LintDiagnostic)]
+#[diag(pattern_analysis_non_exhaustive_omitted_pattern_lint_on_arm)]
+#[help]
+pub(crate) struct NonExhaustiveOmittedPatternLintOnArm {
+    #[label]
+    pub lint_span: Span,
+    #[suggestion(code = "#[{lint_level}({lint_name})]\n", applicability = "maybe-incorrect")]
+    pub suggest_lint_on_match: Option<Span>,
+    pub lint_level: &'static str,
+    pub lint_name: &'static str,
+}
diff --git a/compiler/rustc_pattern_analysis/src/lib.rs b/compiler/rustc_pattern_analysis/src/lib.rs
new file mode 100644
index 00000000000..a5c0b13c90b
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/lib.rs
@@ -0,0 +1,126 @@
+//! Analysis of patterns, notably match exhaustiveness checking. The main entrypoint for this crate
+//! is [`usefulness::compute_match_usefulness`]. For rustc-specific types and entrypoints, see the
+//! [`rustc`] module.
+
+// tidy-alphabetical-start
+#![allow(rustc::diagnostic_outside_of_impl)]
+#![allow(rustc::untranslatable_diagnostic)]
+// tidy-alphabetical-end
+
+pub mod constructor;
+#[cfg(feature = "rustc")]
+pub mod errors;
+#[cfg(feature = "rustc")]
+pub(crate) mod lints;
+pub mod pat;
+pub mod pat_column;
+#[cfg(feature = "rustc")]
+pub mod rustc;
+pub mod usefulness;
+
+#[cfg(feature = "rustc")]
+rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
+
+use std::fmt;
+
+pub use rustc_index::{Idx, IndexVec}; // re-exported to avoid rustc_index version issues
+
+use crate::constructor::{Constructor, ConstructorSet, IntRange};
+use crate::pat::DeconstructedPat;
+
+pub trait Captures<'a> {}
+impl<'a, T: ?Sized> Captures<'a> for T {}
+
+/// `bool` newtype that indicates whether this is a privately uninhabited field that we should skip
+/// during analysis.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct PrivateUninhabitedField(pub bool);
+
+/// Context that provides type information about constructors.
+///
+/// Most of the crate is parameterized on a type that implements this trait.
+pub trait PatCx: Sized + fmt::Debug {
+    /// The type of a pattern.
+    type Ty: Clone + fmt::Debug;
+    /// Errors that can abort analysis.
+    type Error: fmt::Debug;
+    /// The index of an enum variant.
+    type VariantIdx: Clone + Idx + fmt::Debug;
+    /// A string literal.
+    type StrLit: Clone + PartialEq + fmt::Debug;
+    /// Extra data to store in a match arm.
+    type ArmData: Copy + Clone + fmt::Debug;
+    /// Extra data to store in a pattern.
+    type PatData: Clone;
+
+    fn is_exhaustive_patterns_feature_on(&self) -> bool;
+    fn is_min_exhaustive_patterns_feature_on(&self) -> bool;
+
+    /// The number of fields for this constructor.
+    fn ctor_arity(&self, ctor: &Constructor<Self>, ty: &Self::Ty) -> usize;
+
+    /// The types of the fields for this constructor. The result must contain `ctor_arity()` fields.
+    fn ctor_sub_tys<'a>(
+        &'a self,
+        ctor: &'a Constructor<Self>,
+        ty: &'a Self::Ty,
+    ) -> impl Iterator<Item = (Self::Ty, PrivateUninhabitedField)> + ExactSizeIterator + Captures<'a>;
+
+    /// The set of all the constructors for `ty`.
+    ///
+    /// This must follow the invariants of `ConstructorSet`.
+    fn ctors_for_ty(&self, ty: &Self::Ty) -> Result<ConstructorSet<Self>, Self::Error>;
+
+    /// Write the name of the variant represented by `pat`. Used for the best-effort `Debug` impl of
+    /// `DeconstructedPat`. Only invoked when `pat.ctor()` is `Struct | Variant(_) | UnionField`.
+    fn write_variant_name(
+        f: &mut fmt::Formatter<'_>,
+        ctor: &crate::constructor::Constructor<Self>,
+        ty: &Self::Ty,
+    ) -> fmt::Result;
+
+    /// Raise a bug.
+    fn bug(&self, fmt: fmt::Arguments<'_>) -> Self::Error;
+
+    /// Lint that the range `pat` overlapped with all the ranges in `overlaps_with`, where the range
+    /// they overlapped over is `overlaps_on`. We only detect singleton overlaps.
+    /// The default implementation does nothing.
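+    ///
+    /// For example (an illustrative sketch, not real diagnostic output):
+    /// ```ignore (illustrative)
+    /// match 0u8 {
+    ///     0..=10 => {}  // `pat`
+    ///     10..=20 => {} // one of `overlaps_with`
+    ///     _ => {}
+    /// }
+    /// ```
+    /// Both ranges match `10`, so `overlaps_on` is the singleton range `10..=10`.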
+    fn lint_overlapping_range_endpoints(
+        &self,
+        _pat: &DeconstructedPat<Self>,
+        _overlaps_on: IntRange,
+        _overlaps_with: &[&DeconstructedPat<Self>],
+    ) {
+    }
+
+    /// The maximum pattern complexity limit was reached.
+    fn complexity_exceeded(&self) -> Result<(), Self::Error>;
+
+    /// Lint that there is a gap `gap` between `pat` and all of `gapped_with` such that the gap is
+    /// not matched by another range. If `gapped_with` is empty, then `gap` is `T::MAX`. We only
+    /// detect singleton gaps.
+    /// The default implementation does nothing.
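+    ///
+    /// For example (an illustrative sketch, not real diagnostic output):
+    /// ```ignore (illustrative)
+    /// match 0u8 {
+    ///     0..10 => {}  // `pat`
+    ///     11..20 => {} // one of `gapped_with`
+    ///     _ => {}
+    /// }
+    /// ```
+    /// Neither range matches `10`, so `gap` is the singleton range `10..=10`.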
+    fn lint_non_contiguous_range_endpoints(
+        &self,
+        _pat: &DeconstructedPat<Self>,
+        _gap: IntRange,
+        _gapped_with: &[&DeconstructedPat<Self>],
+    ) {
+    }
+}
+
+/// The arm of a match expression.
+#[derive(Debug)]
+pub struct MatchArm<'p, Cx: PatCx> {
+    pub pat: &'p DeconstructedPat<Cx>,
+    pub has_guard: bool,
+    pub arm_data: Cx::ArmData,
+}
+
+impl<'p, Cx: PatCx> Clone for MatchArm<'p, Cx> {
+    fn clone(&self) -> Self {
+        Self { pat: self.pat, has_guard: self.has_guard, arm_data: self.arm_data }
+    }
+}
+
+impl<'p, Cx: PatCx> Copy for MatchArm<'p, Cx> {}
diff --git a/compiler/rustc_pattern_analysis/src/lints.rs b/compiler/rustc_pattern_analysis/src/lints.rs
new file mode 100644
index 00000000000..6bcef0ec879
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/lints.rs
@@ -0,0 +1,109 @@
+use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
+use rustc_span::ErrorGuaranteed;
+use tracing::instrument;
+
+use crate::constructor::Constructor;
+use crate::errors::{NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Uncovered};
+use crate::pat_column::PatternColumn;
+use crate::rustc::{RevealedTy, RustcPatCtxt, WitnessPat};
+use crate::MatchArm;
+
+/// Traverse the patterns to collect any variants of a non_exhaustive enum that fail to be mentioned
+/// in a given column.
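+///
+/// For example (an illustrative sketch; `ExternEnum` stands for a `#[non_exhaustive]` enum from
+/// another crate with variants `A`, `B` and `C`):
+/// ```ignore (illustrative)
+/// match extern_enum_value {
+///     ExternEnum::A => {}
+///     _ => {} // `B` and `C` are never mentioned explicitly; they are collected as witnesses
+/// }
+/// ```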
+#[instrument(level = "debug", skip(cx), ret)]
+fn collect_nonexhaustive_missing_variants<'p, 'tcx>(
+    cx: &RustcPatCtxt<'p, 'tcx>,
+    column: &PatternColumn<'p, RustcPatCtxt<'p, 'tcx>>,
+) -> Result<Vec<WitnessPat<'p, 'tcx>>, ErrorGuaranteed> {
+    let Some(&ty) = column.head_ty() else {
+        return Ok(Vec::new());
+    };
+
+    let set = column.analyze_ctors(cx, &ty)?;
+    if set.present.is_empty() {
+        // We can't consistently handle the case where no constructors are present (since this would
+        // require digging deep through any type in case there's a non_exhaustive enum somewhere),
+        // so for consistency we also refuse to handle the top-level case, even though we could.
+        return Ok(Vec::new());
+    }
+
+    let mut witnesses = Vec::new();
+    if cx.is_foreign_non_exhaustive_enum(ty) {
+        witnesses.extend(
+            set.missing
+                .into_iter()
+                // This will list missing visible variants.
+                .filter(|c| !matches!(c, Constructor::Hidden | Constructor::NonExhaustive))
+                .map(|missing_ctor| WitnessPat::wild_from_ctor(cx, missing_ctor, ty)),
+        )
+    }
+
+    // Recurse into the fields.
+    for ctor in set.present {
+        let specialized_columns = column.specialize(cx, &ty, &ctor);
+        let wild_pat = WitnessPat::wild_from_ctor(cx, ctor, ty);
+        for (i, col_i) in specialized_columns.iter().enumerate() {
+            // Compute witnesses for each column.
+            let wits_for_col_i = collect_nonexhaustive_missing_variants(cx, col_i)?;
+            // For each witness, we build a new pattern in the shape of `ctor(_, _, wit, _, _)`,
+            // adding enough wildcards to match `arity`.
+            for wit in wits_for_col_i {
+                let mut pat = wild_pat.clone();
+                pat.fields[i] = wit;
+                witnesses.push(pat);
+            }
+        }
+    }
+    Ok(witnesses)
+}
+
+pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>(
+    rcx: &RustcPatCtxt<'p, 'tcx>,
+    arms: &[MatchArm<'p, RustcPatCtxt<'p, 'tcx>>],
+    pat_column: &PatternColumn<'p, RustcPatCtxt<'p, 'tcx>>,
+    scrut_ty: RevealedTy<'tcx>,
+) -> Result<(), ErrorGuaranteed> {
+    if !matches!(
+        rcx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, rcx.match_lint_level).0,
+        rustc_session::lint::Level::Allow
+    ) {
+        let witnesses = collect_nonexhaustive_missing_variants(rcx, pat_column)?;
+        if !witnesses.is_empty() {
+            // Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns`
+            // is not exhaustive enough.
+            //
+            // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`.
+            rcx.tcx.emit_node_span_lint(
+                NON_EXHAUSTIVE_OMITTED_PATTERNS,
+                rcx.match_lint_level,
+                rcx.scrut_span,
+                NonExhaustiveOmittedPattern {
+                    scrut_ty: scrut_ty.inner(),
+                    uncovered: Uncovered::new(rcx.scrut_span, rcx, witnesses),
+                },
+            );
+        }
+    } else {
+        // We used to allow putting `#[allow(non_exhaustive_omitted_patterns)]` on a match
+        // arm. This no longer makes sense, so we warn users to avoid silently breaking their
+        // usage of the lint.
+        for arm in arms {
+            let (lint_level, lint_level_source) =
+                rcx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.arm_data);
+            if !matches!(lint_level, rustc_session::lint::Level::Allow) {
+                let decorator = NonExhaustiveOmittedPatternLintOnArm {
+                    lint_span: lint_level_source.span(),
+                    suggest_lint_on_match: rcx.whole_match_span.map(|span| span.shrink_to_lo()),
+                    lint_level: lint_level.as_str(),
+                    lint_name: "non_exhaustive_omitted_patterns",
+                };
+
+                use rustc_errors::LintDiagnostic;
+                let mut err = rcx.tcx.dcx().struct_span_warn(arm.pat.data().span, "");
+                decorator.decorate_lint(&mut err);
+                err.emit();
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/compiler/rustc_pattern_analysis/src/pat.rs b/compiler/rustc_pattern_analysis/src/pat.rs
new file mode 100644
index 00000000000..d91deab160c
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/pat.rs
@@ -0,0 +1,317 @@
+//! As explained in [`crate::usefulness`], values and patterns are made from constructors applied to
+//! fields. This file defines types that represent patterns in this way.
+
+use std::fmt;
+
+use smallvec::{smallvec, SmallVec};
+
+use self::Constructor::*;
+use crate::constructor::{Constructor, Slice, SliceKind};
+use crate::{PatCx, PrivateUninhabitedField};
+
+/// A globally unique id to distinguish patterns.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub(crate) struct PatId(u32);
+impl PatId {
+    fn new() -> Self {
+        use std::sync::atomic::{AtomicU32, Ordering};
+        static PAT_ID: AtomicU32 = AtomicU32::new(0);
+        PatId(PAT_ID.fetch_add(1, Ordering::SeqCst))
+    }
+}
+
+/// A pattern with an index denoting which field it corresponds to.
+pub struct IndexedPat<Cx: PatCx> {
+    pub idx: usize,
+    pub pat: DeconstructedPat<Cx>,
+}
+
+/// Values and patterns can be represented as a constructor applied to some fields. This represents
+/// a pattern in this form. A `DeconstructedPat` will almost always come from user input; the only
+/// exceptions are some `Wildcard`s introduced during pattern lowering.
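+///
+/// For example (schematically; the exact constructor names depend on the `PatCx` implementation),
+/// the pattern `Some(true)` is stored as:
+/// ```ignore (illustrative)
+/// // ctor   = the `Some` variant constructor
+/// // fields = [at index 0: DeconstructedPat { ctor: Bool(true), fields: [], arity: 0 }]
+/// // arity  = 1
+/// ```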
+pub struct DeconstructedPat<Cx: PatCx> {
+    ctor: Constructor<Cx>,
+    fields: Vec<IndexedPat<Cx>>,
+    /// The number of fields in this pattern. E.g. if the pattern is `SomeStruct { field12: true, ..
+    /// }` this would be the total number of fields of the struct.
+    /// This is also the same as `self.ctor.arity(self.ty)`.
+    arity: usize,
+    ty: Cx::Ty,
+    /// Extra data to store in a pattern.
+    data: Cx::PatData,
+    /// Globally-unique id used to track usefulness at the level of subpatterns.
+    pub(crate) uid: PatId,
+}
+
+impl<Cx: PatCx> DeconstructedPat<Cx> {
+    pub fn new(
+        ctor: Constructor<Cx>,
+        fields: Vec<IndexedPat<Cx>>,
+        arity: usize,
+        ty: Cx::Ty,
+        data: Cx::PatData,
+    ) -> Self {
+        DeconstructedPat { ctor, fields, arity, ty, data, uid: PatId::new() }
+    }
+
+    pub fn at_index(self, idx: usize) -> IndexedPat<Cx> {
+        IndexedPat { idx, pat: self }
+    }
+
+    pub(crate) fn is_or_pat(&self) -> bool {
+        matches!(self.ctor, Or)
+    }
+
+    pub fn ctor(&self) -> &Constructor<Cx> {
+        &self.ctor
+    }
+    pub fn ty(&self) -> &Cx::Ty {
+        &self.ty
+    }
+    /// Returns the extra data stored in a pattern.
+    pub fn data(&self) -> &Cx::PatData {
+        &self.data
+    }
+    pub fn arity(&self) -> usize {
+        self.arity
+    }
+
+    pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a IndexedPat<Cx>> {
+        self.fields.iter()
+    }
+
+    /// Specialize this pattern with a constructor.
+    /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
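+    ///
+    /// For example (illustrative), specializing `Some(true)` with the `Some` constructor yields
+    /// the single field pattern `true`. The only non-trivial case is slices: specializing
+    /// `[a, .., z]` (arity 2) with a slice constructor of arity 4 yields `[a, _, _, z]`, i.e.
+    /// the suffix pattern `z` moves from index 1 to index 3.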
+    pub(crate) fn specialize<'a>(
+        &'a self,
+        other_ctor: &Constructor<Cx>,
+        other_ctor_arity: usize,
+    ) -> SmallVec<[PatOrWild<'a, Cx>; 2]> {
+        if matches!(other_ctor, PrivateUninhabited) {
+            // Skip this column.
+            return smallvec![];
+        }
+
+        // Start with a slice of wildcards of the appropriate length.
+        let mut fields: SmallVec<[_; 2]> = (0..other_ctor_arity).map(|_| PatOrWild::Wild).collect();
+        // Fill `fields` with our fields. The arities are known to be compatible.
+        match self.ctor {
+            // The only non-trivial case: two slices of different arity. `other_ctor` is guaranteed
+            // to have a larger arity, so we adjust the indices of the patterns in the suffix so
+            // that they are correctly positioned in the larger slice.
+            Slice(Slice { kind: SliceKind::VarLen(prefix, _), .. })
+                if self.arity != other_ctor_arity =>
+            {
+                for ipat in &self.fields {
+                    let new_idx = if ipat.idx < prefix {
+                        ipat.idx
+                    } else {
+                        // Adjust the indices in the suffix.
+                        ipat.idx + other_ctor_arity - self.arity
+                    };
+                    fields[new_idx] = PatOrWild::Pat(&ipat.pat);
+                }
+            }
+            _ => {
+                for ipat in &self.fields {
+                    fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
+                }
+            }
+        }
+        fields
+    }
+
+    /// Walk top-down and call `it` at each place where a pattern occurs,
+    /// starting with the root pattern that `walk` is called on. If `it` returns
+    /// false, we will descend no further, but siblings will still be processed.
+    pub fn walk<'a>(&'a self, it: &mut impl FnMut(&'a Self) -> bool) {
+        if !it(self) {
+            return;
+        }
+
+        for p in self.iter_fields() {
+            p.pat.walk(it)
+        }
+    }
+}
+
+/// This is best effort and not good enough for a `Display` impl.
+impl<Cx: PatCx> fmt::Debug for DeconstructedPat<Cx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut fields: Vec<_> = (0..self.arity).map(|_| PatOrWild::Wild).collect();
+        for ipat in self.iter_fields() {
+            fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
+        }
+        self.ctor().fmt_fields(f, self.ty(), fields.into_iter())
+    }
+}
+
+/// Delegate to `uid`.
+impl<Cx: PatCx> PartialEq for DeconstructedPat<Cx> {
+    fn eq(&self, other: &Self) -> bool {
+        self.uid == other.uid
+    }
+}
+/// Delegate to `uid`.
+impl<Cx: PatCx> Eq for DeconstructedPat<Cx> {}
+/// Delegate to `uid`.
+impl<Cx: PatCx> std::hash::Hash for DeconstructedPat<Cx> {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.uid.hash(state);
+    }
+}
+
+/// Represents either a pattern obtained from user input or a wildcard constructed during the
+/// algorithm. Do not use `Wild` to represent a wildcard pattern coming from user input.
+///
+/// This is morally `Option<&'p DeconstructedPat>` where `None` is interpreted as a wildcard.
+pub(crate) enum PatOrWild<'p, Cx: PatCx> {
+    /// A non-user-provided wildcard, created during specialization.
+    Wild,
+    /// A user-provided pattern.
+    Pat(&'p DeconstructedPat<Cx>),
+}
+
+impl<'p, Cx: PatCx> Clone for PatOrWild<'p, Cx> {
+    fn clone(&self) -> Self {
+        match self {
+            PatOrWild::Wild => PatOrWild::Wild,
+            PatOrWild::Pat(pat) => PatOrWild::Pat(pat),
+        }
+    }
+}
+
+impl<'p, Cx: PatCx> Copy for PatOrWild<'p, Cx> {}
+
+impl<'p, Cx: PatCx> PatOrWild<'p, Cx> {
+    pub(crate) fn as_pat(&self) -> Option<&'p DeconstructedPat<Cx>> {
+        match self {
+            PatOrWild::Wild => None,
+            PatOrWild::Pat(pat) => Some(pat),
+        }
+    }
+    pub(crate) fn ctor(self) -> &'p Constructor<Cx> {
+        match self {
+            PatOrWild::Wild => &Wildcard,
+            PatOrWild::Pat(pat) => pat.ctor(),
+        }
+    }
+
+    pub(crate) fn is_or_pat(&self) -> bool {
+        match self {
+            PatOrWild::Wild => false,
+            PatOrWild::Pat(pat) => pat.is_or_pat(),
+        }
+    }
+
+    /// Expand this or-pattern into its alternatives. This only expands one or-pattern; use
+    /// `flatten_or_pat` to recursively expand nested or-patterns.
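+    ///
+    /// For example (illustrative), expanding `A | (B | C)` yields `[A, (B | C)]`, whereas
+    /// `flatten_or_pat` would yield `[A, B, C]`.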
+    pub(crate) fn expand_or_pat(self) -> SmallVec<[Self; 1]> {
+        match self {
+            PatOrWild::Pat(pat) if pat.is_or_pat() => {
+                pat.iter_fields().map(|ipat| PatOrWild::Pat(&ipat.pat)).collect()
+            }
+            _ => smallvec![self],
+        }
+    }
+
+    /// Recursively expand this (possibly-nested) or-pattern into its alternatives.
+    pub(crate) fn flatten_or_pat(self) -> SmallVec<[Self; 1]> {
+        match self {
+            PatOrWild::Pat(pat) if pat.is_or_pat() => pat
+                .iter_fields()
+                .flat_map(|ipat| PatOrWild::Pat(&ipat.pat).flatten_or_pat())
+                .collect(),
+            _ => smallvec![self],
+        }
+    }
+
+    /// Specialize this pattern with a constructor.
+    /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
+    pub(crate) fn specialize(
+        &self,
+        other_ctor: &Constructor<Cx>,
+        ctor_arity: usize,
+    ) -> SmallVec<[PatOrWild<'p, Cx>; 2]> {
+        match self {
+            PatOrWild::Wild => (0..ctor_arity).map(|_| PatOrWild::Wild).collect(),
+            PatOrWild::Pat(pat) => pat.specialize(other_ctor, ctor_arity),
+        }
+    }
+}
+
+impl<'p, Cx: PatCx> fmt::Debug for PatOrWild<'p, Cx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            PatOrWild::Wild => write!(f, "_"),
+            PatOrWild::Pat(pat) => pat.fmt(f),
+        }
+    }
+}
+
+/// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics
+/// purposes. As such, it doesn't use interning and can be cloned.
+pub struct WitnessPat<Cx: PatCx> {
+    ctor: Constructor<Cx>,
+    pub(crate) fields: Vec<WitnessPat<Cx>>,
+    ty: Cx::Ty,
+}
+
+impl<Cx: PatCx> Clone for WitnessPat<Cx> {
+    fn clone(&self) -> Self {
+        Self { ctor: self.ctor.clone(), fields: self.fields.clone(), ty: self.ty.clone() }
+    }
+}
+
+impl<Cx: PatCx> WitnessPat<Cx> {
+    pub(crate) fn new(ctor: Constructor<Cx>, fields: Vec<Self>, ty: Cx::Ty) -> Self {
+        Self { ctor, fields, ty }
+    }
+    /// Create a wildcard pattern for this type. If the type is empty, we create a `!` pattern.
+    pub(crate) fn wildcard(cx: &Cx, ty: Cx::Ty) -> Self {
+        let is_empty = cx.ctors_for_ty(&ty).is_ok_and(|ctors| ctors.all_empty());
+        let ctor = if is_empty { Never } else { Wildcard };
+        Self::new(ctor, Vec::new(), ty)
+    }
+
+    /// Construct a pattern that matches everything that starts with this constructor.
+    /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
+    /// `Some(_)`.
+    pub(crate) fn wild_from_ctor(cx: &Cx, ctor: Constructor<Cx>, ty: Cx::Ty) -> Self {
+        if matches!(ctor, Wildcard) {
+            return Self::wildcard(cx, ty);
+        }
+        let fields = cx
+            .ctor_sub_tys(&ctor, &ty)
+            .filter(|(_, PrivateUninhabitedField(skip))| !skip)
+            .map(|(ty, _)| Self::wildcard(cx, ty))
+            .collect();
+        Self::new(ctor, fields, ty)
+    }
+
+    pub fn ctor(&self) -> &Constructor<Cx> {
+        &self.ctor
+    }
+    pub fn ty(&self) -> &Cx::Ty {
+        &self.ty
+    }
+
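+    /// Whether this witness pattern can never match any value. For example (illustrative),
+    /// `Ok(!)` is a never pattern: any field being a never pattern makes the whole pattern
+    /// unmatchable, except under an or-pattern, where all alternatives must be never patterns.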
+    pub fn is_never_pattern(&self) -> bool {
+        match self.ctor() {
+            Never => true,
+            Or => self.fields.iter().all(|p| p.is_never_pattern()),
+            _ => self.fields.iter().any(|p| p.is_never_pattern()),
+        }
+    }
+
+    pub fn iter_fields(&self) -> impl Iterator<Item = &WitnessPat<Cx>> {
+        self.fields.iter()
+    }
+}
+
+/// This is best effort and not good enough for a `Display` impl.
+impl<Cx: PatCx> fmt::Debug for WitnessPat<Cx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.ctor().fmt_fields(f, self.ty(), self.fields.iter())
+    }
+}
diff --git a/compiler/rustc_pattern_analysis/src/pat_column.rs b/compiler/rustc_pattern_analysis/src/pat_column.rs
new file mode 100644
index 00000000000..eb4e095c1c6
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/pat_column.rs
@@ -0,0 +1,90 @@
+use crate::constructor::{Constructor, SplitConstructorSet};
+use crate::pat::{DeconstructedPat, PatOrWild};
+use crate::{Captures, MatchArm, PatCx};
+
+/// A column of patterns in a match, where a column is the intuitive notion of "subpatterns that
+/// inspect the same subvalue/place".
+/// This is used to traverse patterns column-by-column for lints. Despite similarities with the
+/// algorithm in [`crate::usefulness`], this does a different traversal. Notably this is linear in
+/// the depth of patterns, whereas `compute_exhaustiveness_and_usefulness` is worst-case exponential
+/// (exhaustiveness is NP-complete). The core difference is that we treat sub-columns separately.
+///
+/// This is not used in the usefulness algorithm; only in lints.
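+///
+/// For example (an illustrative sketch):
+/// ```ignore (illustrative)
+/// match (opt, flag) {
+///     (Some(0), true) => {}
+///     (None, false) => {}
+/// }
+/// ```
+/// The subpatterns inspecting `opt` (`Some(0)` and `None`) form one column, and the subpatterns
+/// inspecting `flag` (`true` and `false`) form another.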
+#[derive(Debug)]
+pub struct PatternColumn<'p, Cx: PatCx> {
+    /// This must not contain an or-pattern. `expand_and_push` takes care to expand them.
+    patterns: Vec<&'p DeconstructedPat<Cx>>,
+}
+
+impl<'p, Cx: PatCx> PatternColumn<'p, Cx> {
+    pub fn new(arms: &[MatchArm<'p, Cx>]) -> Self {
+        let patterns = Vec::with_capacity(arms.len());
+        let mut column = PatternColumn { patterns };
+        for arm in arms {
+            column.expand_and_push(PatOrWild::Pat(arm.pat));
+        }
+        column
+    }
+    /// Pushes a pattern onto the column, expanding any or-patterns into their subpatterns.
+    /// Internal method, prefer [`PatternColumn::new`].
+    fn expand_and_push(&mut self, pat: PatOrWild<'p, Cx>) {
+        // We flatten or-patterns and skip algorithm-generated wildcards.
+        if pat.is_or_pat() {
+            self.patterns.extend(
+                pat.flatten_or_pat().into_iter().filter_map(|pat_or_wild| pat_or_wild.as_pat()),
+            )
+        } else if let Some(pat) = pat.as_pat() {
+            self.patterns.push(pat)
+        }
+    }
+
+    pub fn head_ty(&self) -> Option<&Cx::Ty> {
+        self.patterns.first().map(|pat| pat.ty())
+    }
+    pub fn iter<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<Cx>> + Captures<'a> {
+        self.patterns.iter().copied()
+    }
+
+    /// Do constructor splitting on the constructors of the column.
+    pub fn analyze_ctors(
+        &self,
+        cx: &Cx,
+        ty: &Cx::Ty,
+    ) -> Result<SplitConstructorSet<Cx>, Cx::Error> {
+        let column_ctors = self.patterns.iter().map(|p| p.ctor());
+        let ctors_for_ty = cx.ctors_for_ty(ty)?;
+        Ok(ctors_for_ty.split(column_ctors))
+    }
+
+    /// Does specialization: given a constructor, this takes the patterns from the column that match
+    /// the constructor, and outputs their fields.
+    /// This returns one column per field of the constructor. They usually all have the same length
+    /// (the number of patterns in `self` that matched `ctor`), except that we expand or-patterns
+    /// which may change the lengths.
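+    ///
+    /// For example (illustrative), specializing the column `[Some(0), None, Some(x)]` with the
+    /// `Some` constructor yields the single column `[0, x]`: `None` is not covered by `Some`, so
+    /// it contributes nothing.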
+    pub fn specialize(
+        &self,
+        cx: &Cx,
+        ty: &Cx::Ty,
+        ctor: &Constructor<Cx>,
+    ) -> Vec<PatternColumn<'p, Cx>> {
+        let arity = ctor.arity(cx, ty);
+        if arity == 0 {
+            return Vec::new();
+        }
+
+        // We specialize the column by `ctor`. This gives us `arity`-many columns of patterns. These
+        // columns may have different lengths in the presence of or-patterns (this is why we can't
+        // reuse `Matrix`).
+        let mut specialized_columns: Vec<_> =
+            (0..arity).map(|_| Self { patterns: Vec::new() }).collect();
+        let relevant_patterns =
+            self.patterns.iter().filter(|pat| ctor.is_covered_by(cx, pat.ctor()).unwrap_or(false));
+        for pat in relevant_patterns {
+            let specialized = pat.specialize(ctor, arity);
+            for (subpat, column) in specialized.into_iter().zip(&mut specialized_columns) {
+                column.expand_and_push(subpat);
+            }
+        }
+        specialized_columns
+    }
+}
diff --git a/compiler/rustc_pattern_analysis/src/rustc.rs b/compiler/rustc_pattern_analysis/src/rustc.rs
new file mode 100644
index 00000000000..6290aeb2523
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/rustc.rs
@@ -0,0 +1,1098 @@
+use std::fmt;
+use std::iter::once;
+
+use rustc_arena::DroplessArena;
+use rustc_hir::def_id::DefId;
+use rustc_hir::HirId;
+use rustc_index::{Idx, IndexVec};
+use rustc_middle::middle::stability::EvalResult;
+use rustc_middle::mir::{self, Const};
+use rustc_middle::thir::{self, Pat, PatKind, PatRange, PatRangeBoundary};
+use rustc_middle::ty::layout::IntegerExt;
+use rustc_middle::ty::{
+    self, FieldDef, OpaqueTypeKey, ScalarInt, Ty, TyCtxt, TypeVisitableExt, VariantDef,
+};
+use rustc_middle::{bug, span_bug};
+use rustc_session::lint;
+use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP};
+use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT};
+
+use crate::constructor::Constructor::*;
+use crate::constructor::{
+    IntRange, MaybeInfiniteInt, OpaqueId, RangeEnd, Slice, SliceKind, VariantVisibility,
+};
+use crate::lints::lint_nonexhaustive_missing_variants;
+use crate::pat_column::PatternColumn;
+use crate::usefulness::{compute_match_usefulness, PlaceValidity};
+use crate::{errors, Captures, PatCx, PrivateUninhabitedField};
+
+mod print;
+
+// Re-export rustc-specific versions of all these types.
+pub type Constructor<'p, 'tcx> = crate::constructor::Constructor<RustcPatCtxt<'p, 'tcx>>;
+pub type ConstructorSet<'p, 'tcx> = crate::constructor::ConstructorSet<RustcPatCtxt<'p, 'tcx>>;
+pub type DeconstructedPat<'p, 'tcx> = crate::pat::DeconstructedPat<RustcPatCtxt<'p, 'tcx>>;
+pub type MatchArm<'p, 'tcx> = crate::MatchArm<'p, RustcPatCtxt<'p, 'tcx>>;
+pub type RedundancyExplanation<'p, 'tcx> =
+    crate::usefulness::RedundancyExplanation<'p, RustcPatCtxt<'p, 'tcx>>;
+pub type Usefulness<'p, 'tcx> = crate::usefulness::Usefulness<'p, RustcPatCtxt<'p, 'tcx>>;
+pub type UsefulnessReport<'p, 'tcx> =
+    crate::usefulness::UsefulnessReport<'p, RustcPatCtxt<'p, 'tcx>>;
+pub type WitnessPat<'p, 'tcx> = crate::pat::WitnessPat<RustcPatCtxt<'p, 'tcx>>;
+
+/// A type which has gone through `cx.reveal_opaque_ty`, i.e. if it was opaque it has been
+/// replaced by its hidden type whenever the current body is allowed to see it. This ensures we
+/// consistently inspect the hidden types when we should.
+///
+/// Use `.inner()` or deref to get to the `Ty<'tcx>`.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct RevealedTy<'tcx>(Ty<'tcx>);
+
+impl<'tcx> fmt::Display for RevealedTy<'tcx> {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(fmt)
+    }
+}
+
+impl<'tcx> fmt::Debug for RevealedTy<'tcx> {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(fmt)
+    }
+}
+
+impl<'tcx> std::ops::Deref for RevealedTy<'tcx> {
+    type Target = Ty<'tcx>;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'tcx> RevealedTy<'tcx> {
+    pub fn inner(self) -> Ty<'tcx> {
+        self.0
+    }
+}
+
+#[derive(Clone)]
+pub struct RustcPatCtxt<'p, 'tcx: 'p> {
+    pub tcx: TyCtxt<'tcx>,
+    pub typeck_results: &'tcx ty::TypeckResults<'tcx>,
+    /// The module in which the match occurs. This is necessary for
+    /// checking inhabited-ness of types because whether a type is (visibly)
+    /// inhabited can depend on whether it was defined in the current module or
+    /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty
+    /// outside its module and should not be matchable with an empty match statement.
+    pub module: DefId,
+    pub param_env: ty::ParamEnv<'tcx>,
+    /// Arena used to allocate the result of `self.ctor_sub_tys()`.
+    pub dropless_arena: &'p DroplessArena,
+    /// Lint level at the match.
+    pub match_lint_level: HirId,
+    /// The span of the whole match, if applicable.
+    pub whole_match_span: Option<Span>,
+    /// Span of the scrutinee.
+    pub scrut_span: Span,
+    /// Only produce `NON_EXHAUSTIVE_OMITTED_PATTERNS` lint on refutable patterns.
+    pub refutable: bool,
+    /// Whether the data at the scrutinee is known to be valid. This is false if the scrutinee comes
+    /// from a union field, a pointer deref, or a reference deref (pending opsem decisions).
+    pub known_valid_scrutinee: bool,
+}
+
+impl<'p, 'tcx: 'p> fmt::Debug for RustcPatCtxt<'p, 'tcx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RustcPatCtxt").finish()
+    }
+}
+
+impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
+    /// Type inference occasionally gives us opaque types in places where corresponding patterns
+    /// have more specific types. To avoid inconsistencies as well as detect opaque uninhabited
+    /// types, we use the corresponding concrete type if possible.
+    #[inline]
+    pub fn reveal_opaque_ty(&self, ty: Ty<'tcx>) -> RevealedTy<'tcx> {
+        fn reveal_inner<'tcx>(cx: &RustcPatCtxt<'_, 'tcx>, ty: Ty<'tcx>) -> RevealedTy<'tcx> {
+            let ty::Alias(ty::Opaque, alias_ty) = *ty.kind() else { bug!() };
+            if let Some(local_def_id) = alias_ty.def_id.as_local() {
+                let key = ty::OpaqueTypeKey { def_id: local_def_id, args: alias_ty.args };
+                if let Some(ty) = cx.reveal_opaque_key(key) {
+                    return RevealedTy(ty);
+                }
+            }
+            RevealedTy(ty)
+        }
+        if let ty::Alias(ty::Opaque, _) = ty.kind() {
+            reveal_inner(self, ty)
+        } else {
+            RevealedTy(ty)
+        }
+    }
+
+    /// Returns the hidden type corresponding to this key if the body under analysis is allowed to
+    /// know it.
+    fn reveal_opaque_key(&self, key: OpaqueTypeKey<'tcx>) -> Option<Ty<'tcx>> {
+        self.typeck_results.concrete_opaque_types.get(&key).map(|x| x.ty)
+    }
+    // This can take a non-revealed `Ty` because it reveals opaques itself.
+    pub fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
+        !ty.inhabited_predicate(self.tcx).apply_revealing_opaque(
+            self.tcx,
+            self.param_env,
+            self.module,
+            &|key| self.reveal_opaque_key(key),
+        )
+    }
+
+    /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
+    pub fn is_foreign_non_exhaustive_enum(&self, ty: RevealedTy<'tcx>) -> bool {
+        match ty.kind() {
+            ty::Adt(def, ..) => {
+                def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did().is_local()
+            }
+            _ => false,
+        }
+    }
+
+    /// Whether the range denotes the fictitious values before `isize::MIN` or after
+    /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist).
+    pub fn is_range_beyond_boundaries(&self, range: &IntRange, ty: RevealedTy<'tcx>) -> bool {
+        ty.is_ptr_sized_integral() && {
+            // The two invalid ranges are `NegInfinity..isize::MIN` (represented as
+            // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `hoist_pat_range_bdy`
+            // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `range.lo`
+            // otherwise.
+            let lo = self.hoist_pat_range_bdy(range.lo, ty);
+            matches!(lo, PatRangeBoundary::PosInfinity)
+                || matches!(range.hi, MaybeInfiniteInt::Finite(0))
+        }
+    }
+
+    pub(crate) fn variant_sub_tys(
+        &self,
+        ty: RevealedTy<'tcx>,
+        variant: &'tcx VariantDef,
+    ) -> impl Iterator<Item = (&'tcx FieldDef, RevealedTy<'tcx>)> + Captures<'p> + Captures<'_>
+    {
+        let ty::Adt(_, args) = ty.kind() else { bug!() };
+        variant.fields.iter().map(move |field| {
+            let ty = field.ty(self.tcx, args);
+            // `field.ty()` doesn't normalize after instantiating.
+            let ty = self.tcx.normalize_erasing_regions(self.param_env, ty);
+            let ty = self.reveal_opaque_ty(ty);
+            (field, ty)
+        })
+    }
+
+    pub(crate) fn variant_index_for_adt(
+        ctor: &Constructor<'p, 'tcx>,
+        adt: ty::AdtDef<'tcx>,
+    ) -> VariantIdx {
+        match *ctor {
+            Variant(idx) => idx,
+            Struct | UnionField => {
+                assert!(!adt.is_enum());
+                FIRST_VARIANT
+            }
+            _ => bug!("bad constructor {:?} for adt {:?}", ctor, adt),
+        }
+    }
+
+    /// Returns the types of the fields for a given constructor. The result must have a length of
+    /// `ctor.arity()`.
+    pub(crate) fn ctor_sub_tys<'a>(
+        &'a self,
+        ctor: &'a Constructor<'p, 'tcx>,
+        ty: RevealedTy<'tcx>,
+    ) -> impl Iterator<Item = (RevealedTy<'tcx>, PrivateUninhabitedField)>
+    + ExactSizeIterator
+    + Captures<'a> {
+        fn reveal_and_alloc<'a, 'tcx>(
+            cx: &'a RustcPatCtxt<'_, 'tcx>,
+            iter: impl Iterator<Item = Ty<'tcx>>,
+        ) -> &'a [(RevealedTy<'tcx>, PrivateUninhabitedField)] {
+            cx.dropless_arena.alloc_from_iter(
+                iter.map(|ty| cx.reveal_opaque_ty(ty))
+                    .map(|ty| (ty, PrivateUninhabitedField(false))),
+            )
+        }
+        let cx = self;
+        let slice = match ctor {
+            Struct | Variant(_) | UnionField => match ty.kind() {
+                ty::Tuple(fs) => reveal_and_alloc(cx, fs.iter()),
+                ty::Adt(adt, args) => {
+                    if adt.is_box() {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        reveal_and_alloc(cx, once(args.type_at(0)))
+                    } else {
+                        let variant =
+                            &adt.variant(RustcPatCtxt::variant_index_for_adt(&ctor, *adt));
+
+                        // In the case of either a `#[non_exhaustive]` field list or a non-public
+                        // field, we skip uninhabited fields in order not to reveal the
+                        // uninhabitedness of the whole variant.
+                        let is_non_exhaustive =
+                            variant.is_field_list_non_exhaustive() && !adt.did().is_local();
+                        let tys = cx.variant_sub_tys(ty, variant).map(|(field, ty)| {
+                            let is_visible =
+                                adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx);
+                            let is_uninhabited = (cx.tcx.features().exhaustive_patterns
+                                || cx.tcx.features().min_exhaustive_patterns)
+                                && cx.is_uninhabited(*ty);
+                            let skip = is_uninhabited && (!is_visible || is_non_exhaustive);
+                            (ty, PrivateUninhabitedField(skip))
+                        });
+                        cx.dropless_arena.alloc_from_iter(tys)
+                    }
+                }
+                _ => bug!("Unexpected type for constructor `{ctor:?}`: {ty:?}"),
+            },
+            Ref => match ty.kind() {
+                ty::Ref(_, rty, _) => reveal_and_alloc(cx, once(*rty)),
+                _ => bug!("Unexpected type for `Ref` constructor: {ty:?}"),
+            },
+            Slice(slice) => match *ty.kind() {
+                ty::Slice(ty) | ty::Array(ty, _) => {
+                    let arity = slice.arity();
+                    reveal_and_alloc(cx, (0..arity).map(|_| ty))
+                }
+                _ => bug!("bad slice pattern {:?} {:?}", ctor, ty),
+            },
+            Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..)
+            | F128Range(..) | Str(..) | Opaque(..) | Never | NonExhaustive | Hidden | Missing
+            | PrivateUninhabited | Wildcard => &[],
+            Or => {
+                bug!("called `Fields::wildcards` on an `Or` ctor")
+            }
+        };
+        slice.iter().copied()
+    }
+
+    /// The number of fields for this constructor.
+    pub(crate) fn ctor_arity(&self, ctor: &Constructor<'p, 'tcx>, ty: RevealedTy<'tcx>) -> usize {
+        match ctor {
+            Struct | Variant(_) | UnionField => match ty.kind() {
+                ty::Tuple(fs) => fs.len(),
+                ty::Adt(adt, ..) => {
+                    if adt.is_box() {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        1
+                    } else {
+                        let variant_idx = RustcPatCtxt::variant_index_for_adt(&ctor, *adt);
+                        adt.variant(variant_idx).fields.len()
+                    }
+                }
+                _ => bug!("Unexpected type for constructor `{ctor:?}`: {ty:?}"),
+            },
+            Ref => 1,
+            Slice(slice) => slice.arity(),
+            Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..)
+            | F128Range(..) | Str(..) | Opaque(..) | Never | NonExhaustive | Hidden | Missing
+            | PrivateUninhabited | Wildcard => 0,
+            Or => bug!("The `Or` constructor doesn't have a fixed arity"),
+        }
+    }
+
+    /// Creates a set that represents all the constructors of `ty`.
+    ///
+    /// See [`crate::constructor`] for considerations of emptiness.
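+    ///
+    /// For example (illustrative), `bool` yields `ConstructorSet::Bool`, `u8` yields the integer
+    /// range `0..=255`, and an enum yields `ConstructorSet::Variants` with a per-variant
+    /// visibility (visible, hidden, or empty) computed below.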
+    pub fn ctors_for_ty(
+        &self,
+        ty: RevealedTy<'tcx>,
+    ) -> Result<ConstructorSet<'p, 'tcx>, ErrorGuaranteed> {
+        let cx = self;
+        let make_uint_range = |start, end| {
+            IntRange::from_range(
+                MaybeInfiniteInt::new_finite_uint(start),
+                MaybeInfiniteInt::new_finite_uint(end),
+                RangeEnd::Included,
+            )
+        };
+        // Abort on type error.
+        ty.error_reported()?;
+        // This determines the set of all possible constructors for the type `ty`. For numbers,
+        // arrays and slices we use ranges and variable-length slices when appropriate.
+        Ok(match ty.kind() {
+            ty::Bool => ConstructorSet::Bool,
+            ty::Char => {
+                // The valid Unicode Scalar Value ranges.
+                ConstructorSet::Integers {
+                    range_1: make_uint_range('\u{0000}' as u128, '\u{D7FF}' as u128),
+                    range_2: Some(make_uint_range('\u{E000}' as u128, '\u{10FFFF}' as u128)),
+                }
+            }
+            &ty::Int(ity) => {
+                let range = if ty.is_ptr_sized_integral() {
+                    // The min/max values of `isize` are not allowed to be observed.
+                    IntRange {
+                        lo: MaybeInfiniteInt::NegInfinity,
+                        hi: MaybeInfiniteInt::PosInfinity,
+                    }
+                } else {
+                    let size = Integer::from_int_ty(&cx.tcx, ity).size().bits();
+                    let min = 1u128 << (size - 1);
+                    let max = min - 1;
+                    let min = MaybeInfiniteInt::new_finite_int(min, size);
+                    let max = MaybeInfiniteInt::new_finite_int(max, size);
+                    IntRange::from_range(min, max, RangeEnd::Included)
+                };
+                ConstructorSet::Integers { range_1: range, range_2: None }
+            }
+            &ty::Uint(uty) => {
+                let range = if ty.is_ptr_sized_integral() {
+                    // The max value of `usize` is not allowed to be observed.
+                    let lo = MaybeInfiniteInt::new_finite_uint(0);
+                    IntRange { lo, hi: MaybeInfiniteInt::PosInfinity }
+                } else {
+                    let size = Integer::from_uint_ty(&cx.tcx, uty).size();
+                    let max = size.truncate(u128::MAX);
+                    make_uint_range(0, max)
+                };
+                ConstructorSet::Integers { range_1: range, range_2: None }
+            }
+            ty::Slice(sub_ty) => ConstructorSet::Slice {
+                array_len: None,
+                subtype_is_empty: cx.is_uninhabited(*sub_ty),
+            },
+            ty::Array(sub_ty, len) => {
+                // We treat arrays of a constant but unknown length like slices.
+                ConstructorSet::Slice {
+                    array_len: len.try_eval_target_usize(cx.tcx, cx.param_env).map(|l| l as usize),
+                    subtype_is_empty: cx.is_uninhabited(*sub_ty),
+                }
+            }
+            ty::Adt(def, args) if def.is_enum() => {
+                let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
+                if def.variants().is_empty() && !is_declared_nonexhaustive {
+                    ConstructorSet::NoConstructors
+                } else {
+                    let mut variants =
+                        IndexVec::from_elem(VariantVisibility::Visible, def.variants());
+                    for (idx, v) in def.variants().iter_enumerated() {
+                        let variant_def_id = def.variant(idx).def_id;
+                        // Visibly uninhabited variants.
+                        let is_inhabited = v
+                            .inhabited_predicate(cx.tcx, *def)
+                            .instantiate(cx.tcx, args)
+                            .apply_revealing_opaque(cx.tcx, cx.param_env, cx.module, &|key| {
+                                cx.reveal_opaque_key(key)
+                            });
+                        // Variants that depend on a disabled unstable feature.
+                        let is_unstable = matches!(
+                            cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None),
+                            EvalResult::Deny { .. }
+                        );
+                        // Foreign `#[doc(hidden)]` variants.
+                        let is_doc_hidden =
+                            cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local();
+                        let visibility = if !is_inhabited {
+                            // FIXME: handle empty+hidden
+                            VariantVisibility::Empty
+                        } else if is_unstable || is_doc_hidden {
+                            VariantVisibility::Hidden
+                        } else {
+                            VariantVisibility::Visible
+                        };
+                        variants[idx] = visibility;
+                    }
+
+                    ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive }
+                }
+            }
+            ty::Adt(def, _) if def.is_union() => ConstructorSet::Union,
+            ty::Adt(..) | ty::Tuple(..) => {
+                ConstructorSet::Struct { empty: cx.is_uninhabited(ty.inner()) }
+            }
+            ty::Ref(..) => ConstructorSet::Ref,
+            ty::Never => ConstructorSet::NoConstructors,
+            // This type is one for which we cannot list constructors, like `str` or `f64`.
+            // FIXME(Nadrieril): which of these are actually allowed?
+            ty::Float(_)
+            | ty::Str
+            | ty::Foreign(_)
+            | ty::RawPtr(_, _)
+            | ty::FnDef(_, _)
+            | ty::FnPtr(_)
+            | ty::Pat(_, _)
+            | ty::Dynamic(_, _, _)
+            | ty::Closure(..)
+            | ty::CoroutineClosure(..)
+            | ty::Coroutine(_, _)
+            | ty::Alias(_, _)
+            | ty::Param(_)
+            | ty::Error(_) => ConstructorSet::Unlistable,
+            ty::CoroutineWitness(_, _) | ty::Bound(_, _) | ty::Placeholder(_) | ty::Infer(_) => {
+                bug!("Encountered unexpected type in `ConstructorSet::for_ty`: {ty:?}")
+            }
+        })
+    }
+
+    pub(crate) fn lower_pat_range_bdy(
+        &self,
+        bdy: PatRangeBoundary<'tcx>,
+        ty: RevealedTy<'tcx>,
+    ) -> MaybeInfiniteInt {
+        match bdy {
+            PatRangeBoundary::NegInfinity => MaybeInfiniteInt::NegInfinity,
+            PatRangeBoundary::Finite(value) => {
+                let bits = value.eval_bits(self.tcx, self.param_env);
+                match *ty.kind() {
+                    ty::Int(ity) => {
+                        let size = Integer::from_int_ty(&self.tcx, ity).size().bits();
+                        MaybeInfiniteInt::new_finite_int(bits, size)
+                    }
+                    _ => MaybeInfiniteInt::new_finite_uint(bits),
+                }
+            }
+            PatRangeBoundary::PosInfinity => MaybeInfiniteInt::PosInfinity,
+        }
+    }
+
+    /// Note: the input patterns must have been lowered through
+    /// `rustc_mir_build::thir::pattern::check_match::MatchVisitor::lower_pattern`.
+    pub fn lower_pat(&self, pat: &'p Pat<'tcx>) -> DeconstructedPat<'p, 'tcx> {
+        let cx = self;
+        let ty = cx.reveal_opaque_ty(pat.ty);
+        let ctor;
+        let arity;
+        let fields: Vec<_>;
+        match &pat.kind {
+            PatKind::AscribeUserType { subpattern, .. }
+            | PatKind::InlineConstant { subpattern, .. } => return self.lower_pat(subpattern),
+            PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat),
+            PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
+                ctor = Wildcard;
+                fields = vec![];
+                arity = 0;
+            }
+            PatKind::Deref { subpattern } => {
+                fields = vec![self.lower_pat(subpattern).at_index(0)];
+                arity = 1;
+                ctor = match ty.kind() {
+                    // This is a box pattern.
+                    ty::Adt(adt, ..) if adt.is_box() => Struct,
+                    ty::Ref(..) => Ref,
+                    _ => span_bug!(
+                        pat.span,
+                        "pattern has unexpected type: pat: {:?}, ty: {:?}",
+                        pat.kind,
+                        ty.inner()
+                    ),
+                };
+            }
+            PatKind::DerefPattern { .. } => {
+                // FIXME(deref_patterns): At least detect that `box _` is irrefutable.
+                fields = vec![];
+                arity = 0;
+                ctor = Opaque(OpaqueId::new());
+            }
+            PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
+                match ty.kind() {
+                    ty::Tuple(fs) => {
+                        ctor = Struct;
+                        arity = fs.len();
+                        fields = subpatterns
+                            .iter()
+                            .map(|ipat| self.lower_pat(&ipat.pattern).at_index(ipat.field.index()))
+                            .collect();
+                    }
+                    ty::Adt(adt, _) if adt.is_box() => {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
+                        // _)` or a box pattern. As a hack to avoid an ICE with the former, we
+                        // ignore other fields than the first one. This will trigger an error later
+                        // anyway.
+                        // See https://github.com/rust-lang/rust/issues/82772,
+                        // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
+                        // The problem is that we can't know from the type whether we'll match
+                        // normally or through box-patterns. We'll have to figure out a proper
+                        // solution when we introduce generalized deref patterns. Also need to
+                        // prevent mixing of those two options.
+                        let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
+                        if let Some(pat) = pattern {
+                            fields = vec![self.lower_pat(&pat.pattern).at_index(0)];
+                        } else {
+                            fields = vec![];
+                        }
+                        ctor = Struct;
+                        arity = 1;
+                    }
+                    ty::Adt(adt, _) => {
+                        ctor = match pat.kind {
+                            PatKind::Leaf { .. } if adt.is_union() => UnionField,
+                            PatKind::Leaf { .. } => Struct,
+                            PatKind::Variant { variant_index, .. } => Variant(variant_index),
+                            _ => bug!(),
+                        };
+                        let variant =
+                            &adt.variant(RustcPatCtxt::variant_index_for_adt(&ctor, *adt));
+                        arity = variant.fields.len();
+                        fields = subpatterns
+                            .iter()
+                            .map(|ipat| self.lower_pat(&ipat.pattern).at_index(ipat.field.index()))
+                            .collect();
+                    }
+                    _ => span_bug!(
+                        pat.span,
+                        "pattern has unexpected type: pat: {:?}, ty: {}",
+                        pat.kind,
+                        ty.inner()
+                    ),
+                }
+            }
+            PatKind::Constant { value } => {
+                match ty.kind() {
+                    ty::Bool => {
+                        ctor = match value.try_eval_bool(cx.tcx, cx.param_env) {
+                            Some(b) => Bool(b),
+                            None => Opaque(OpaqueId::new()),
+                        };
+                        fields = vec![];
+                        arity = 0;
+                    }
+                    ty::Char | ty::Int(_) | ty::Uint(_) => {
+                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
+                            Some(bits) => {
+                                let x = match *ty.kind() {
+                                    ty::Int(ity) => {
+                                        let size = Integer::from_int_ty(&cx.tcx, ity).size().bits();
+                                        MaybeInfiniteInt::new_finite_int(bits, size)
+                                    }
+                                    _ => MaybeInfiniteInt::new_finite_uint(bits),
+                                };
+                                IntRange(IntRange::from_singleton(x))
+                            }
+                            None => Opaque(OpaqueId::new()),
+                        };
+                        fields = vec![];
+                        arity = 0;
+                    }
+                    ty::Float(ty::FloatTy::F16) => {
+                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
+                            Some(bits) => {
+                                use rustc_apfloat::Float;
+                                let value = rustc_apfloat::ieee::Half::from_bits(bits);
+                                F16Range(value, value, RangeEnd::Included)
+                            }
+                            None => Opaque(OpaqueId::new()),
+                        };
+                        fields = vec![];
+                        arity = 0;
+                    }
+                    ty::Float(ty::FloatTy::F32) => {
+                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
+                            Some(bits) => {
+                                use rustc_apfloat::Float;
+                                let value = rustc_apfloat::ieee::Single::from_bits(bits);
+                                F32Range(value, value, RangeEnd::Included)
+                            }
+                            None => Opaque(OpaqueId::new()),
+                        };
+                        fields = vec![];
+                        arity = 0;
+                    }
+                    ty::Float(ty::FloatTy::F64) => {
+                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
+                            Some(bits) => {
+                                use rustc_apfloat::Float;
+                                let value = rustc_apfloat::ieee::Double::from_bits(bits);
+                                F64Range(value, value, RangeEnd::Included)
+                            }
+                            None => Opaque(OpaqueId::new()),
+                        };
+                        fields = vec![];
+                        arity = 0;
+                    }
+                    ty::Float(ty::FloatTy::F128) => {
+                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
+                            Some(bits) => {
+                                use rustc_apfloat::Float;
+                                let value = rustc_apfloat::ieee::Quad::from_bits(bits);
+                                F128Range(value, value, RangeEnd::Included)
+                            }
+                            None => Opaque(OpaqueId::new()),
+                        };
+                        fields = vec![];
+                        arity = 0;
+                    }
+                    ty::Ref(_, t, _) if t.is_str() => {
+                        // We want a `&str` constant to behave like a `Deref` pattern, to be compatible
+                        // with other `Deref` patterns. This could have been done in `const_to_pat`,
+                        // but that causes issues with the rest of the matching code.
+                        // So here, the constructor for a `"foo"` pattern is `&` (represented by
+                        // `Ref`), and has one field. That field has constructor `Str(value)` and no
+                        // subfields.
+                        // Note: `t` is `str`, not `&str`.
+                        let ty = self.reveal_opaque_ty(*t);
+                        let subpattern = DeconstructedPat::new(Str(*value), Vec::new(), 0, ty, pat);
+                        ctor = Ref;
+                        fields = vec![subpattern.at_index(0)];
+                        arity = 1;
+                    }
+                    // All constants that can be structurally matched have already been expanded
+                    // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
+                    // opaque.
+                    _ => {
+                        ctor = Opaque(OpaqueId::new());
+                        fields = vec![];
+                        arity = 0;
+                    }
+                }
+            }
+            PatKind::Range(patrange) => {
+                let PatRange { lo, hi, end, .. } = patrange.as_ref();
+                let end = match end {
+                    rustc_hir::RangeEnd::Included => RangeEnd::Included,
+                    rustc_hir::RangeEnd::Excluded => RangeEnd::Excluded,
+                };
+                ctor = match ty.kind() {
+                    ty::Char | ty::Int(_) | ty::Uint(_) => {
+                        let lo = cx.lower_pat_range_bdy(*lo, ty);
+                        let hi = cx.lower_pat_range_bdy(*hi, ty);
+                        IntRange(IntRange::from_range(lo, hi, end))
+                    }
+                    ty::Float(fty) => {
+                        use rustc_apfloat::Float;
+                        let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
+                        let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
+                        match fty {
+                            ty::FloatTy::F16 => {
+                                use rustc_apfloat::ieee::Half;
+                                let lo = lo.map(Half::from_bits).unwrap_or(-Half::INFINITY);
+                                let hi = hi.map(Half::from_bits).unwrap_or(Half::INFINITY);
+                                F16Range(lo, hi, end)
+                            }
+                            ty::FloatTy::F32 => {
+                                use rustc_apfloat::ieee::Single;
+                                let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY);
+                                let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY);
+                                F32Range(lo, hi, end)
+                            }
+                            ty::FloatTy::F64 => {
+                                use rustc_apfloat::ieee::Double;
+                                let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY);
+                                let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY);
+                                F64Range(lo, hi, end)
+                            }
+                            ty::FloatTy::F128 => {
+                                use rustc_apfloat::ieee::Quad;
+                                let lo = lo.map(Quad::from_bits).unwrap_or(-Quad::INFINITY);
+                                let hi = hi.map(Quad::from_bits).unwrap_or(Quad::INFINITY);
+                                F128Range(lo, hi, end)
+                            }
+                        }
+                    }
+                    _ => span_bug!(pat.span, "invalid type for range pattern: {}", ty.inner()),
+                };
+                fields = vec![];
+                arity = 0;
+            }
+            PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => {
+                let array_len = match ty.kind() {
+                    ty::Array(_, length) => {
+                        Some(length.eval_target_usize(cx.tcx, cx.param_env) as usize)
+                    }
+                    ty::Slice(_) => None,
+                    _ => span_bug!(pat.span, "bad ty {} for slice pattern", ty.inner()),
+                };
+                let kind = if slice.is_some() {
+                    SliceKind::VarLen(prefix.len(), suffix.len())
+                } else {
+                    SliceKind::FixedLen(prefix.len() + suffix.len())
+                };
+                ctor = Slice(Slice::new(array_len, kind));
+                fields = prefix
+                    .iter()
+                    .chain(suffix.iter())
+                    .map(|p| self.lower_pat(&*p))
+                    .enumerate()
+                    .map(|(i, p)| p.at_index(i))
+                    .collect();
+                arity = kind.arity();
+            }
+            PatKind::Or { .. } => {
+                ctor = Or;
+                let pats = expand_or_pat(pat);
+                fields = pats
+                    .into_iter()
+                    .map(|p| self.lower_pat(p))
+                    .enumerate()
+                    .map(|(i, p)| p.at_index(i))
+                    .collect();
+                arity = fields.len();
+            }
+            PatKind::Never => {
+                // A never pattern matches all the values of its type (namely none). Moreover it
+                // must be compatible with other constructors, since we can use `!` on a type like
+                // `Result<!, !>` which has other constructors. Hence we lower it as a wildcard.
+                ctor = Wildcard;
+                fields = vec![];
+                arity = 0;
+            }
+            PatKind::Error(_) => {
+                ctor = Opaque(OpaqueId::new());
+                fields = vec![];
+                arity = 0;
+            }
+        }
+        DeconstructedPat::new(ctor, fields, arity, ty, pat)
+    }
+
+    /// Convert back to a `thir::PatRangeBoundary` for diagnostic purposes.
+    /// Note: it is possible to get `isize/usize::MAX+1` here, as explained in the doc for
+    /// [`IntRange::split`]. This cannot be represented as a `Const`, so we represent it with
+    /// `PosInfinity`.
+    fn hoist_pat_range_bdy(
+        &self,
+        miint: MaybeInfiniteInt,
+        ty: RevealedTy<'tcx>,
+    ) -> PatRangeBoundary<'tcx> {
+        use MaybeInfiniteInt::*;
+        let tcx = self.tcx;
+        match miint {
+            NegInfinity => PatRangeBoundary::NegInfinity,
+            Finite(_) => {
+                let size = ty.primitive_size(tcx);
+                let bits = match *ty.kind() {
+                    ty::Int(_) => miint.as_finite_int(size.bits()).unwrap(),
+                    _ => miint.as_finite_uint().unwrap(),
+                };
+                match ScalarInt::try_from_uint(bits, size) {
+                    Some(scalar) => {
+                        let value = mir::Const::from_scalar(tcx, scalar.into(), ty.inner());
+                        PatRangeBoundary::Finite(value)
+                    }
+                    // The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value
+                    // for a type, the problem isn't that the value is too small. So it must be too
+                    // large.
+                    None => PatRangeBoundary::PosInfinity,
+                }
+            }
+            PosInfinity => PatRangeBoundary::PosInfinity,
+        }
+    }
+
+    /// Convert to a [`print::Pat`] for diagnostic purposes.
+    fn hoist_pat_range(&self, range: &IntRange, ty: RevealedTy<'tcx>) -> print::Pat<'tcx> {
+        use print::{Pat, PatKind};
+        use MaybeInfiniteInt::*;
+        let cx = self;
+        let kind = if matches!((range.lo, range.hi), (NegInfinity, PosInfinity)) {
+            PatKind::Wild
+        } else if range.is_singleton() {
+            let lo = cx.hoist_pat_range_bdy(range.lo, ty);
+            let value = lo.as_finite().unwrap();
+            PatKind::Constant { value }
+        } else {
+            // We convert to an inclusive range for diagnostics.
+            let mut end = rustc_hir::RangeEnd::Included;
+            let mut lo = cx.hoist_pat_range_bdy(range.lo, ty);
+            if matches!(lo, PatRangeBoundary::PosInfinity) {
+                // The only reason to get `PosInfinity` here is the special case where
+                // `hoist_pat_range_bdy` found `{u,i}size::MAX+1`. So the range denotes the
+                // fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do
+                // this). We show this to the user as `usize::MAX..` which is slightly incorrect but
+                // probably clear enough.
+                let c = ty.numeric_max_val(cx.tcx).unwrap();
+                let value = mir::Const::from_ty_const(c, ty.0, cx.tcx);
+                lo = PatRangeBoundary::Finite(value);
+            }
+            let hi = if let Some(hi) = range.hi.minus_one() {
+                hi
+            } else {
+                // The range encodes `..ty::MIN`, so we can't convert it to an inclusive range.
+                end = rustc_hir::RangeEnd::Excluded;
+                range.hi
+            };
+            let hi = cx.hoist_pat_range_bdy(hi, ty);
+            PatKind::Range(Box::new(PatRange { lo, hi, end, ty: ty.inner() }))
+        };
+
+        Pat { ty: ty.inner(), kind }
+    }
+
+    /// Prints a [`WitnessPat`] to an owned string, for diagnostic purposes.
+    pub fn print_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> String {
+        // This works by converting the witness pattern to a `print::Pat`
+        // and then printing that, but callers don't need to know that.
+        self.hoist_witness_pat(pat).to_string()
+    }
+
+    /// Convert to a [`print::Pat`] for diagnostic purposes. This panics for patterns that don't
+    /// appear in diagnostics, like float ranges.
+    fn hoist_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> print::Pat<'tcx> {
+        use print::{FieldPat, Pat, PatKind};
+        let cx = self;
+        let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild);
+        let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p)));
+        let kind = match pat.ctor() {
+            Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) },
+            IntRange(range) => return self.hoist_pat_range(range, *pat.ty()),
+            Struct | Variant(_) | UnionField => match pat.ty().kind() {
+                ty::Tuple(..) => PatKind::Leaf {
+                    subpatterns: subpatterns
+                        .enumerate()
+                        .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern })
+                        .collect(),
+                },
+                ty::Adt(adt_def, _) if adt_def.is_box() => {
+                    // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
+                    // of `std`). So this branch is only reachable when the feature is enabled and
+                    // the pattern is a box pattern.
+                    PatKind::Deref { subpattern: subpatterns.next().unwrap() }
+                }
+                ty::Adt(adt_def, _args) => {
+                    let variant_index = RustcPatCtxt::variant_index_for_adt(&pat.ctor(), *adt_def);
+                    let subpatterns = subpatterns
+                        .enumerate()
+                        .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern })
+                        .collect();
+
+                    if adt_def.is_enum() {
+                        PatKind::Variant { adt_def: *adt_def, variant_index, subpatterns }
+                    } else {
+                        PatKind::Leaf { subpatterns }
+                    }
+                }
+                _ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), *pat.ty()),
+            },
+            // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+            // be careful to reconstruct the correct constant pattern here. However a string
+            // literal pattern will never be reported as a non-exhaustiveness witness, so we
+            // ignore this issue.
+            Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
+            Slice(slice) => {
+                match slice.kind {
+                    SliceKind::FixedLen(_) => PatKind::Slice {
+                        prefix: subpatterns.collect(),
+                        slice: None,
+                        suffix: Box::new([]),
+                    },
+                    SliceKind::VarLen(prefix, _) => {
+                        let mut subpatterns = subpatterns.peekable();
+                        let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect();
+                        if slice.array_len.is_some() {
+                            // Improves diagnostics a bit: if the type is a known-size array, instead
+                            // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`.
+                            // This is incorrect if the size is not known, since `[_, ..]` captures
+                            // arrays of lengths `>= 1` whereas `[..]` captures any length.
+                            while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
+                                prefix.pop();
+                            }
+                            while subpatterns.peek().is_some()
+                                && is_wildcard(subpatterns.peek().unwrap())
+                            {
+                                subpatterns.next();
+                            }
+                        }
+                        let suffix: Box<[_]> = subpatterns.collect();
+                        let wild = Pat { ty: pat.ty().inner(), kind: PatKind::Wild };
+                        PatKind::Slice {
+                            prefix: prefix.into_boxed_slice(),
+                            slice: Some(Box::new(wild)),
+                            suffix,
+                        }
+                    }
+                }
+            }
+            &Str(value) => PatKind::Constant { value },
+            Never if self.tcx.features().never_patterns => PatKind::Never,
+            Never | Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild,
+            Missing { .. } => bug!(
+                "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
+                `Missing` should have been processed in `apply_constructors`"
+            ),
+            F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Opaque(..) | Or => {
+                bug!("can't convert to pattern: {:?}", pat)
+            }
+        };
+
+        Pat { ty: pat.ty().inner(), kind }
+    }
+}
+
+impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> {
+    type Ty = RevealedTy<'tcx>;
+    type Error = ErrorGuaranteed;
+    type VariantIdx = VariantIdx;
+    type StrLit = Const<'tcx>;
+    type ArmData = HirId;
+    type PatData = &'p Pat<'tcx>;
+
+    fn is_exhaustive_patterns_feature_on(&self) -> bool {
+        self.tcx.features().exhaustive_patterns
+    }
+    fn is_min_exhaustive_patterns_feature_on(&self) -> bool {
+        self.tcx.features().min_exhaustive_patterns
+    }
+
+    fn ctor_arity(&self, ctor: &crate::constructor::Constructor<Self>, ty: &Self::Ty) -> usize {
+        self.ctor_arity(ctor, *ty)
+    }
+    fn ctor_sub_tys<'a>(
+        &'a self,
+        ctor: &'a crate::constructor::Constructor<Self>,
+        ty: &'a Self::Ty,
+    ) -> impl Iterator<Item = (Self::Ty, PrivateUninhabitedField)> + ExactSizeIterator + Captures<'a>
+    {
+        self.ctor_sub_tys(ctor, *ty)
+    }
+    fn ctors_for_ty(
+        &self,
+        ty: &Self::Ty,
+    ) -> Result<crate::constructor::ConstructorSet<Self>, Self::Error> {
+        self.ctors_for_ty(*ty)
+    }
+
+    fn write_variant_name(
+        f: &mut fmt::Formatter<'_>,
+        ctor: &crate::constructor::Constructor<Self>,
+        ty: &Self::Ty,
+    ) -> fmt::Result {
+        if let ty::Adt(adt, _) = ty.kind() {
+            if adt.is_box() {
+                write!(f, "Box")?
+            } else {
+                let variant = adt.variant(Self::variant_index_for_adt(ctor, *adt));
+                write!(f, "{}", variant.name)?;
+            }
+        }
+        Ok(())
+    }
+
+    fn bug(&self, fmt: fmt::Arguments<'_>) -> Self::Error {
+        span_bug!(self.scrut_span, "{}", fmt)
+    }
+
+    fn lint_overlapping_range_endpoints(
+        &self,
+        pat: &crate::pat::DeconstructedPat<Self>,
+        overlaps_on: IntRange,
+        overlaps_with: &[&crate::pat::DeconstructedPat<Self>],
+    ) {
+        let overlap_as_pat = self.hoist_pat_range(&overlaps_on, *pat.ty());
+        let overlaps: Vec<_> = overlaps_with
+            .iter()
+            .map(|pat| pat.data().span)
+            .map(|span| errors::Overlap { range: overlap_as_pat.to_string(), span })
+            .collect();
+        let pat_span = pat.data().span;
+        self.tcx.emit_node_span_lint(
+            lint::builtin::OVERLAPPING_RANGE_ENDPOINTS,
+            self.match_lint_level,
+            pat_span,
+            errors::OverlappingRangeEndpoints { overlap: overlaps, range: pat_span },
+        );
+    }
+
+    fn complexity_exceeded(&self) -> Result<(), Self::Error> {
+        let span = self.whole_match_span.unwrap_or(self.scrut_span);
+        Err(self.tcx.dcx().span_err(span, "reached pattern complexity limit"))
+    }
+
+    fn lint_non_contiguous_range_endpoints(
+        &self,
+        pat: &crate::pat::DeconstructedPat<Self>,
+        gap: IntRange,
+        gapped_with: &[&crate::pat::DeconstructedPat<Self>],
+    ) {
+        let &thir_pat = pat.data();
+        let thir::PatKind::Range(range) = &thir_pat.kind else { return };
+        // Only lint when the left range is an exclusive range.
+        if range.end != rustc_hir::RangeEnd::Excluded {
+            return;
+        }
+        // `pat` is an exclusive range like `lo..gap`. `gapped_with` contains ranges that start with
+        // `gap+1`.
+        let suggested_range: String = {
+            // Suggest `lo..=gap` instead.
+            let mut suggested_range = PatRange::clone(range);
+            suggested_range.end = rustc_hir::RangeEnd::Included;
+            suggested_range.to_string()
+        };
+        let gap_as_pat = self.hoist_pat_range(&gap, *pat.ty());
+        if gapped_with.is_empty() {
+            // If `gapped_with` is empty, `gap == T::MAX`.
+            self.tcx.emit_node_span_lint(
+                lint::builtin::NON_CONTIGUOUS_RANGE_ENDPOINTS,
+                self.match_lint_level,
+                thir_pat.span,
+                errors::ExclusiveRangeMissingMax {
+                    // Point at this range.
+                    first_range: thir_pat.span,
+                    // That's the gap that isn't covered.
+                    max: gap_as_pat.to_string(),
+                    // Suggest `lo..=max` instead.
+                    suggestion: suggested_range,
+                },
+            );
+        } else {
+            self.tcx.emit_node_span_lint(
+                lint::builtin::NON_CONTIGUOUS_RANGE_ENDPOINTS,
+                self.match_lint_level,
+                thir_pat.span,
+                errors::ExclusiveRangeMissingGap {
+                    // Point at this range.
+                    first_range: thir_pat.span,
+                    // That's the gap that isn't covered.
+                    gap: gap_as_pat.to_string(),
+                    // Suggest `lo..=gap` instead.
+                    suggestion: suggested_range,
+                    // All these ranges skipped over `gap` which we think is probably a
+                    // mistake.
+                    gap_with: gapped_with
+                        .iter()
+                        .map(|pat| errors::GappedRange {
+                            span: pat.data().span,
+                            gap: gap_as_pat.to_string(),
+                            first_range: range.to_string(),
+                        })
+                        .collect(),
+                },
+            );
+        }
+    }
+}
+
+/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
+fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
+    fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
+        if let PatKind::Or { pats } = &pat.kind {
+            for pat in pats.iter() {
+                expand(pat, vec);
+            }
+        } else {
+            vec.push(pat)
+        }
+    }
+
+    let mut pats = Vec::new();
+    expand(pat, &mut pats);
+    pats
+}
+
+/// The entrypoint for this crate. Computes whether a match is exhaustive and which of its arms are
+/// useful, and runs some lints.
+pub fn analyze_match<'p, 'tcx>(
+    tycx: &RustcPatCtxt<'p, 'tcx>,
+    arms: &[MatchArm<'p, 'tcx>],
+    scrut_ty: Ty<'tcx>,
+    pattern_complexity_limit: Option<usize>,
+) -> Result<UsefulnessReport<'p, 'tcx>, ErrorGuaranteed> {
+    let scrut_ty = tycx.reveal_opaque_ty(scrut_ty);
+    let scrut_validity = PlaceValidity::from_bool(tycx.known_valid_scrutinee);
+    let report =
+        compute_match_usefulness(tycx, arms, scrut_ty, scrut_validity, pattern_complexity_limit)?;
+
+    // Run the non_exhaustive_omitted_patterns lint. Only run on refutable patterns to avoid hitting
+    // `if let`s. Only run if the match is exhaustive otherwise the error is redundant.
+    if tycx.refutable && report.non_exhaustiveness_witnesses.is_empty() {
+        let pat_column = PatternColumn::new(arms);
+        lint_nonexhaustive_missing_variants(tycx, arms, &pat_column, scrut_ty)?;
+    }
+
+    Ok(report)
+}
diff --git a/compiler/rustc_pattern_analysis/src/rustc/print.rs b/compiler/rustc_pattern_analysis/src/rustc/print.rs
new file mode 100644
index 00000000000..4b76764e8b1
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/rustc/print.rs
@@ -0,0 +1,193 @@
+//! Pattern analysis sometimes wants to print patterns as part of a user-visible
+//! diagnostic.
+//!
+//! Historically it did so by creating a synthetic [`thir::Pat`](rustc_middle::thir::Pat)
+//! and printing that, but doing so was making it hard to modify the THIR pattern
+//! representation for other purposes.
+//!
+//! So this module contains a forked copy of `thir::Pat` that is used _only_
+//! for diagnostics, and has been partly simplified to remove things that aren't
+//! needed for printing.
+
+use std::fmt;
+
+use rustc_middle::thir::PatRange;
+use rustc_middle::ty::{self, AdtDef, Ty};
+use rustc_middle::{bug, mir};
+use rustc_span::sym;
+use rustc_target::abi::{FieldIdx, VariantIdx};
+
+#[derive(Clone, Debug)]
+pub(crate) struct FieldPat<'tcx> {
+    pub(crate) field: FieldIdx,
+    pub(crate) pattern: Box<Pat<'tcx>>,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Pat<'tcx> {
+    pub(crate) ty: Ty<'tcx>,
+    pub(crate) kind: PatKind<'tcx>,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum PatKind<'tcx> {
+    Wild,
+
+    Variant {
+        adt_def: AdtDef<'tcx>,
+        variant_index: VariantIdx,
+        subpatterns: Vec<FieldPat<'tcx>>,
+    },
+
+    Leaf {
+        subpatterns: Vec<FieldPat<'tcx>>,
+    },
+
+    Deref {
+        subpattern: Box<Pat<'tcx>>,
+    },
+
+    Constant {
+        value: mir::Const<'tcx>,
+    },
+
+    Range(Box<PatRange<'tcx>>),
+
+    Slice {
+        prefix: Box<[Box<Pat<'tcx>>]>,
+        slice: Option<Box<Pat<'tcx>>>,
+        suffix: Box<[Box<Pat<'tcx>>]>,
+    },
+
+    Never,
+}
+
+impl<'tcx> fmt::Display for Pat<'tcx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Printing lists is a chore.
+        let mut first = true;
+        let mut start_or_continue = |s| {
+            if first {
+                first = false;
+                ""
+            } else {
+                s
+            }
+        };
+        let mut start_or_comma = || start_or_continue(", ");
+
+        match self.kind {
+            PatKind::Wild => write!(f, "_"),
+            PatKind::Never => write!(f, "!"),
+            PatKind::Variant { ref subpatterns, .. } | PatKind::Leaf { ref subpatterns } => {
+                let variant_and_name = match self.kind {
+                    PatKind::Variant { adt_def, variant_index, .. } => ty::tls::with(|tcx| {
+                        let variant = adt_def.variant(variant_index);
+                        let adt_did = adt_def.did();
+                        let name = if tcx.get_diagnostic_item(sym::Option) == Some(adt_did)
+                            || tcx.get_diagnostic_item(sym::Result) == Some(adt_did)
+                        {
+                            variant.name.to_string()
+                        } else {
+                            format!("{}::{}", tcx.def_path_str(adt_def.did()), variant.name)
+                        };
+                        Some((variant, name))
+                    }),
+                    _ => self.ty.ty_adt_def().and_then(|adt_def| {
+                        if !adt_def.is_enum() {
+                            ty::tls::with(|tcx| {
+                                Some((adt_def.non_enum_variant(), tcx.def_path_str(adt_def.did())))
+                            })
+                        } else {
+                            None
+                        }
+                    }),
+                };
+
+                if let Some((variant, name)) = &variant_and_name {
+                    write!(f, "{name}")?;
+
+                    // Only for an ADT can we have `S {...}`,
+                    // which we handle separately here.
+                    if variant.ctor.is_none() {
+                        write!(f, " {{ ")?;
+
+                        let mut printed = 0;
+                        for p in subpatterns {
+                            if let PatKind::Wild = p.pattern.kind {
+                                continue;
+                            }
+                            let name = variant.fields[p.field].name;
+                            write!(f, "{}{}: {}", start_or_comma(), name, p.pattern)?;
+                            printed += 1;
+                        }
+
+                        let is_union = self.ty.ty_adt_def().is_some_and(|adt| adt.is_union());
+                        if printed < variant.fields.len() && (!is_union || printed == 0) {
+                            write!(f, "{}..", start_or_comma())?;
+                        }
+
+                        return write!(f, " }}");
+                    }
+                }
+
+                let num_fields =
+                    variant_and_name.as_ref().map_or(subpatterns.len(), |(v, _)| v.fields.len());
+                if num_fields != 0 || variant_and_name.is_none() {
+                    write!(f, "(")?;
+                    for i in 0..num_fields {
+                        write!(f, "{}", start_or_comma())?;
+
+                        // Common case: the field is where we expect it.
+                        if let Some(p) = subpatterns.get(i) {
+                            if p.field.index() == i {
+                                write!(f, "{}", p.pattern)?;
+                                continue;
+                            }
+                        }
+
+                        // Otherwise, we have to go looking for it.
+                        if let Some(p) = subpatterns.iter().find(|p| p.field.index() == i) {
+                            write!(f, "{}", p.pattern)?;
+                        } else {
+                            write!(f, "_")?;
+                        }
+                    }
+                    write!(f, ")")?;
+                }
+
+                Ok(())
+            }
+            PatKind::Deref { ref subpattern } => {
+                match self.ty.kind() {
+                    ty::Adt(def, _) if def.is_box() => write!(f, "box ")?,
+                    ty::Ref(_, _, mutbl) => {
+                        write!(f, "&{}", mutbl.prefix_str())?;
+                    }
+                    _ => bug!("{} is a bad Deref pattern type", self.ty),
+                }
+                write!(f, "{subpattern}")
+            }
+            PatKind::Constant { value } => write!(f, "{value}"),
+            PatKind::Range(ref range) => write!(f, "{range}"),
+            PatKind::Slice { ref prefix, ref slice, ref suffix } => {
+                write!(f, "[")?;
+                for p in prefix.iter() {
+                    write!(f, "{}{}", start_or_comma(), p)?;
+                }
+                if let Some(ref slice) = *slice {
+                    write!(f, "{}", start_or_comma())?;
+                    match slice.kind {
+                        PatKind::Wild => {}
+                        _ => write!(f, "{slice}")?,
+                    }
+                    write!(f, "..")?;
+                }
+                for p in suffix.iter() {
+                    write!(f, "{}{}", start_or_comma(), p)?;
+                }
+                write!(f, "]")
+            }
+        }
+    }
+}
diff --git a/compiler/rustc_pattern_analysis/src/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs
new file mode 100644
index 00000000000..9710c9e1303
--- /dev/null
+++ b/compiler/rustc_pattern_analysis/src/usefulness.rs
@@ -0,0 +1,1883 @@
+//! # Match exhaustiveness and redundancy algorithm
+//!
+//! This file contains the logic for exhaustiveness and usefulness checking for pattern-matching.
+//! Specifically, given a list of patterns in a match, we can tell whether:
+//! (a) a given pattern is redundant
+//! (b) the patterns cover every possible value for the type (exhaustiveness)
+//!
+//! The algorithm implemented here is inspired by the one described in [this
+//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however changed it in
+//! various ways to accommodate the variety of patterns that Rust supports. We thus explain our
+//! version here, without being as precise.
+//!
+//! Fun fact: computing exhaustiveness is NP-complete, because we can encode a SAT problem as an
+//! exhaustiveness problem. See [here](https://niedzejkob.p4.team/rust-np) for the fun details.
+//!
+//!
+//! # Summary
+//!
+//! The algorithm is given as input a list of patterns, one for each arm of a match, and computes
+//! the following:
+//! - a set of values that match none of the patterns (if any),
+//! - for each subpattern (taking into account or-patterns), whether removing it would change
+//!     anything about how the match executes, i.e. whether it is useful/not redundant.
+//!
+//! To a first approximation, the algorithm works by exploring all possible values for the type
+//! being matched on, and determining which arm(s) catch which value. To make this tractable we
+//! cleverly group together values, as we'll see below.
+//!
+//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
+//! usefulness for each subpattern and exhaustiveness for the whole match.
+//!
+//! In this page we explain the necessary concepts to understand how the algorithm works.
+//!
+//!
+//! # Usefulness
+//!
+//! The central concept of this file is the notion of "usefulness". Given some patterns `p_1 ..
+//! p_n`, a pattern `q` is said to be *useful* if there is a value that is matched by `q` and by
+//! none of the `p_i`. We write `usefulness(p_1 .. p_n, q)` for a function that returns a list of
+//! such values. The aim of this file is to compute it efficiently.
+//!
+//! This is enough to compute redundancy: a pattern in a `match` expression is redundant iff it is
+//! not useful w.r.t. the patterns above it:
+//! ```compile_fail,E0004
+//! # fn foo() {
+//! match Some(0u32) {
+//!     Some(0..100) => {},
+//!     Some(90..190) => {}, // useful: `Some(150)` is matched by this but not the branch above
+//!     Some(50..150) => {}, // redundant: all the values this matches are already matched by
+//!                          //   the branches above
+//!     None => {},          // useful: `None` is matched by this but not the branches above
+//! }
+//! # }
+//! ```
+//!
+//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
+//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
+//! are used to tell the user which values are missing.
+//! ```compile_fail,E0004
+//! # fn foo(x: Option<u32>) {
+//! match x {
+//!     None => {},
+//!     Some(0) => {},
+//!     // not exhaustive: `_` is useful because it matches `Some(1)`
+//! }
+//! # }
+//! ```
+//!
+//!
+//! # Constructors and fields
+//!
+//! In the value `Pair(Some(0), true)`, `Pair` is called the constructor of the value, and `Some(0)`
+//! and `true` are its fields. Every matchable value can be decomposed in this way. Examples of
+//! constructors are: `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor
+//! for a struct `Foo`), and `2` (the constructor for the number `2`).
+//!
+//! Each constructor takes a fixed number of fields; this is called its arity. `Pair` and `(,)` have
+//! arity 2, `Some` has arity 1, `None` and `42` have arity 0. Each type has a known set of
+//! constructors. Some types have many constructors (like `u64`) or even infinitely many (like
+//! `&str` and `&[T]`).
+//!
+//! Patterns are similar: `Pair(Some(_), _)` has constructor `Pair` and two fields. The difference
+//! is that we get some extra pattern-only constructors, namely: the wildcard `_`, variable
+//! bindings, integer ranges like `0..=10`, and variable-length slices like `[_, .., _]`. We treat
+//! or-patterns separately, see the dedicated section below.
+//!
+//! Now to check if a value `v` matches a pattern `p`, we check if `v`'s constructor matches `p`'s
+//! constructor, then recursively compare their fields if necessary. A few representative examples:
+//!
+//! - `matches!(v, _) := true`
+//! - `matches!((v0,  v1), (p0,  p1)) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
+//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
+//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
+//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
+//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
+//!
+//! Constructors and relevant operations are defined in the [`crate::constructor`] module. A
+//! representation of patterns that uses constructors is available in [`crate::pat`]. The question
+//! of whether a constructor is matched by another one is answered by
+//! [`Constructor::is_covered_by`].
+//!
+//! Note 1: variable bindings (like the `x` in `Some(x)`) match anything, so we treat them as wildcards.
+//! Note 2: this only applies to matchable values. For example a value of type `Rc<u64>` can't be
+//! deconstructed that way.
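+//!
+//! As a rough, purely illustrative sketch of this decomposition (the names `Ctor`, `Pattern`,
+//! `Value` and `matches` below are made up for the example; the crate's real types are
+//! [`Constructor`] and [`DeconstructedPat`], which handle many more cases):
+//!
+//! ```rust
+//! #[derive(PartialEq)]
+//! enum Ctor {
+//!     Variant(&'static str), // e.g. `Some`, `None`, `Pair`
+//!     Int(u64),              // e.g. `2`
+//!     Wildcard,              // pattern-only: `_` and variable bindings
+//! }
+//!
+//! struct Pattern { ctor: Ctor, fields: Vec<Pattern> }
+//! struct Value { ctor: Ctor, fields: Vec<Value> }
+//!
+//! // `matches!(v, p)`: compare constructors, then recurse into the fields.
+//! fn matches(v: &Value, p: &Pattern) -> bool {
+//!     match &p.ctor {
+//!         // A wildcard matches anything without inspecting the fields.
+//!         Ctor::Wildcard => true,
+//!         // Otherwise the constructors must agree and all the fields must match.
+//!         ctor if *ctor == v.ctor => {
+//!             v.fields.iter().zip(&p.fields).all(|(vf, pf)| matches(vf, pf))
+//!         }
+//!         _ => false,
+//!     }
+//! }
+//!
+//! // `matches!(Some(2), Some(_))` in this encoding:
+//! let v = Value {
+//!     ctor: Ctor::Variant("Some"),
+//!     fields: vec![Value { ctor: Ctor::Int(2), fields: vec![] }],
+//! };
+//! let p = Pattern {
+//!     ctor: Ctor::Variant("Some"),
+//!     fields: vec![Pattern { ctor: Ctor::Wildcard, fields: vec![] }],
+//! };
+//! assert!(matches(&v, &p));
+//! ```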
+//!
+//!
+//!
+//! # Specialization
+//!
+//! The examples in the previous section motivate the operation at the heart of the algorithm:
+//! "specialization". It captures this idea of "removing one layer of constructor".
+//!
+//! `specialize(c, p)` takes a value-only constructor `c` and a pattern `p`, and returns a
+//! pattern-tuple or nothing. It works as follows:
+//!
+//! - Specializing for the wrong constructor returns nothing
+//!
+//!   - `specialize(None, Some(p0)) := <nothing>`
+//!   - `specialize([,,,], [p0]) := <nothing>`
+//!
+//! - Specializing for the correct constructor returns a tuple of the fields
+//!
+//!   - `specialize(Variant1, Variant1(p0, p1, p2)) := (p0, p1, p2)`
+//!   - `specialize(Foo{ bar, baz, quz }, Foo { bar: p0, baz: p1, .. }) := (p0, p1, _)`
+//!   - `specialize([,,,], [p0, .., p1]) := (p0, _, _, p1)`
+//!
+//! We get the following property: for any values `v_1, .., v_n` of appropriate types, we have:
+//! ```text
+//! matches!(c(v_1, .., v_n), p)
+//! <=> specialize(c, p) returns something
+//!     && matches!((v_1, .., v_n), specialize(c, p))
+//! ```
+//!
+//! We also extend specialization to pattern-tuples by applying it to the first pattern:
+//! `specialize(c, (p_0, .., p_m)) := specialize(c, p_0) ++ (p_1, .., p_m)`
+//! where `++` is concatenation of tuples.
+//!
+//!
+//! The previous property extends to pattern-tuples:
+//! ```text
+//! matches!((c(v_1, .., v_n), w_1, .., w_m), (p_0, p_1, .., p_m))
+//! <=> specialize(c, p_0) returns something
+//!     && matches!((v_1, .., v_n, w_1, .., w_m), specialize(c, (p_0, p_1, .., p_m)))
+//! ```
+//!
+//! Whether specialization returns something or not is given by [`Constructor::is_covered_by`].
+//! Specialization of a pattern is computed in [`DeconstructedPat::specialize`]. Specialization for
+//! a pattern-tuple is computed in [`PatStack::pop_head_constructor`]. Finally, specialization for a
+//! set of pattern-tuples is computed in [`Matrix::specialize_constructor`].
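+//!
+//! To make the shape of the operation concrete, here is a toy `specialize` using the same
+//! simplified encoding as the sketch in the previous section (illustrative only; the real logic
+//! lives in [`DeconstructedPat::specialize`]):
+//!
+//! ```rust
+//! #[derive(Clone, PartialEq)]
+//! enum Ctor {
+//!     Variant(&'static str),
+//!     Int(u64),
+//!     Wildcard,
+//! }
+//!
+//! #[derive(Clone)]
+//! struct Pattern { ctor: Ctor, fields: Vec<Pattern> }
+//!
+//! fn wildcard() -> Pattern {
+//!     Pattern { ctor: Ctor::Wildcard, fields: vec![] }
+//! }
+//!
+//! // `specialize(c, p)` where `c` has `arity` fields: `None` if `p` cannot match values built
+//! // with `c`, otherwise the tuple of `p`'s fields (a wildcard expands into one `_` per field).
+//! fn specialize(c: &Ctor, arity: usize, p: &Pattern) -> Option<Vec<Pattern>> {
+//!     match &p.ctor {
+//!         Ctor::Wildcard => Some(vec![wildcard(); arity]),
+//!         ctor if ctor == c => Some(p.fields.clone()),
+//!         _ => None,
+//!     }
+//! }
+//!
+//! // `specialize(Some, Some(0))` returns `(0)`; `specialize(None, Some(0))` returns nothing.
+//! let some_zero = Pattern {
+//!     ctor: Ctor::Variant("Some"),
+//!     fields: vec![Pattern { ctor: Ctor::Int(0), fields: vec![] }],
+//! };
+//! assert!(specialize(&Ctor::Variant("Some"), 1, &some_zero).is_some());
+//! assert!(specialize(&Ctor::Variant("None"), 0, &some_zero).is_none());
+//! ```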
+//!
+//!
+//!
+//! # Undoing specialization
+//!
+//! To construct witnesses we will need an inverse of specialization. If `c` is a constructor of
+//! arity `n`, we define `unspecialize` as:
+//! `unspecialize(c, (p_1, .., p_n, q_1, .., q_m)) := (c(p_1, .., p_n), q_1, .., q_m)`.
+//!
+//! This is done for a single witness-tuple in [`WitnessStack::apply_constructor`], and for a set of
+//! witness-tuples in [`WitnessMatrix::apply_constructor`].
+//!
+//!
+//!
+//! # Computing usefulness
+//!
+//! We now present a naive version of the algorithm for computing usefulness. From now on we operate
+//! on pattern-tuples.
+//!
+//! Let `tp_1, .., tp_n` and `tq` be length-m tuples of patterns for the same type `(T_1, .., T_m)`.
+//! We compute `usefulness(tp_1, .., tp_n, tq)` as follows:
+//!
+//! - Base case: `m == 0`.
+//!     The pattern-tuples are all empty, i.e. they're all `()`. Thus `tq` is useful iff there are
+//!     no rows above it, i.e. if `n == 0`. In that case we return `()` as a witness-tuple of
+//!     usefulness of `tq`.
+//!
+//! - Inductive case: `m > 0`.
+//!     In this naive version, we list all the possible constructors for values of type `T1` (we
+//!     will be more clever in the next section).
+//!
+//!     - For each such constructor `c` for which `specialize(c, tq)` is not nothing:
+//!         - We recursively compute `usefulness(specialize(c, tp_1) ... specialize(c, tp_n), specialize(c, tq))`,
+//!             where we discard any `specialize(c, p_i)` that returns nothing.
+//!             where we discard any `specialize(c, tp_i)` that returns nothing.
+//!
+//!     - We return all the witnesses found, if any.
+//!
+//!
+//! Let's take the following example:
+//! ```compile_fail,E0004
+//! # enum Enum { Variant1(()), Variant2(Option<bool>, u32)}
+//! # use Enum::*;
+//! # fn foo(x: Enum) {
+//! match x {
+//!     Variant1(_) => {} // `p1`
+//!     Variant2(None, 0) => {} // `p2`
+//!     Variant2(Some(true), 0) => {} // `q`
+//! }
+//! # }
+//! ```
+//!
+//! To compute the usefulness of `q`, we would proceed as follows:
+//! ```text
+//! Start:
+//!   `tp1 = [Variant1(_)]`
+//!   `tp2 = [Variant2(None, 0)]`
+//!   `tq  = [Variant2(Some(true), 0)]`
+//!
+//!   Constructors are `Variant1` and `Variant2`. Only `Variant2` can specialize `tq`.
+//!   Specialize with `Variant2`:
+//!     `tp2 = [None, 0]`
+//!     `tq  = [Some(true), 0]`
+//!
+//!     Constructors are `None` and `Some`. Only `Some` can specialize `tq`.
+//!     Specialize with `Some`:
+//!       `tq  = [true, 0]`
+//!
+//!       Constructors are `false` and `true`. Only `true` can specialize `tq`.
+//!       Specialize with `true`:
+//!         `tq  = [0]`
+//!
+//!         Constructors are `0`, `1`, .. up to infinity. Only `0` can specialize `tq`.
+//!         Specialize with `0`:
+//!           `tq  = []`
+//!
+//!           m == 0 and n == 0, so `tq` is useful with witness `[]`.
+//!             `witness  = []`
+//!
+//!         Unspecialize with `0`:
+//!           `witness  = [0]`
+//!       Unspecialize with `true`:
+//!         `witness  = [true, 0]`
+//!     Unspecialize with `Some`:
+//!       `witness  = [Some(true), 0]`
+//!   Unspecialize with `Variant2`:
+//!     `witness  = [Variant2(Some(true), 0)]`
+//! ```
+//!
+//! Therefore `usefulness(tp_1, tp_2, tq)` returns the single witness-tuple `[Variant2(Some(true), 0)]`.
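+//!
+//! To make the recursion concrete, here is a runnable toy version of this naive algorithm. To keep
+//! it short it only handles tuples of booleans, so every constructor has arity 0 and "specializing"
+//! just filters rows and drops the first column; the real implementation
+//! ([`compute_exhaustiveness_and_usefulness`]) handles arbitrary constructors, arities and
+//! constructor splitting.
+//!
+//! ```rust
+//! #[derive(Clone, Copy, PartialEq)]
+//! enum BoolPat {
+//!     Lit(bool), // the constructors `true` and `false`
+//!     Wild,      // `_`
+//! }
+//!
+//! // Does this pattern cover the constructor `b`?
+//! fn covers(p: BoolPat, b: bool) -> bool {
+//!     matches!(p, BoolPat::Wild) || p == BoolPat::Lit(b)
+//! }
+//!
+//! // Naive usefulness: `rows` and `q` are pattern-tuples of equal length. Returns the values
+//! // (tuples of booleans) that are matched by `q` but by none of the rows.
+//! fn usefulness(rows: &[Vec<BoolPat>], q: &[BoolPat]) -> Vec<Vec<bool>> {
+//!     // Base case: empty tuples. `q` is useful iff there is no row left.
+//!     if q.is_empty() {
+//!         return if rows.is_empty() { vec![vec![]] } else { vec![] };
+//!     }
+//!     let mut witnesses = vec![];
+//!     // Inductive case: try every constructor of the first column's type.
+//!     for b in [true, false] {
+//!         // `specialize(b, q)` returns nothing if `q` cannot match `b`.
+//!         if !covers(q[0], b) {
+//!             continue;
+//!         }
+//!         // Keep the rows that can match `b` and drop their first column.
+//!         let specialized: Vec<Vec<BoolPat>> = rows
+//!             .iter()
+//!             .filter(|row| covers(row[0], b))
+//!             .map(|row| row[1..].to_vec())
+//!             .collect();
+//!         // Recurse, then "unspecialize" each witness by putting `b` back in front.
+//!         for mut w in usefulness(&specialized, &q[1..]) {
+//!             w.insert(0, b);
+//!             witnesses.push(w);
+//!         }
+//!     }
+//!     witnesses
+//! }
+//!
+//! // Exhaustiveness of `match (bool, bool) { (true, _) => .., (_, true) => .. }`: is the wildcard
+//! // row `(_, _)` useful? The single witness found is `(false, false)`.
+//! let rows = vec![
+//!     vec![BoolPat::Lit(true), BoolPat::Wild],
+//!     vec![BoolPat::Wild, BoolPat::Lit(true)],
+//! ];
+//! let wildcard_row = vec![BoolPat::Wild, BoolPat::Wild];
+//! assert_eq!(usefulness(&rows, &wildcard_row), vec![vec![false, false]]);
+//! ```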
+//!
+//!
+//! Computing the set of constructors for a type is done in [`PatCx::ctors_for_ty`]. See
+//! the following sections for more accurate versions of the algorithm and corresponding links.
+//!
+//!
+//!
+//! # Computing usefulness and exhaustiveness in one go
+//!
+//! The algorithm we have described so far computes usefulness of each pattern in turn, and ends by
+//! checking if `_` is useful to determine exhaustiveness of the whole match. In practice, instead
+//! of doing "for each pattern { for each constructor { ... } }", we do "for each constructor { for
+//! each pattern { ... } }". This allows us to compute everything in one go.
+//!
+//! [`Matrix`] stores the set of pattern-tuples under consideration. We track usefulness of each
+//! row mutably in the matrix as we go along. We ignore witnesses of usefulness of the match rows.
+//! We gather witnesses of the usefulness of `_` in [`WitnessMatrix`]. The algorithm that computes
+//! all this is in [`compute_exhaustiveness_and_usefulness`].
+//!
+//! See the full example at the bottom of this documentation.
+//!
+//!
+//!
+//! # Making usefulness tractable: constructor splitting
+//!
+//! We're missing one last detail: which constructors do we list? Naively listing all value
+//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The final
+//! clever idea for this algorithm is that we can group together constructors that behave the same.
+//!
+//! Examples:
+//! ```compile_fail,E0004
+//! match (0, false) {
+//!     (0 ..=100, true) => {}
+//!     (50..=150, false) => {}
+//!     (0 ..=200, _) => {}
+//! }
+//! ```
+//!
+//! In this example, trying any of `0`, `1`, .., `49` will give the same specialized matrix, and
+//! thus the same usefulness/exhaustiveness results. We can thus accelerate the algorithm by
+//! trying them all at once. Here in fact, the only cases we need to consider are: `0..50`,
+//! `50..=100`, `101..=150`, `151..=200` and `201..`.
+//!
+//! ```
+//! enum Direction { North, South, East, West }
+//! # let wind = (Direction::North, 0u8);
+//! match wind {
+//!     (Direction::North, 50..) => {}
+//!     (_, _) => {}
+//! }
+//! ```
+//!
+//! In this example, trying any of `South`, `East`, `West` will give the same specialized matrix. By
+//! the same reasoning, we only need to try two cases: `North`, and "everything else".
+//!
+//! We call _constructor splitting_ the operation that computes such a minimal set of cases to try.
+//! This is done in [`ConstructorSet::split`] and explained in [`crate::constructor`].
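+//!
+//! As a toy illustration of range splitting (the real version is [`IntRange::split`], which also
+//! handles signedness, infinite bounds, and tracking which ranges are present or missing), one can
+//! collect the boundaries of the column's ranges and cut the type's full range at each of them:
+//!
+//! ```rust
+//! // Inclusive ranges `lo..=hi` over a small unsigned domain `0..=255`.
+//! fn split_ranges(column: &[(u16, u16)]) -> Vec<(u16, u16)> {
+//!     // Work with half-open boundaries: a range `lo..=hi` contributes `lo` and `hi + 1`.
+//!     let mut boundaries: Vec<u16> = vec![0, 256];
+//!     for &(lo, hi) in column {
+//!         boundaries.push(lo);
+//!         boundaries.push(hi + 1);
+//!     }
+//!     boundaries.sort_unstable();
+//!     boundaries.dedup();
+//!     // Each pair of consecutive boundaries yields one output range; by construction it is
+//!     // either disjoint from or included in every range of the column.
+//!     boundaries.windows(2).map(|w| (w[0], w[1] - 1)).collect()
+//! }
+//!
+//! // The column `0..=100`, `50..=150`, `0..=200` from the example above splits into
+//! // `0..=49`, `50..=100`, `101..=150`, `151..=200` and `201..=255`.
+//! let expected: Vec<(u16, u16)> = vec![(0, 49), (50, 100), (101, 150), (151, 200), (201, 255)];
+//! assert_eq!(split_ranges(&[(0, 100), (50, 150), (0, 200)]), expected);
+//! ```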
+//!
+//!
+//!
+//! # `Missing` and relevancy
+//!
+//! ## Relevant values
+//!
+//! Take the following example:
+//!
+//! ```compile_fail,E0004
+//! # let foo = (true, true);
+//! match foo {
+//!     (true, _) => 1,
+//!     (_, true) => 2,
+//! };
+//! ```
+//!
+//! Consider the value `(true, true)`:
+//! - Row 2 does not distinguish `(true, true)` and `(false, true)`;
+//! - `false` does not show up in the first column of the match, so without knowing anything else we
+//!     can deduce that `(false, true)` matches the same or fewer rows than `(true, true)`.
+//!
+//! Using those two facts together, we deduce that `(true, true)` will not give us more usefulness
+//! information about row 2 than `(false, true)` would. We say that "`(true, true)` is made
+//! irrelevant for row 2 by `(false, true)`". We will use this idea to prune the search tree.
+//!
+//!
+//! ## Computing relevancy
+//!
+//! We now generalize from the above example to approximate relevancy in a simple way. Note that we
+//! will only compute an approximation: we can sometimes determine when a case is irrelevant, but
+//! computing this precisely is at least as hard as computing usefulness.
+//!
+//! Our computation of relevancy relies on the `Missing` constructor. As explained in
+//! [`crate::constructor`], `Missing` represents the constructors not present in a given column. For
+//! example in the following:
+//!
+//! ```compile_fail,E0004
+//! enum Direction { North, South, East, West }
+//! # let wind = (Direction::North, 0u8);
+//! match wind {
+//!     (Direction::North, _) => 1,
+//!     (_, 50..) => 2,
+//! };
+//! ```
+//!
+//! Here `South`, `East` and `West` are missing in the first column, and `0..50` is missing in the
+//! second. Both of these sets are represented by `Constructor::Missing` in their corresponding
+//! column.
+//!
+//! We then compute relevancy as follows: during the course of the algorithm, for a row `r`:
+//! - if `r` has a wildcard in the first column;
+//! - and some constructors are missing in that column;
+//! - then any `c != Missing` is considered irrelevant for row `r`.
+//!
+//! By this we mean that continuing the algorithm by specializing with `c` is guaranteed not to
+//! contribute more information about the usefulness of row `r` than what we would get by
+//! specializing with `Missing`. The argument is the same as in the previous subsection.
+//!
+//! Once we've specialized by a constructor `c` that is irrelevant for row `r`, we're guaranteed to
+//! only explore values irrelevant for `r`. If we then ever reach a point where we're only exploring
+//! values that are irrelevant to all of the rows (including the virtual wildcard row used for
+//! exhaustiveness), we skip that case entirely.
+//!
+//!
+//! ## Example
+//!
+//! Let's go through a variation on the first example:
+//!
+//! ```compile_fail,E0004
+//! # let foo = (true, true, true);
+//! match foo {
+//!     (true, _, true) => 1,
+//!     (_, true, _) => 2,
+//! };
+//! ```
+//!
+//! ```text
+//!  ┐ Patterns:
+//!  │   1. `[(true, _, true)]`
+//!  │   2. `[(_, true, _)]`
+//!  │   3. `[_]` // virtual extra wildcard row
+//!  │
+//!  │ Specialize with `(,,)`:
+//!  ├─┐ Patterns:
+//!  │ │   1. `[true, _, true]`
+//!  │ │   2. `[_, true, _]`
+//!  │ │   3. `[_, _, _]`
+//!  │ │
+//!  │ │ There are missing constructors in the first column (namely `false`), hence
+//!  │ │ `true` is irrelevant for rows 2 and 3.
+//!  │ │
+//!  │ │ Specialize with `true`:
+//!  │ ├─┐ Patterns:
+//!  │ │ │   1. `[_, true]`
+//!  │ │ │   2. `[true, _]` // now exploring irrelevant cases
+//!  │ │ │   3. `[_, _]`    // now exploring irrelevant cases
+//!  │ │ │
+//!  │ │ │ There are missing constructors in the first column (namely `false`), hence
+//!  │ │ │ `true` is irrelevant for rows 1 and 3.
+//!  │ │ │
+//!  │ │ │ Specialize with `true`:
+//!  │ │ ├─┐ Patterns:
+//!  │ │ │ │   1. `[true]` // now exploring irrelevant cases
+//!  │ │ │ │   2. `[_]`    // now exploring irrelevant cases
+//!  │ │ │ │   3. `[_]`    // now exploring irrelevant cases
+//!  │ │ │ │
+//!  │ │ │ │ The current case is irrelevant for all rows: we backtrack immediately.
+//!  │ │ ├─┘
+//!  │ │ │
+//!  │ │ │ Specialize with `false`:
+//!  │ │ ├─┐ Patterns:
+//!  │ │ │ │   1. `[true]`
+//!  │ │ │ │   3. `[_]`    // now exploring irrelevant cases
+//!  │ │ │ │
+//!  │ │ │ │ Specialize with `true`:
+//!  │ │ │ ├─┐ Patterns:
+//!  │ │ │ │ │   1. `[]`
+//!  │ │ │ │ │   3. `[]`    // now exploring irrelevant cases
+//!  │ │ │ │ │
+//!  │ │ │ │ │ Row 1 is therefore useful.
+//!  │ │ │ ├─┘
+//! <etc...>
+//! ```
+//!
+//! Relevancy allowed us to skip the case `(true, true, _)` entirely. In some cases this pruning can
+//! give drastic speedups. The case this was built for is the following (#118437):
+//!
+//! ```ignore(illustrative)
+//! match foo {
+//!     (true, _, _, _, ..) => 1,
+//!     (_, true, _, _, ..) => 2,
+//!     (_, _, true, _, ..) => 3,
+//!     (_, _, _, true, ..) => 4,
+//!     ...
+//! }
+//! ```
+//!
+//! Without considering relevancy, we would explore all 2^n combinations of the `true` and `Missing`
+//! constructors. Relevancy tells us that e.g. `(true, true, false, false, false, ...)` is
+//! irrelevant for all the rows. This allows us to skip all cases with more than one `true`
+//! constructor, changing the runtime from exponential to linear.
+//!
+//!
+//! ## Relevancy and exhaustiveness
+//!
+//! For exhaustiveness, we do something slightly different w.r.t. relevancy: we do not report
+//! witnesses of non-exhaustiveness that are irrelevant for the virtual wildcard row. For example,
+//! in:
+//!
+//! ```ignore(illustrative)
+//! match foo {
+//!     (true, true) => {}
+//! }
+//! ```
+//!
+//! we only report `(false, _)` as missing. This was a deliberate choice made early in the
+//! development of Rust, for diagnostic and performance purposes. As shown in the previous section,
+//! ignoring irrelevant cases preserves usefulness, so this choice still correctly computes whether
+//! a match is exhaustive.
+//!
+//!
+//!
+//! # Or-patterns
+//!
+//! What we have described so far works well if there are no or-patterns. To handle them, if the
+//! first pattern of any row in the matrix is an or-pattern, we expand it by duplicating the rest of
+//! the row as necessary. For code reuse, this is implemented as "specializing with the `Or`
+//! constructor".
+//!
+//! This makes usefulness tracking subtle, because we also want to compute whether an alternative of
+//! an or-pattern is redundant, e.g. in `Some(_) | Some(0)`. We therefore track usefulness of each
+//! subpattern of the match.
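+//!
+//! For example (an illustrative case, not taken from the test suite), the second alternative of
+//! the or-pattern below is redundant on its own even though the arm as a whole is useful:
+//!
+//! ```rust
+//! # fn foo(x: Option<u32>) {
+//! match x {
+//!     // `Some(0)` is redundant: everything it matches is already matched by `Some(_)`.
+//!     Some(_) | Some(0) => {}
+//!     None => {}
+//! }
+//! # }
+//! ```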
+//!
+//!
+//!
+//! # Constants and opaques
+//!
+//! There are two kinds of constants in patterns:
+//!
+//! * literals (`1`, `true`, `"foo"`)
+//! * named or inline consts (`FOO`, `const { 5 + 6 }`)
+//!
+//! The latter are converted into the corresponding patterns by a previous phase. For example
+//! `const_to_pat(const { [1, 2, 3] })` becomes an `Array(vec![Const(1), Const(2), Const(3)])`
+//! pattern. This gets problematic when comparing the constant via `==` would behave differently
+//! from matching on the constant converted to a pattern. The situation around this is currently
+//! unclear and the lang team is working on clarifying what we want to do there. In any case, there
+//! are constants we will not turn into patterns. We capture these with `Constructor::Opaque`. These
+//! `Opaque` patterns do not participate in exhaustiveness, specialization or overlap checking.
+//!
+//!
+//!
+//! # Usefulness vs reachability, validity, and empty patterns
+//!
+//! This is likely the subtlest aspect of the algorithm. To be fully precise, a match doesn't
+//! operate on a value, it operates on a place. In certain unsafe circumstances, it is possible for
+//! a place to not contain valid data for its type. This has subtle consequences for empty types.
+//! Take the following:
+//!
+//! ```rust
+//! enum Void {}
+//! let x: u8 = 0;
+//! let ptr: *const Void = &x as *const u8 as *const Void;
+//! unsafe {
+//!     match *ptr {
+//!         _ => println!("Reachable!"),
+//!     }
+//! }
+//! ```
+//!
+//! In this example, `ptr` is a valid pointer pointing to a place with invalid data. The `_` pattern
+//! does not look at the contents of `*ptr`, so this is ok and the arm is taken. In other words,
+//! despite the place we are inspecting being of type `Void`, there is a reachable arm. If the
+//! arm had a binding however:
+//!
+//! ```rust
+//! # #[derive(Copy, Clone)]
+//! # enum Void {}
+//! # let x: u8 = 0;
+//! # let ptr: *const Void = &x as *const u8 as *const Void;
+//! # unsafe {
+//! match *ptr {
+//!     _a => println!("Unreachable!"),
+//! }
+//! # }
+//! ```
+//!
+//! Here the binding loads the value of type `Void` from the `*ptr` place. In this example, this
+//! causes UB since the data is not valid. In the general case, this asserts validity of the data at
+//! `*ptr`. Either way, this arm will never be taken.
+//!
+//! Finally, let's consider the empty match `match *ptr {}`. If we consider this exhaustive, then
+//! having invalid data at `*ptr` is invalid. In other words, the empty match is semantically
+//! equivalent to the `_a => ...` match. In the interest of explicitness, we prefer the case with an
+//! arm, hence we won't tell the user to remove the `_a` arm. In other words, the `_a` arm is
+//! unreachable yet not redundant. This is why we lint on redundant arms rather than unreachable
+//! arms, despite the fact that the lint says "unreachable".
+//!
+//! These considerations only affect certain places, namely those that can contain non-valid data
+//! without UB. These are: pointer dereferences, reference dereferences, and union field accesses.
+//! We track in the algorithm whether a given place is known to contain valid data. This is done
+//! first by inspecting the scrutinee syntactically (which gives us `cx.known_valid_scrutinee`), and
+//! then by tracking validity of each column of the matrix (which correspond to places) as we
+//! recurse into subpatterns. That second part is done through [`PlaceValidity`], most notably
+//! [`PlaceValidity::specialize`].
+//!
+//! Having said all that, in practice we don't fully follow what's been presented in this section.
+//! Let's call "toplevel exception" the case where the match scrutinee itself has type `!` or
+//! `EmptyEnum`. First, on stable Rust, we require `_` patterns for empty types in all cases apart
+//! from the toplevel exception. The `exhaustive_patterns` and `min_exhaustive_patterns` features allow
+//! omitting patterns in the cases described above. There's a final detail: in the toplevel
+//! exception or with the `exhaustive_patterns` feature, we ignore place validity when checking
+//! whether a pattern is required for exhaustiveness. I (Nadrieril) hope to deprecate this behavior.
+//!
+//!
+//!
+//! # Full example
+//!
+//! We illustrate a full run of the algorithm on the following match.
+//!
+//! ```compile_fail,E0004
+//! # struct Pair(Option<u32>, bool);
+//! # fn foo(x: Pair) -> u32 {
+//! match x {
+//!     Pair(Some(0), _) => 1,
+//!     Pair(_, false) => 2,
+//!     Pair(Some(0), false) => 3,
+//! }
+//! # }
+//! ```
+//!
+//! We keep track of the original row for illustration purposes; this is not what the algorithm
+//! actually does (it tracks usefulness as a boolean on each row).
+//!
+//! ```text
+//!  ┐ Patterns:
+//!  │   1. `[Pair(Some(0), _)]`
+//!  │   2. `[Pair(_, false)]`
+//!  │   3. `[Pair(Some(0), false)]`
+//!  │
+//!  │ Specialize with `Pair`:
+//!  ├─┐ Patterns:
+//!  │ │   1. `[Some(0), _]`
+//!  │ │   2. `[_, false]`
+//!  │ │   3. `[Some(0), false]`
+//!  │ │
+//!  │ │ Specialize with `Some`:
+//!  │ ├─┐ Patterns:
+//!  │ │ │   1. `[0, _]`
+//!  │ │ │   2. `[_, false]`
+//!  │ │ │   3. `[0, false]`
+//!  │ │ │
+//!  │ │ │ Specialize with `0`:
+//!  │ │ ├─┐ Patterns:
+//!  │ │ │ │   1. `[_]`
+//!  │ │ │ │   3. `[false]`
+//!  │ │ │ │
+//!  │ │ │ │ Specialize with `true`:
+//!  │ │ │ ├─┐ Patterns:
+//!  │ │ │ │ │   1. `[]`
+//!  │ │ │ │ │
+//!  │ │ │ │ │ We note arm 1 is useful (by `Pair(Some(0), true)`).
+//!  │ │ │ ├─┘
+//!  │ │ │ │
+//!  │ │ │ │ Specialize with `false`:
+//!  │ │ │ ├─┐ Patterns:
+//!  │ │ │ │ │   1. `[]`
+//!  │ │ │ │ │   3. `[]`
+//!  │ │ │ │ │
+//!  │ │ │ │ │ We note arm 1 is useful (by `Pair(Some(0), false)`).
+//!  │ │ │ ├─┘
+//!  │ │ ├─┘
+//!  │ │ │
+//!  │ │ │ Specialize with `1..`:
+//!  │ │ ├─┐ Patterns:
+//!  │ │ │ │   2. `[false]`
+//!  │ │ │ │
+//!  │ │ │ │ Specialize with `true`:
+//!  │ │ │ ├─┐ Patterns:
+//!  │ │ │ │ │   // no rows left
+//!  │ │ │ │ │
+//!  │ │ │ │ │ We have found an unmatched value (`Pair(Some(1..), true)`)! This gives us a witness.
+//!  │ │ │ │ │ New witnesses:
+//!  │ │ │ │ │   `[]`
+//!  │ │ │ ├─┘
+//!  │ │ │ │ Unspecialize new witnesses with `true`:
+//!  │ │ │ │   `[true]`
+//!  │ │ │ │
+//!  │ │ │ │ Specialize with `false`:
+//!  │ │ │ ├─┐ Patterns:
+//!  │ │ │ │ │   2. `[]`
+//!  │ │ │ │ │
+//!  │ │ │ │ │ We note arm 2 is useful (by `Pair(Some(1..), false)`).
+//!  │ │ │ ├─┘
+//!  │ │ │ │
+//!  │ │ │ │ Total witnesses for `1..`:
+//!  │ │ │ │   `[true]`
+//!  │ │ ├─┘
+//!  │ │ │ Unspecialize new witnesses with `1..`:
+//!  │ │ │   `[1.., true]`
+//!  │ │ │
+//!  │ │ │ Total witnesses for `Some`:
+//!  │ │ │   `[1.., true]`
+//!  │ ├─┘
+//!  │ │ Unspecialize new witnesses with `Some`:
+//!  │ │   `[Some(1..), true]`
+//!  │ │
+//!  │ │ Specialize with `None`:
+//!  │ ├─┐ Patterns:
+//!  │ │ │   2. `[false]`
+//!  │ │ │
+//!  │ │ │ Specialize with `true`:
+//!  │ │ ├─┐ Patterns:
+//!  │ │ │ │   // no rows left
+//!  │ │ │ │
+//!  │ │ │ │ We have found an unmatched value (`Pair(None, true)`)! This gives us a witness.
+//!  │ │ │ │ New witnesses:
+//!  │ │ │ │   `[]`
+//!  │ │ ├─┘
+//!  │ │ │ Unspecialize new witnesses with `true`:
+//!  │ │ │   `[true]`
+//!  │ │ │
+//!  │ │ │ Specialize with `false`:
+//!  │ │ ├─┐ Patterns:
+//!  │ │ │ │   2. `[]`
+//!  │ │ │ │
+//!  │ │ │ │ We note arm 2 is useful (by `Pair(None, false)`).
+//!  │ │ ├─┘
+//!  │ │ │
+//!  │ │ │ Total witnesses for `None`:
+//!  │ │ │   `[true]`
+//!  │ ├─┘
+//!  │ │ Unspecialize new witnesses with `None`:
+//!  │ │   `[None, true]`
+//!  │ │
+//!  │ │ Total witnesses for `Pair`:
+//!  │ │   `[Some(1..), true]`
+//!  │ │   `[None, true]`
+//!  ├─┘
+//!  │ Unspecialize new witnesses with `Pair`:
+//!  │   `[Pair(Some(1..), true)]`
+//!  │   `[Pair(None, true)]`
+//!  │
+//!  │ Final witnesses:
+//!  │   `[Pair(Some(1..), true)]`
+//!  │   `[Pair(None, true)]`
+//!  ┘
+//! ```
+//!
+//! We conclude:
+//! - Arm 3 is redundant (it was never marked as useful);
+//! - The match is not exhaustive;
+//! - Adding arms with `Pair(Some(1..), true)` and `Pair(None, true)` would make the match exhaustive.
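+//!
+//! For illustration, here is one exhaustive version of that match, built from the witnesses above
+//! (assuming the `Pair(Option<u32>, bool)` struct from the example; this is only a sketch, not
+//! something the algorithm produces):
+//!
+//! ```
+//! # struct Pair(Option<u32>, bool);
+//! # fn bar(x: Pair) -> u32 {
+//! match x {
+//!     Pair(Some(0), _) => 1,
+//!     Pair(_, false) => 2,
+//!     Pair(Some(1..), true) => 3,
+//!     Pair(None, true) => 4,
+//! }
+//! # }
+//! ```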
+//!
+//! Note that when we're deep in the algorithm, we don't know what specialization steps got us here.
+//! We can only figure out what our witnesses correspond to by unspecializing back up the stack.
+//!
+//!
+//! # Tests
+//!
+//! Note: tests specific to this file can be found in:
+//!
+//!   - `ui/pattern/usefulness`
+//!   - `ui/or-patterns`
+//!   - `ui/consts/const_in_pattern`
+//!   - `ui/rfc-2008-non-exhaustive`
+//!   - `ui/half-open-range-patterns`
+//!   - probably many others
+//!
+//! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific
+//! reason not to, for example if they crucially depend on a particular feature like `or_patterns`.
+
+use std::fmt;
+
+#[cfg(feature = "rustc")]
+use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_index::bit_set::BitSet;
+use smallvec::{smallvec, SmallVec};
+use tracing::{debug, instrument};
+
+use self::PlaceValidity::*;
+use crate::constructor::{Constructor, ConstructorSet, IntRange};
+use crate::pat::{DeconstructedPat, PatId, PatOrWild, WitnessPat};
+use crate::{Captures, MatchArm, PatCx, PrivateUninhabitedField};
+
+#[cfg(not(feature = "rustc"))]
+pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
+    f()
+}
+
+/// A pattern is a "branch" if it is the immediate child of an or-pattern, or if it is the whole
+/// pattern of a match arm. These are the patterns that can be meaningfully considered "redundant",
+/// since e.g. `0` in `(0, 1)` cannot be redundant on its own.
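+///
+/// For example (an illustrative sketch, not a test from the suite):
+/// ```rust,ignore(illustrative)
+/// match x {
+///     0 | 1 => {}
+///     1 | 2 => {}
+///     _ => {}
+/// }
+/// ```
+/// The branch patterns here are the arm patterns `0 | 1`, `1 | 2` and `_`, plus the or-pattern
+/// children `0`, `1`, `1` and `2`. The `1` in the second arm is redundant on its own even though
+/// its arm as a whole is useful.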
+///
+/// We track for each branch pattern whether it is useful, and if not why.
+struct BranchPatUsefulness<'p, Cx: PatCx> {
+    /// Whether this pattern is useful.
+    useful: bool,
+    /// A set of patterns that:
+    /// - come before this one in the match;
+    /// - intersect this one;
+    /// - at the end of the algorithm, if `!self.useful`, their union covers this pattern.
+    covered_by: FxHashSet<&'p DeconstructedPat<Cx>>,
+}
+
+impl<'p, Cx: PatCx> BranchPatUsefulness<'p, Cx> {
+    /// Update `self` with the usefulness information found in `row`.
+    fn update(&mut self, row: &MatrixRow<'p, Cx>, matrix: &Matrix<'p, Cx>) {
+        self.useful |= row.useful;
+        // This deserves an explanation: `intersects_at_least` does not contain all intersections
+        // because we skip irrelevant values (see the docs for `intersects_at_least` for an
+        // example). Yet we claim this suffices to build a covering set.
+        //
+        // Let `p` be our pattern. Assume it is found not useful. For a value `v`, if the value was
+        // relevant then we explored that value and found that there was another pattern `q` before
+        // `p` that matches it too. We therefore recorded an intersection with `q`. If `v` was
+        // irrelevant, we know there's another value `v2` that matches strictly fewer rows (while
+        // still matching our row) and is relevant. Since `p` is not useful, there must have been a
+        // `q` before `p` that matches `v2`, and we recorded that intersection. Since `v2` matches
+        // strictly fewer rows than `v`, `q` also matches `v`. In either case, we recorded in
+        // `intersects_at_least` a pattern that matches `v`. Hence using `intersects_at_least` is
+        // sufficient to build a covering set.
+        for row_id in row.intersects_at_least.iter() {
+            let row = &matrix.rows[row_id];
+            if row.useful && !row.is_under_guard {
+                if let PatOrWild::Pat(intersecting) = row.head() {
+                    self.covered_by.insert(intersecting);
+                }
+            }
+        }
+    }
+
+    /// Check whether this pattern is redundant, and if so explain why.
+    fn is_redundant(&self) -> Option<RedundancyExplanation<'p, Cx>> {
+        if self.useful {
+            None
+        } else {
+            // We avoid instability by sorting by `uid`. The order of `uid`s only depends on the
+            // pattern structure.
+            #[cfg_attr(feature = "rustc", allow(rustc::potential_query_instability))]
+            let mut covered_by: Vec<_> = self.covered_by.iter().copied().collect();
+            covered_by.sort_by_key(|pat| pat.uid); // sort to avoid instability
+            Some(RedundancyExplanation { covered_by })
+        }
+    }
+}
+
+impl<'p, Cx: PatCx> Default for BranchPatUsefulness<'p, Cx> {
+    fn default() -> Self {
+        Self { useful: Default::default(), covered_by: Default::default() }
+    }
+}
+
+/// Context that provides information for usefulness checking.
+struct UsefulnessCtxt<'a, 'p, Cx: PatCx> {
+    /// The context for type information.
+    tycx: &'a Cx,
+    /// Track information about the usefulness of branch patterns (see definition of "branch
+    /// pattern" at [`BranchPatUsefulness`]).
+    branch_usefulness: FxHashMap<PatId, BranchPatUsefulness<'p, Cx>>,
+    complexity_limit: Option<usize>,
+    complexity_level: usize,
+}
+
+impl<'a, 'p, Cx: PatCx> UsefulnessCtxt<'a, 'p, Cx> {
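+    /// Add `complexity_add` to the running complexity counter, and return
+    /// `Cx::complexity_exceeded()` if the total goes over `complexity_limit`.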
+    fn increase_complexity_level(&mut self, complexity_add: usize) -> Result<(), Cx::Error> {
+        self.complexity_level += complexity_add;
+        if self
+            .complexity_limit
+            .is_some_and(|complexity_limit| complexity_limit < self.complexity_level)
+        {
+            return self.tycx.complexity_exceeded();
+        }
+        Ok(())
+    }
+}
+
+/// Context that provides information local to a place under investigation.
+struct PlaceCtxt<'a, Cx: PatCx> {
+    cx: &'a Cx,
+    /// Type of the place under investigation.
+    ty: &'a Cx::Ty,
+}
+
+impl<'a, Cx: PatCx> Copy for PlaceCtxt<'a, Cx> {}
+impl<'a, Cx: PatCx> Clone for PlaceCtxt<'a, Cx> {
+    fn clone(&self) -> Self {
+        Self { cx: self.cx, ty: self.ty }
+    }
+}
+
+impl<'a, Cx: PatCx> fmt::Debug for PlaceCtxt<'a, Cx> {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt.debug_struct("PlaceCtxt").field("ty", self.ty).finish()
+    }
+}
+
+impl<'a, Cx: PatCx> PlaceCtxt<'a, Cx> {
+    fn ctor_arity(&self, ctor: &Constructor<Cx>) -> usize {
+        self.cx.ctor_arity(ctor, self.ty)
+    }
+    fn wild_from_ctor(&self, ctor: Constructor<Cx>) -> WitnessPat<Cx> {
+        WitnessPat::wild_from_ctor(self.cx, ctor, self.ty.clone())
+    }
+}
+
+/// Track whether a given place (aka column) is known to contain a valid value or not.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum PlaceValidity {
+    ValidOnly,
+    MaybeInvalid,
+}
+
+impl PlaceValidity {
+    pub fn from_bool(is_valid_only: bool) -> Self {
+        if is_valid_only { ValidOnly } else { MaybeInvalid }
+    }
+
+    fn is_known_valid(self) -> bool {
+        matches!(self, ValidOnly)
+    }
+
+    /// If the place has validity given by `self` and we read that the value at the place has
+    /// constructor `ctor`, this computes what we can assume about the validity of the constructor
+    /// fields.
+    ///
+    /// Pending further opsem decisions, the current behavior is: validity is preserved, except
+    /// inside `&` and union fields where validity is reset to `MaybeInvalid`.
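+    ///
+    /// For example (illustrative): if a place `x: &bool` is `ValidOnly`, specializing with the
+    /// `Ref` constructor yields `*x: bool` with validity `MaybeInvalid`, whereas specializing a
+    /// `ValidOnly` place of type `Option<bool>` with `Some` keeps the field `ValidOnly`.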
+    fn specialize<Cx: PatCx>(self, ctor: &Constructor<Cx>) -> Self {
+        // We preserve validity except when we go inside a reference or a union field.
+        if matches!(ctor, Constructor::Ref | Constructor::UnionField) {
+            // Validity of `x: &T` does not imply validity of `*x: T`.
+            MaybeInvalid
+        } else {
+            self
+        }
+    }
+}
+
+impl fmt::Display for PlaceValidity {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let s = match self {
+            ValidOnly => "✓",
+            MaybeInvalid => "?",
+        };
+        write!(f, "{s}")
+    }
+}
+
+/// Data about a place under investigation. Its methods contain a lot of the logic used to analyze
+/// the constructors in the matrix.
+struct PlaceInfo<Cx: PatCx> {
+    /// The type of the place.
+    ty: Cx::Ty,
+    /// Whether the place is a private uninhabited field. If so we skip this field during analysis
+    /// so that we don't observe its emptiness.
+    private_uninhabited: bool,
+    /// Whether the place is known to contain valid data.
+    validity: PlaceValidity,
+    /// Whether the place is the scrutinee itself or a subplace of it.
+    is_scrutinee: bool,
+}
+
+impl<Cx: PatCx> PlaceInfo<Cx> {
+    /// Given a constructor for the current place, we return one `PlaceInfo` for each field of the
+    /// constructor.
+    fn specialize<'a>(
+        &'a self,
+        cx: &'a Cx,
+        ctor: &'a Constructor<Cx>,
+    ) -> impl Iterator<Item = Self> + ExactSizeIterator + Captures<'a> {
+        let ctor_sub_tys = cx.ctor_sub_tys(ctor, &self.ty);
+        let ctor_sub_validity = self.validity.specialize(ctor);
+        ctor_sub_tys.map(move |(ty, PrivateUninhabitedField(private_uninhabited))| PlaceInfo {
+            ty,
+            private_uninhabited,
+            validity: ctor_sub_validity,
+            is_scrutinee: false,
+        })
+    }
+
+    /// This analyzes a column of constructors corresponding to the current place. It returns a pair
+    /// `(split_ctors, missing_ctors)`.
+    ///
+    /// `split_ctors` is a split list of constructors that cover the whole type. This will be
+    /// used to specialize the matrix.
+    ///
+    /// `missing_ctors` is a list of the constructors not found in the column, for reporting
+    /// purposes.
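+    ///
+    /// For example (an illustrative sketch, with constructors written informally):
+    /// ```text
+    /// column type:   Option<u32>
+    /// column ctors:  Some, Some      // e.g. from patterns `Some(0)` and `Some(5..)`
+    /// split_ctors:   [Some, Missing]
+    /// missing_ctors: [None]
+    /// ```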
+    fn split_column_ctors<'a>(
+        &self,
+        cx: &Cx,
+        ctors: impl Iterator<Item = &'a Constructor<Cx>> + Clone,
+    ) -> Result<(SmallVec<[Constructor<Cx>; 1]>, Vec<Constructor<Cx>>), Cx::Error>
+    where
+        Cx: 'a,
+    {
+        debug!(?self.ty);
+        if self.private_uninhabited {
+            // Skip the whole column
+            return Ok((smallvec![Constructor::PrivateUninhabited], vec![]));
+        }
+
+        if ctors.clone().any(|c| matches!(c, Constructor::Or)) {
+            // If any constructor is `Or`, we expand or-patterns.
+            return Ok((smallvec![Constructor::Or], vec![]));
+        }
+
+        let ctors_for_ty = cx.ctors_for_ty(&self.ty)?;
+        debug!(?ctors_for_ty);
+
+        // We treat match scrutinees of type `!` or `EmptyEnum` differently.
+        let is_toplevel_exception =
+            self.is_scrutinee && matches!(ctors_for_ty, ConstructorSet::NoConstructors);
+        // Whether empty patterns are counted as useful or not. We only warn that an empty arm is
+        // unreachable if it is guaranteed unreachable by the opsem (i.e. if the place is
+        // `known_valid`).
+        let empty_arms_are_unreachable = self.validity.is_known_valid()
+            && (is_toplevel_exception
+                || cx.is_exhaustive_patterns_feature_on()
+                || cx.is_min_exhaustive_patterns_feature_on());
+        // Whether empty patterns can be omitted for exhaustiveness. We ignore place validity in the
+        // toplevel exception and `exhaustive_patterns` cases for backwards compatibility.
+        let can_omit_empty_arms = empty_arms_are_unreachable
+            || is_toplevel_exception
+            || cx.is_exhaustive_patterns_feature_on();
+
+        // Analyze the constructors present in this column.
+        let mut split_set = ctors_for_ty.split(ctors);
+        debug!(?split_set);
+        let all_missing = split_set.present.is_empty();
+
+        // Build the set of constructors we will specialize with. It must cover the whole type, so
+        // we add `Missing` to represent the missing ones. This is explained under "Constructor
+        // Splitting" at the top of this file.
+        let mut split_ctors = split_set.present;
+        if !(split_set.missing.is_empty()
+            && (split_set.missing_empty.is_empty() || empty_arms_are_unreachable))
+        {
+            split_ctors.push(Constructor::Missing);
+        }
+
+        // Which empty constructors are considered missing. We ensure that
+        // `!missing_ctors.is_empty() => split_ctors.contains(Missing)`. The converse usually holds
+        // except when `!self.validity.is_known_valid()`.
+        let mut missing_ctors = split_set.missing;
+        if !can_omit_empty_arms {
+            missing_ctors.append(&mut split_set.missing_empty);
+        }
+
+        // Whether we should report "Enum::A and Enum::C are missing" or "_ is missing". At the top
+        // level we prefer to list all constructors.
+        let report_individual_missing_ctors = self.is_scrutinee || !all_missing;
+        if !missing_ctors.is_empty() && !report_individual_missing_ctors {
+            // Report `_` as missing.
+            missing_ctors = vec![Constructor::Wildcard];
+        } else if missing_ctors.iter().any(|c| c.is_non_exhaustive()) {
+            // We need to report a `_` anyway, so listing other constructors would be redundant.
+            // `NonExhaustive` is displayed as `_` just like `Wildcard`, but it will be picked
+            // up by diagnostics to add a note about why `_` is required here.
+            missing_ctors = vec![Constructor::NonExhaustive];
+        }
+
+        Ok((split_ctors, missing_ctors))
+    }
+}
+
+impl<Cx: PatCx> Clone for PlaceInfo<Cx> {
+    fn clone(&self) -> Self {
+        Self {
+            ty: self.ty.clone(),
+            private_uninhabited: self.private_uninhabited,
+            validity: self.validity,
+            is_scrutinee: self.is_scrutinee,
+        }
+    }
+}
+
+/// Represents a pattern-tuple under investigation.
+// The generic parameters are:
+// - 'p: the lifetime of the input patterns
+// - Cx: the global compilation context
+struct PatStack<'p, Cx: PatCx> {
+    // Rows of len 1 are very common, which is why `SmallVec<[_; 2]>` works well.
+    pats: SmallVec<[PatOrWild<'p, Cx>; 2]>,
+    /// Sometimes we know that as far as this row is concerned, the current case is already handled
+    /// by a different, more general, case. When the case is irrelevant for all rows this allows us
+    /// to skip a case entirely. This is purely an optimization. See at the top for details.
+    relevant: bool,
+}
+
+impl<'p, Cx: PatCx> Clone for PatStack<'p, Cx> {
+    fn clone(&self) -> Self {
+        Self { pats: self.pats.clone(), relevant: self.relevant }
+    }
+}
+
+impl<'p, Cx: PatCx> PatStack<'p, Cx> {
+    fn from_pattern(pat: &'p DeconstructedPat<Cx>) -> Self {
+        PatStack { pats: smallvec![PatOrWild::Pat(pat)], relevant: true }
+    }
+
+    fn len(&self) -> usize {
+        self.pats.len()
+    }
+
+    fn head(&self) -> PatOrWild<'p, Cx> {
+        self.pats[0]
+    }
+
+    fn iter(&self) -> impl Iterator<Item = PatOrWild<'p, Cx>> + Captures<'_> {
+        self.pats.iter().copied()
+    }
+
+    // Expand the first or-pattern into its subpatterns. Only useful if the pattern is an
+    // or-pattern. Panics if `self` is empty.
+    fn expand_or_pat(&self) -> impl Iterator<Item = PatStack<'p, Cx>> + Captures<'_> {
+        self.head().expand_or_pat().into_iter().map(move |pat| {
+            let mut new = self.clone();
+            new.pats[0] = pat;
+            new
+        })
+    }
+
+    /// This computes `specialize(ctor, self)`. See top of the file for explanations.
+    /// Only call if `ctor.is_covered_by(self.head().ctor())` is true.
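+    ///
+    /// For example (illustrative): specializing `[Some(1..), true]` with the `Some` constructor
+    /// (arity 1) yields `[1.., true]`, and specializing `[_, true]` with `Some` yields `[_, true]`
+    /// where the new `_` stands for the field of `Some`.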
+    fn pop_head_constructor(
+        &self,
+        cx: &Cx,
+        ctor: &Constructor<Cx>,
+        ctor_arity: usize,
+        ctor_is_relevant: bool,
+    ) -> Result<PatStack<'p, Cx>, Cx::Error> {
+        let head_pat = self.head();
+        if head_pat.as_pat().is_some_and(|pat| pat.arity() > ctor_arity) {
+            // Arity can be smaller in case of variable-length slices, but mustn't be larger.
+            return Err(cx.bug(format_args!(
+                "uncaught type error: pattern {:?} has inconsistent arity (expected arity <= {ctor_arity})",
+                head_pat.as_pat().unwrap()
+            )));
+        }
+        // We pop the head pattern and push the new fields extracted from the arguments of
+        // `self.head()`.
+        let mut new_pats = head_pat.specialize(ctor, ctor_arity);
+        new_pats.extend_from_slice(&self.pats[1..]);
+        // `ctor` is relevant for this row if it is the actual constructor of this row, or if the
+        // row has a wildcard and `ctor` is relevant for wildcards.
+        let ctor_is_relevant =
+            !matches!(self.head().ctor(), Constructor::Wildcard) || ctor_is_relevant;
+        Ok(PatStack { pats: new_pats, relevant: self.relevant && ctor_is_relevant })
+    }
+}
+
+impl<'p, Cx: PatCx> fmt::Debug for PatStack<'p, Cx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // We pretty-print similarly to the `Debug` impl of `Matrix`.
+        write!(f, "+")?;
+        for pat in self.iter() {
+            write!(f, " {pat:?} +")?;
+        }
+        Ok(())
+    }
+}
+
+/// A row of the matrix.
+#[derive(Clone)]
+struct MatrixRow<'p, Cx: PatCx> {
+    // The patterns in the row.
+    pats: PatStack<'p, Cx>,
+    /// Whether the original arm had a guard. This is inherited when specializing.
+    is_under_guard: bool,
+    /// When we specialize, we remember which row of the original matrix produced a given row of the
+    /// specialized matrix. When we unspecialize, we use this to propagate usefulness back up the
+    /// callstack. On creation, this stores the index of the original match arm.
+    parent_row: usize,
+    /// False when the matrix is just built. This is set to `true` by
+    /// [`compute_exhaustiveness_and_usefulness`] if the arm is found to be useful.
+    /// This is reset to `false` when specializing.
+    useful: bool,
+    /// Tracks some rows above this one that have an intersection with this one, i.e. such that
+    /// there is a value that matches both rows.
+    /// Because of relevancy we may miss some intersections. The intersections we do find are
+    /// correct. In other words, this is an underapproximation of the real set of intersections.
+    ///
+    /// For example:
+    /// ```rust,ignore(illustrative)
+    /// match ... {
+    ///     (true, _, _) => {} // `intersects_at_least = []`
+    ///     (_, true, 0..=10) => {} // `intersects_at_least = []`
+    ///     (_, true, 5..15) => {} // `intersects_at_least = [1]`
+    /// }
+    /// ```
+    /// Here the `(true, true)` case is irrelevant. Since we skip it, we will not detect that row 0
+    /// intersects rows 1 and 2.
+    intersects_at_least: BitSet<usize>,
+    /// Whether the head pattern is a branch (see definition of "branch pattern" at
+    /// [`BranchPatUsefulness`])
+    head_is_branch: bool,
+}
+
+impl<'p, Cx: PatCx> MatrixRow<'p, Cx> {
+    fn new(arm: &MatchArm<'p, Cx>, arm_id: usize) -> Self {
+        MatrixRow {
+            pats: PatStack::from_pattern(arm.pat),
+            parent_row: arm_id,
+            is_under_guard: arm.has_guard,
+            useful: false,
+            intersects_at_least: BitSet::new_empty(0), // Initialized in `Matrix::push`.
+            // This pattern is a branch because it comes from a match arm.
+            head_is_branch: true,
+        }
+    }
+
+    fn len(&self) -> usize {
+        self.pats.len()
+    }
+
+    fn head(&self) -> PatOrWild<'p, Cx> {
+        self.pats.head()
+    }
+
+    fn iter(&self) -> impl Iterator<Item = PatOrWild<'p, Cx>> + Captures<'_> {
+        self.pats.iter()
+    }
+
+    // Expand the first or-pattern (if any) into its subpatterns. Panics if `self` is empty.
+    fn expand_or_pat(
+        &self,
+        parent_row: usize,
+    ) -> impl Iterator<Item = MatrixRow<'p, Cx>> + Captures<'_> {
+        let is_or_pat = self.pats.head().is_or_pat();
+        self.pats.expand_or_pat().map(move |patstack| MatrixRow {
+            pats: patstack,
+            parent_row,
+            is_under_guard: self.is_under_guard,
+            useful: false,
+            intersects_at_least: BitSet::new_empty(0), // Initialized in `Matrix::push`.
+            head_is_branch: is_or_pat,
+        })
+    }
+
+    /// This computes `specialize(ctor, self)`. See top of the file for explanations.
+    /// Only call if `ctor.is_covered_by(self.head().ctor())` is true.
+    fn pop_head_constructor(
+        &self,
+        cx: &Cx,
+        ctor: &Constructor<Cx>,
+        ctor_arity: usize,
+        ctor_is_relevant: bool,
+        parent_row: usize,
+    ) -> Result<MatrixRow<'p, Cx>, Cx::Error> {
+        Ok(MatrixRow {
+            pats: self.pats.pop_head_constructor(cx, ctor, ctor_arity, ctor_is_relevant)?,
+            parent_row,
+            is_under_guard: self.is_under_guard,
+            useful: false,
+            intersects_at_least: BitSet::new_empty(0), // Initialized in `Matrix::push`.
+            head_is_branch: false,
+        })
+    }
+}
+
+impl<'p, Cx: PatCx> fmt::Debug for MatrixRow<'p, Cx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.pats.fmt(f)
+    }
+}
+
+/// A 2D matrix. Represents a list of pattern-tuples under investigation.
+///
+/// Invariant: each row must have the same length, and each column must have the same type.
+///
+/// Or-patterns in the first column are expanded during specialization:
+/// [`PlaceInfo::split_column_ctors`] returns [`Constructor::Or`], and
+/// [`Matrix::specialize_constructor`] then expands each row whose head is an or-pattern.
+///
+/// In fact each column corresponds to a place inside the scrutinee of the match. E.g. after
+/// specializing `(,)` and `Some` on a pattern of type `(Option<u32>, bool)`, the first column of
+/// the matrix will correspond to `scrutinee.0.Some.0` and the second column to `scrutinee.1`.
+#[derive(Clone)]
+struct Matrix<'p, Cx: PatCx> {
+    /// Vector of rows. The rows must form a rectangular 2D array. Moreover, all the patterns of
+    /// each column must have the same type. Each column corresponds to a place within the
+    /// scrutinee.
+    rows: Vec<MatrixRow<'p, Cx>>,
+    /// Track info about each place. Each place corresponds to a column in `rows`, and their types
+    /// must match.
+    place_info: SmallVec<[PlaceInfo<Cx>; 2]>,
+    /// Track whether the virtual wildcard row used to compute exhaustiveness is relevant. See top
+    /// of the file for details on relevancy.
+    wildcard_row_is_relevant: bool,
+}
+
+impl<'p, Cx: PatCx> Matrix<'p, Cx> {
+    /// Pushes a new row to the matrix. Internal method, prefer [`Matrix::new`].
+    fn push(&mut self, mut row: MatrixRow<'p, Cx>) {
+        row.intersects_at_least = BitSet::new_empty(self.rows.len());
+        self.rows.push(row);
+    }
+
+    /// Build a new matrix from an iterator of `MatchArm`s.
+    fn new(arms: &[MatchArm<'p, Cx>], scrut_ty: Cx::Ty, scrut_validity: PlaceValidity) -> Self {
+        let place_info = PlaceInfo {
+            ty: scrut_ty,
+            private_uninhabited: false,
+            validity: scrut_validity,
+            is_scrutinee: true,
+        };
+        let mut matrix = Matrix {
+            rows: Vec::with_capacity(arms.len()),
+            place_info: smallvec![place_info],
+            wildcard_row_is_relevant: true,
+        };
+        for (arm_id, arm) in arms.iter().enumerate() {
+            matrix.push(MatrixRow::new(arm, arm_id));
+        }
+        matrix
+    }
+
+    fn head_place(&self) -> Option<&PlaceInfo<Cx>> {
+        self.place_info.first()
+    }
+    fn column_count(&self) -> usize {
+        self.place_info.len()
+    }
+
+    fn rows(
+        &self,
+    ) -> impl Iterator<Item = &MatrixRow<'p, Cx>> + Clone + DoubleEndedIterator + ExactSizeIterator
+    {
+        self.rows.iter()
+    }
+    fn rows_mut(
+        &mut self,
+    ) -> impl Iterator<Item = &mut MatrixRow<'p, Cx>> + DoubleEndedIterator + ExactSizeIterator
+    {
+        self.rows.iter_mut()
+    }
+
+    /// Iterate over the first pattern of each row.
+    fn heads(&self) -> impl Iterator<Item = PatOrWild<'p, Cx>> + Clone + Captures<'_> {
+        self.rows().map(|r| r.head())
+    }
+
+    /// This computes `specialize(ctor, self)`. See top of the file for explanations.
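+    ///
+    /// For example (illustrative, reusing the `Pair` example from the top of the file),
+    /// specializing
+    /// ```text
+    /// + Pair(Some(0), _) +
+    /// + Pair(_, false)   +
+    /// ```
+    /// with the `Pair` constructor yields
+    /// ```text
+    /// + Some(0) + _     +
+    /// + _       + false +
+    /// ```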
+    fn specialize_constructor(
+        &self,
+        pcx: &PlaceCtxt<'_, Cx>,
+        ctor: &Constructor<Cx>,
+        ctor_is_relevant: bool,
+    ) -> Result<Matrix<'p, Cx>, Cx::Error> {
+        if matches!(ctor, Constructor::Or) {
+            // Specializing with `Or` means expanding rows with or-patterns.
+            let mut matrix = Matrix {
+                rows: Vec::new(),
+                place_info: self.place_info.clone(),
+                wildcard_row_is_relevant: self.wildcard_row_is_relevant,
+            };
+            for (i, row) in self.rows().enumerate() {
+                for new_row in row.expand_or_pat(i) {
+                    matrix.push(new_row);
+                }
+            }
+            Ok(matrix)
+        } else {
+            let subfield_place_info = self.place_info[0].specialize(pcx.cx, ctor);
+            let arity = subfield_place_info.len();
+            let specialized_place_info =
+                subfield_place_info.chain(self.place_info[1..].iter().cloned()).collect();
+            let mut matrix = Matrix {
+                rows: Vec::new(),
+                place_info: specialized_place_info,
+                wildcard_row_is_relevant: self.wildcard_row_is_relevant && ctor_is_relevant,
+            };
+            for (i, row) in self.rows().enumerate() {
+                if ctor.is_covered_by(pcx.cx, row.head().ctor())? {
+                    let new_row =
+                        row.pop_head_constructor(pcx.cx, ctor, arity, ctor_is_relevant, i)?;
+                    matrix.push(new_row);
+                }
+            }
+            Ok(matrix)
+        }
+    }
+
+    /// Recover row usefulness and intersection information from a processed specialized matrix.
+    /// `specialized` must come from `self.specialize_constructor`.
+    fn unspecialize(&mut self, specialized: Self) {
+        for child_row in specialized.rows() {
+            let parent_row_id = child_row.parent_row;
+            let parent_row = &mut self.rows[parent_row_id];
+            // A parent row is useful if any of its children is.
+            parent_row.useful |= child_row.useful;
+            for child_intersection in child_row.intersects_at_least.iter() {
+                // Convert the intersecting ids into ids for the parent matrix.
+                let parent_intersection = specialized.rows[child_intersection].parent_row;
+                // Note: self-intersection can happen with or-patterns.
+                if parent_intersection != parent_row_id {
+                    parent_row.intersects_at_least.insert(parent_intersection);
+                }
+            }
+        }
+    }
+}
+
+/// Pretty-printer for matrices of patterns, example:
+///
+/// ```text
+/// + _     + []                +
+/// + true  + [First]           +
+/// + true  + [Second(true)]    +
+/// + false + [_]               +
+/// + _     + [_, _, tail @ ..] +
+/// | ✓     | ?                 | // validity
+/// ```
+impl<'p, Cx: PatCx> fmt::Debug for Matrix<'p, Cx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "\n")?;
+
+        let mut pretty_printed_matrix: Vec<Vec<String>> = self
+            .rows
+            .iter()
+            .map(|row| row.iter().map(|pat| format!("{pat:?}")).collect())
+            .collect();
+        pretty_printed_matrix
+            .push(self.place_info.iter().map(|place| format!("{}", place.validity)).collect());
+
+        let column_count = self.column_count();
+        assert!(self.rows.iter().all(|row| row.len() == column_count));
+        assert!(self.place_info.len() == column_count);
+        let column_widths: Vec<usize> = (0..column_count)
+            .map(|col| pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0))
+            .collect();
+
+        for (row_i, row) in pretty_printed_matrix.into_iter().enumerate() {
+            let is_validity_row = row_i == self.rows.len();
+            let sep = if is_validity_row { "|" } else { "+" };
+            write!(f, "{sep}")?;
+            for (column, pat_str) in row.into_iter().enumerate() {
+                write!(f, " ")?;
+                write!(f, "{:1$}", pat_str, column_widths[column])?;
+                write!(f, " {sep}")?;
+            }
+            if is_validity_row {
+                write!(f, " // validity")?;
+            }
+            write!(f, "\n")?;
+        }
+        Ok(())
+    }
+}
+
+/// A witness-tuple of non-exhaustiveness for error reporting, represented as a list of patterns (in
+/// reverse order of construction).
+///
+/// This mirrors `PatStack`: they function similarly, except `PatStack` contains user patterns we
+/// are inspecting, and `WitnessStack` contains witnesses we are constructing.
+/// FIXME(Nadrieril): use the same order of patterns for both.
+///
+/// A `WitnessStack` should have the same types and length as the `PatStack`s we are inspecting
+/// (except we store the patterns in reverse order). The same way `PatStack` starts with length 1,
+/// at the end of the algorithm this will have length 1. In the middle of the algorithm, it can
+/// contain multiple patterns.
+///
+/// For example, if we are constructing a witness for the match against
+///
+/// ```compile_fail,E0004
+/// struct Pair(Option<(u32, u32)>, bool);
+/// # fn foo(p: Pair) {
+/// match p {
+///    Pair(None, _) => {}
+///    Pair(_, false) => {}
+/// }
+/// # }
+/// ```
+///
+/// We'll perform the following steps (among others):
+/// ```text
+/// - Start with a matrix representing the match
+///     `PatStack(vec![Pair(None, _)])`
+///     `PatStack(vec![Pair(_, false)])`
+/// - Specialize with `Pair`
+///     `PatStack(vec![None, _])`
+///     `PatStack(vec![_, false])`
+/// - Specialize with `Some`
+///     `PatStack(vec![_, false])`
+/// - Specialize with `_`
+///     `PatStack(vec![false])`
+/// - Specialize with `true`
+///     // no patstacks left
+/// - This is a non-exhaustive match: we have the empty witness stack as a witness.
+///     `WitnessStack(vec![])`
+/// - Apply `true`
+///     `WitnessStack(vec![true])`
+/// - Apply `_`
+///     `WitnessStack(vec![true, _])`
+/// - Apply `Some`
+///     `WitnessStack(vec![true, Some(_)])`
+/// - Apply `Pair`
+///     `WitnessStack(vec![Pair(Some(_), true)])`
+/// ```
+///
+/// The final `Pair(Some(_), true)` is then the resulting witness.
+///
+/// See the top of the file for more detailed explanations and examples.
+#[derive(Debug)]
+struct WitnessStack<Cx: PatCx>(Vec<WitnessPat<Cx>>);
+
+impl<Cx: PatCx> Clone for WitnessStack<Cx> {
+    fn clone(&self) -> Self {
+        Self(self.0.clone())
+    }
+}
+
+impl<Cx: PatCx> WitnessStack<Cx> {
+    /// Asserts that the witness contains a single pattern, and returns it.
+    fn single_pattern(self) -> WitnessPat<Cx> {
+        assert_eq!(self.0.len(), 1);
+        self.0.into_iter().next().unwrap()
+    }
+
+    /// Reverses specialization by the `Missing` constructor by pushing a whole new pattern.
+    fn push_pattern(&mut self, pat: WitnessPat<Cx>) {
+        self.0.push(pat);
+    }
+
+    /// Reverses specialization. Given a witness obtained after specialization, this constructs a
+    /// new witness valid for before specialization. See the section on `unspecialize` at the top of
+    /// the file.
+    ///
+    /// Examples:
+    /// ```text
+    /// ctor: tuple of 2 elements
+    /// pats: [false, "foo", _, true]
+    /// result: [(false, "foo"), _, true]
+    ///
+    /// ctor: Enum::Variant { a: (bool, &'static str), b: usize }
+    /// pats: [(false, "foo"), _, true]
+    /// result: [Enum::Variant { a: (false, "foo"), b: _ }, true]
+    /// ```
+    fn apply_constructor(
+        mut self,
+        pcx: &PlaceCtxt<'_, Cx>,
+        ctor: &Constructor<Cx>,
+    ) -> SmallVec<[Self; 1]> {
+        let len = self.0.len();
+        let arity = pcx.ctor_arity(ctor);
+        let fields: Vec<_> = self.0.drain((len - arity)..).rev().collect();
+        if matches!(ctor, Constructor::UnionField)
+            && fields.iter().filter(|p| !matches!(p.ctor(), Constructor::Wildcard)).count() >= 2
+        {
+            // Convert a `Union { a: p, b: q }` witness into `Union { a: p }` and `Union { b: q }`.
+            // First add `Union { .. }` to `self`.
+            self.0.push(WitnessPat::wild_from_ctor(pcx.cx, ctor.clone(), pcx.ty.clone()));
+            fields
+                .into_iter()
+                .enumerate()
+                .filter(|(_, p)| !matches!(p.ctor(), Constructor::Wildcard))
+                .map(|(i, p)| {
+                    let mut ret = self.clone();
+                    // Fill the `i`th field of the union with `p`.
+                    ret.0.last_mut().unwrap().fields[i] = p;
+                    ret
+                })
+                .collect()
+        } else {
+            self.0.push(WitnessPat::new(ctor.clone(), fields, pcx.ty.clone()));
+            smallvec![self]
+        }
+    }
+}
+
+/// Represents a set of pattern-tuples that are witnesses of non-exhaustiveness for error
+/// reporting. This has similar invariants as `Matrix` does.
+///
+/// The `WitnessMatrix` returned by [`compute_exhaustiveness_and_usefulness`] obeys the invariant
+/// that the union of the input `Matrix` and the output `WitnessMatrix` together matches the type
+/// exhaustively.
+///
+/// Just as the `Matrix` starts with a single column, by the end of the algorithm, this has a single
+/// column, which contains the patterns that are missing for the match to be exhaustive.
+#[derive(Debug)]
+struct WitnessMatrix<Cx: PatCx>(Vec<WitnessStack<Cx>>);
+
+impl<Cx: PatCx> Clone for WitnessMatrix<Cx> {
+    fn clone(&self) -> Self {
+        Self(self.0.clone())
+    }
+}
+
+impl<Cx: PatCx> WitnessMatrix<Cx> {
+    /// New matrix with no witnesses.
+    fn empty() -> Self {
+        WitnessMatrix(Vec::new())
+    }
+    /// New matrix with one `()` witness, i.e. with no columns.
+    fn unit_witness() -> Self {
+        WitnessMatrix(vec![WitnessStack(Vec::new())])
+    }
+
+    /// Whether this has any witnesses.
+    fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+    /// Asserts that there is a single column and returns the patterns in it.
+    fn single_column(self) -> Vec<WitnessPat<Cx>> {
+        self.0.into_iter().map(|w| w.single_pattern()).collect()
+    }
+
+    /// Reverses specialization by the `Missing` constructor by pushing a whole new pattern.
+    fn push_pattern(&mut self, pat: WitnessPat<Cx>) {
+        for witness in self.0.iter_mut() {
+            witness.push_pattern(pat.clone())
+        }
+    }
+
+    /// Reverses specialization by `ctor`. See the section on `unspecialize` at the top of the file.
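+    ///
+    /// For example (illustrative): when `ctor` is `Missing` and the missing constructors are
+    /// `None` and `Some`, every witness currently in the matrix is duplicated, one copy extended
+    /// with `None` and the other with `Some(_)`.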
+    fn apply_constructor(
+        &mut self,
+        pcx: &PlaceCtxt<'_, Cx>,
+        missing_ctors: &[Constructor<Cx>],
+        ctor: &Constructor<Cx>,
+    ) {
+        // The `Or` constructor indicates that we expanded or-patterns. This doesn't affect
+        // witnesses.
+        if self.is_empty() || matches!(ctor, Constructor::Or) {
+            return;
+        }
+        if matches!(ctor, Constructor::Missing) {
+            // We got the special `Missing` constructor that stands for the constructors not present
+            // in the match. For each missing constructor `c`, we add a `c(_, _, _)` witness
+            // appropriately filled with wildcards.
+            let mut ret = Self::empty();
+            for ctor in missing_ctors {
+                let pat = pcx.wild_from_ctor(ctor.clone());
+                // Clone `self` and add `c(_, _, _)` to each of its witnesses.
+                let mut wit_matrix = self.clone();
+                wit_matrix.push_pattern(pat);
+                ret.extend(wit_matrix);
+            }
+            *self = ret;
+        } else {
+            // Any other constructor we unspecialize as expected.
+            for witness in std::mem::take(&mut self.0) {
+                self.0.extend(witness.apply_constructor(pcx, ctor));
+            }
+        }
+    }
+
+    /// Merges the witnesses of two matrices. Their column types must match.
+    fn extend(&mut self, other: Self) {
+        self.0.extend(other.0)
+    }
+}
+
+/// Collect ranges that overlap like `lo..=overlap`/`overlap..=hi`. Must be called during
+/// exhaustiveness checking, if we find a singleton range after constructor splitting. This reuses
+/// row intersection information to only detect ranges that truly overlap.
+///
+/// If two ranges overlapped, the split set will contain their intersection as a singleton.
+/// Specialization will then select rows that match the overlap, and exhaustiveness will compute
+/// which rows have an intersection that includes the overlap. That gives us all the info we need to
+/// compute overlapping ranges without false positives.
+///
+/// We can however get false negatives because exhaustiveness does not explore all cases. See the
+/// section on relevancy at the top of the file.
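+///
+/// For example (illustrative): in `match x { 0..=10 => {}, 10..=20 => {}, _ => {} }`, constructor
+/// splitting produces the singleton `10..=10`; both ranges match it and intersect there, so we
+/// lint that they overlap on the endpoint `10`.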
+fn collect_overlapping_range_endpoints<'p, Cx: PatCx>(
+    cx: &Cx,
+    overlap_range: IntRange,
+    matrix: &Matrix<'p, Cx>,
+    specialized_matrix: &Matrix<'p, Cx>,
+) {
+    let overlap = overlap_range.lo;
+    // Ranges that look like `lo..=overlap`.
+    let mut prefixes: SmallVec<[_; 1]> = Default::default();
+    // Ranges that look like `overlap..=hi`.
+    let mut suffixes: SmallVec<[_; 1]> = Default::default();
+    // Iterate on patterns that contained `overlap`. We iterate on `specialized_matrix` which
+    // contains only rows that matched the current `ctor` as well as accurate intersection
+    // information. It doesn't contain the column that contains the range; that can be found in
+    // `matrix`.
+    for (child_row_id, child_row) in specialized_matrix.rows().enumerate() {
+        let PatOrWild::Pat(pat) = matrix.rows[child_row.parent_row].head() else { continue };
+        let Constructor::IntRange(this_range) = pat.ctor() else { continue };
+        // Don't lint when one of the ranges is a singleton.
+        if this_range.is_singleton() {
+            continue;
+        }
+        if this_range.lo == overlap {
+            // `this_range` looks like `overlap..=this_range.hi`; it overlaps with any
+            // ranges that look like `lo..=overlap`.
+            if !prefixes.is_empty() {
+                let overlaps_with: Vec<_> = prefixes
+                    .iter()
+                    .filter(|&&(other_child_row_id, _)| {
+                        child_row.intersects_at_least.contains(other_child_row_id)
+                    })
+                    .map(|&(_, pat)| pat)
+                    .collect();
+                if !overlaps_with.is_empty() {
+                    cx.lint_overlapping_range_endpoints(pat, overlap_range, &overlaps_with);
+                }
+            }
+            suffixes.push((child_row_id, pat))
+        } else if Some(this_range.hi) == overlap.plus_one() {
+            // `this_range` looks like `this_range.lo..=overlap`; it overlaps with any
+            // ranges that look like `overlap..=hi`.
+            if !suffixes.is_empty() {
+                let overlaps_with: Vec<_> = suffixes
+                    .iter()
+                    .filter(|&&(other_child_row_id, _)| {
+                        child_row.intersects_at_least.contains(other_child_row_id)
+                    })
+                    .map(|&(_, pat)| pat)
+                    .collect();
+                if !overlaps_with.is_empty() {
+                    cx.lint_overlapping_range_endpoints(pat, overlap_range, &overlaps_with);
+                }
+            }
+            prefixes.push((child_row_id, pat))
+        }
+    }
+}
+
+/// Collect ranges that have a singleton gap between them.
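+///
+/// For example (illustrative): in `match x { 0..10 => {}, 11..=20 => {}, _ => {} }`, neither range
+/// covers `10`, so `gap_range` is the singleton `10..=10` and we lint that the two ranges are not
+/// contiguous.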
+fn collect_non_contiguous_range_endpoints<'p, Cx: PatCx>(
+    cx: &Cx,
+    gap_range: &IntRange,
+    matrix: &Matrix<'p, Cx>,
+) {
+    let gap = gap_range.lo;
+    // Ranges that look like `lo..gap`.
+    let mut onebefore: SmallVec<[_; 1]> = Default::default();
+    // Ranges that start on `gap+1` or singletons `gap+1`.
+    let mut oneafter: SmallVec<[_; 1]> = Default::default();
+    // Look through the column for ranges near the gap.
+    for pat in matrix.heads() {
+        let PatOrWild::Pat(pat) = pat else { continue };
+        let Constructor::IntRange(this_range) = pat.ctor() else { continue };
+        if gap == this_range.hi {
+            onebefore.push(pat)
+        } else if gap.plus_one() == Some(this_range.lo) {
+            oneafter.push(pat)
+        }
+    }
+
+    for pat_before in onebefore {
+        cx.lint_non_contiguous_range_endpoints(pat_before, *gap_range, oneafter.as_slice());
+    }
+}
+
+/// The core of the algorithm.
+///
+/// This recursively computes witnesses of the non-exhaustiveness of `matrix` (if any). Also tracks
+/// usefulness of each row in the matrix (in `row.useful`). We track usefulness of subpatterns in
+/// `mcx.branch_usefulness`.
+///
+/// The input `Matrix` and the output `WitnessMatrix` together match the type exhaustively.
+///
+/// The key steps are:
+/// - specialization, where we dig into the rows that have a specific constructor and call ourselves
+///     recursively;
+/// - unspecialization, where we lift the results from the previous step into results for this step
+///     (using `apply_constructor` and by updating `row.useful` for each parent row).
+/// This is all explained at the top of the file.
+#[instrument(level = "debug", skip(mcx), ret)]
+fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: PatCx>(
+    mcx: &mut UsefulnessCtxt<'a, 'p, Cx>,
+    matrix: &mut Matrix<'p, Cx>,
+) -> Result<WitnessMatrix<Cx>, Cx::Error> {
+    debug_assert!(matrix.rows().all(|r| r.len() == matrix.column_count()));
+
+    if !matrix.wildcard_row_is_relevant && matrix.rows().all(|r| !r.pats.relevant) {
+        // Here we know that nothing will contribute further to exhaustiveness or usefulness. This
+        // is purely an optimization: skipping this check doesn't affect correctness. See the top of
+        // the file for details.
+        return Ok(WitnessMatrix::empty());
+    }
+
+    let Some(place) = matrix.head_place() else {
+        mcx.increase_complexity_level(matrix.rows().len())?;
+        // The base case: there are no columns in the matrix. We are morally pattern-matching on ().
+        // A row is useful iff it has no (unguarded) rows above it.
+        let mut useful = true; // Whether the next row is useful.
+        for (i, row) in matrix.rows_mut().enumerate() {
+            row.useful = useful;
+            row.intersects_at_least.insert_range(0..i);
+            // The next row stays useful if this one is under a guard.
+            useful &= row.is_under_guard;
+        }
+        return if useful && matrix.wildcard_row_is_relevant {
+            // The wildcard row is useful; the match is non-exhaustive.
+            Ok(WitnessMatrix::unit_witness())
+        } else {
+            // Either the match is exhaustive, or we choose not to report anything because of
+            // relevancy. See at the top for details.
+            Ok(WitnessMatrix::empty())
+        };
+    };
+
+    // Analyze the constructors present in this column.
+    let ctors = matrix.heads().map(|p| p.ctor());
+    let (split_ctors, missing_ctors) = place.split_column_ctors(mcx.tycx, ctors)?;
+
+    let ty = &place.ty.clone(); // Clone it out so we can mutate `matrix` later.
+    let pcx = &PlaceCtxt { cx: mcx.tycx, ty };
+    let mut ret = WitnessMatrix::empty();
+    for ctor in split_ctors {
+        // Dig into rows that match `ctor`.
+        debug!("specialize({:?})", ctor);
+        // `ctor` is *irrelevant* if there's another constructor in `split_ctors` that matches
+        // strictly fewer rows. In that case we can sometimes skip it. See the top of the file for
+        // details.
+        let ctor_is_relevant = matches!(ctor, Constructor::Missing) || missing_ctors.is_empty();
+        let mut spec_matrix = matrix.specialize_constructor(pcx, &ctor, ctor_is_relevant)?;
+        let mut witnesses = ensure_sufficient_stack(|| {
+            compute_exhaustiveness_and_usefulness(mcx, &mut spec_matrix)
+        })?;
+
+        // Transform witnesses for `spec_matrix` into witnesses for `matrix`.
+        witnesses.apply_constructor(pcx, &missing_ctors, &ctor);
+        // Accumulate the found witnesses.
+        ret.extend(witnesses);
+
+        // Detect ranges that overlap on their endpoints.
+        if let Constructor::IntRange(overlap_range) = ctor {
+            if overlap_range.is_singleton()
+                && spec_matrix.rows.len() >= 2
+                && spec_matrix.rows.iter().any(|row| !row.intersects_at_least.is_empty())
+            {
+                collect_overlapping_range_endpoints(mcx.tycx, overlap_range, matrix, &spec_matrix);
+            }
+        }
+
+        matrix.unspecialize(spec_matrix);
+    }
+
+    // Detect singleton gaps between ranges.
+    if missing_ctors.iter().any(|c| matches!(c, Constructor::IntRange(..))) {
+        for missing in &missing_ctors {
+            if let Constructor::IntRange(gap) = missing {
+                if gap.is_singleton() {
+                    collect_non_contiguous_range_endpoints(mcx.tycx, gap, matrix);
+                }
+            }
+        }
+    }
+
+    // Record usefulness of the branch patterns.
+    for row in matrix.rows() {
+        if row.head_is_branch {
+            if let PatOrWild::Pat(pat) = row.head() {
+                mcx.branch_usefulness.entry(pat.uid).or_default().update(row, matrix);
+            }
+        }
+    }
+
+    Ok(ret)
+}
+
+/// Indicates why a given pattern is considered redundant.
+#[derive(Clone, Debug)]
+pub struct RedundancyExplanation<'p, Cx: PatCx> {
+    /// All the values matched by this pattern are already matched by the given set of patterns.
+    /// This list is not guaranteed to be minimal but the contained patterns are at least guaranteed
+    /// to intersect this pattern.
+    pub covered_by: Vec<&'p DeconstructedPat<Cx>>,
+}
+
+/// Indicates whether or not a given arm is useful.
+#[derive(Clone, Debug)]
+pub enum Usefulness<'p, Cx: PatCx> {
+    /// The arm is useful. This additionally carries a set of or-pattern branches that have been
+    /// found to be redundant despite the overall arm being useful. Used only in the presence of
+    /// or-patterns, otherwise it stays empty.
+    Useful(Vec<(&'p DeconstructedPat<Cx>, RedundancyExplanation<'p, Cx>)>),
+    /// The arm is redundant and can be removed without changing the behavior of the match
+    /// expression.
+    Redundant(RedundancyExplanation<'p, Cx>),
+}
+
+/// The output of checking a match for exhaustiveness and arm usefulness.
+pub struct UsefulnessReport<'p, Cx: PatCx> {
+    /// For each arm of the input, whether that arm is useful after the arms above it.
+    pub arm_usefulness: Vec<(MatchArm<'p, Cx>, Usefulness<'p, Cx>)>,
+    /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
+    /// exhaustiveness.
+    pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>,
+    /// For each arm, a set of indices of arms above it that have non-empty intersection, i.e. there
+    /// is a value matched by both arms. This may miss real intersections.
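+    /// For example (illustrative): in `match (x, y) { (true, _) => {}, (_, true) => {} }`, the
+    /// second arm's set would contain arm 0, since `(true, true)` is matched by both arms.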
+    pub arm_intersections: Vec<BitSet<usize>>,
+}
+
+/// Computes whether a match is exhaustive and which of its arms are useful.
+#[instrument(skip(tycx, arms), level = "debug")]
+pub fn compute_match_usefulness<'p, Cx: PatCx>(
+    tycx: &Cx,
+    arms: &[MatchArm<'p, Cx>],
+    scrut_ty: Cx::Ty,
+    scrut_validity: PlaceValidity,
+    complexity_limit: Option<usize>,
+) -> Result<UsefulnessReport<'p, Cx>, Cx::Error> {
+    let mut cx = UsefulnessCtxt {
+        tycx,
+        branch_usefulness: FxHashMap::default(),
+        complexity_limit,
+        complexity_level: 0,
+    };
+    let mut matrix = Matrix::new(arms, scrut_ty, scrut_validity);
+    let non_exhaustiveness_witnesses = compute_exhaustiveness_and_usefulness(&mut cx, &mut matrix)?;
+
+    let non_exhaustiveness_witnesses: Vec<_> = non_exhaustiveness_witnesses.single_column();
+    let arm_usefulness: Vec<_> = arms
+        .iter()
+        .copied()
+        .map(|arm| {
+            debug!(?arm);
+            let usefulness = cx.branch_usefulness.get(&arm.pat.uid).unwrap();
+            let usefulness = if let Some(explanation) = usefulness.is_redundant() {
+                Usefulness::Redundant(explanation)
+            } else {
+                let mut redundant_subpats = Vec::new();
+                arm.pat.walk(&mut |subpat| {
+                    if let Some(u) = cx.branch_usefulness.get(&subpat.uid) {
+                        if let Some(explanation) = u.is_redundant() {
+                            redundant_subpats.push((subpat, explanation));
+                            false // stop recursing
+                        } else {
+                            true // keep recursing
+                        }
+                    } else {
+                        true // keep recursing
+                    }
+                });
+                Usefulness::Useful(redundant_subpats)
+            };
+            debug!(?usefulness);
+            (arm, usefulness)
+        })
+        .collect();
+
+    let arm_intersections: Vec<_> =
+        matrix.rows().map(|row| row.intersects_at_least.clone()).collect();
+
+    Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses, arm_intersections })
+}