Diffstat (limited to 'compiler/rustc_index')
-rw-r--r--  compiler/rustc_index/Cargo.toml              1
-rw-r--r--  compiler/rustc_index/src/bit_set.rs        153
-rw-r--r--  compiler/rustc_index/src/bit_set/tests.rs   95
-rw-r--r--  compiler/rustc_index/src/interval.rs       269
-rw-r--r--  compiler/rustc_index/src/interval/tests.rs 199
-rw-r--r--  compiler/rustc_index/src/lib.rs              8
-rw-r--r--  compiler/rustc_index/src/vec.rs             34
7 files changed, 744 insertions(+), 15 deletions(-)
diff --git a/compiler/rustc_index/Cargo.toml b/compiler/rustc_index/Cargo.toml
index b984a1321e0..89419bfce6f 100644
--- a/compiler/rustc_index/Cargo.toml
+++ b/compiler/rustc_index/Cargo.toml
@@ -10,3 +10,4 @@ doctest = false
 arrayvec = { version = "0.7", default-features = false }
 rustc_serialize = { path = "../rustc_serialize" }
 rustc_macros = { path = "../rustc_macros" }
+smallvec = "1"
diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs
index 67b3cec0a3e..5aa213cb701 100644
--- a/compiler/rustc_index/src/bit_set.rs
+++ b/compiler/rustc_index/src/bit_set.rs
@@ -4,7 +4,7 @@ use std::fmt;
 use std::iter;
 use std::marker::PhantomData;
 use std::mem;
-use std::ops::{BitAnd, BitAndAssign, BitOrAssign, Not, Range, Shl};
+use std::ops::{BitAnd, BitAndAssign, BitOrAssign, Bound, Not, Range, RangeBounds, Shl};
 use std::slice;
 
 use rustc_macros::{Decodable, Encodable};
@@ -22,6 +22,29 @@ pub trait BitRelations<Rhs> {
     fn intersect(&mut self, other: &Rhs) -> bool;
 }
 
+#[inline]
+fn inclusive_start_end<T: Idx>(
+    range: impl RangeBounds<T>,
+    domain: usize,
+) -> Option<(usize, usize)> {
+    // Both start and end are inclusive.
+    let start = match range.start_bound().cloned() {
+        Bound::Included(start) => start.index(),
+        Bound::Excluded(start) => start.index() + 1,
+        Bound::Unbounded => 0,
+    };
+    let end = match range.end_bound().cloned() {
+        Bound::Included(end) => end.index(),
+        Bound::Excluded(end) => end.index().checked_sub(1)?,
+        Bound::Unbounded => domain.checked_sub(1)?,
+    };
+    assert!(end < domain);
+    if start > end {
+        return None;
+    }
+    Some((start, end))
+}
+
 macro_rules! bit_relations_inherent_impls {
     () => {
         /// Sets `self = self | other` and returns `true` if `self` changed
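
For a concrete sense of the conversion `inclusive_start_end` performs, here is
a minimal standalone sketch of the same logic, specialized to plain `usize`
indices (the helper itself is private to this module, so the sketch
re-implements it rather than calling it):

    use std::ops::{Bound, RangeBounds};

    // Convert any range over 0..domain into an inclusive (start, end) pair,
    // or None if the range is empty.
    fn to_inclusive(range: impl RangeBounds<usize>, domain: usize) -> Option<(usize, usize)> {
        let start = match range.start_bound().cloned() {
            Bound::Included(s) => s,
            Bound::Excluded(s) => s + 1,
            Bound::Unbounded => 0,
        };
        let end = match range.end_bound().cloned() {
            Bound::Included(e) => e,
            Bound::Excluded(e) => e.checked_sub(1)?, // e.g. `..0` is empty
            Bound::Unbounded => domain.checked_sub(1)?,
        };
        if start > end { None } else { Some((start, end)) }
    }

    fn main() {
        assert_eq!(to_inclusive(3..7, 10), Some((3, 6)));
        assert_eq!(to_inclusive(3..=7, 10), Some((3, 7)));
        assert_eq!(to_inclusive(.., 10), Some((0, 9)));
        assert_eq!(to_inclusive(5..5, 10), None); // empty
    }
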
@@ -64,7 +87,7 @@ macro_rules! bit_relations_inherent_impls {
 /// to or greater than the domain size. All operations that involve two bitsets
 /// will panic if the bitsets have differing domain sizes.
 ///
-#[derive(Eq, PartialEq, Decodable, Encodable)]
+#[derive(Eq, PartialEq, Hash, Decodable, Encodable)]
 pub struct BitSet<T> {
     domain_size: usize,
     words: Vec<Word>,
@@ -151,6 +174,33 @@ impl<T: Idx> BitSet<T> {
         new_word != word
     }
 
+    #[inline]
+    pub fn insert_range(&mut self, elems: impl RangeBounds<T>) {
+        let Some((start, end)) = inclusive_start_end(elems, self.domain_size) else {
+            return;
+        };
+
+        let (start_word_index, start_mask) = word_index_and_mask(start);
+        let (end_word_index, end_mask) = word_index_and_mask(end);
+
+        // Set all words in between start and end (exclusive of both).
+        for word_index in (start_word_index + 1)..end_word_index {
+            self.words[word_index] = !0;
+        }
+
+        if start_word_index != end_word_index {
+            // Start and end are in different words, so we handle each in turn.
+            //
+            // In the start word, we set every bit from `start` upwards; this
+            // includes the `start_mask` bit itself.
+            self.words[start_word_index] |= !(start_mask - 1);
+            // In the end word, we set every bit from 0 through `end`,
+            // including the `end_mask` bit itself.
+            self.words[end_word_index] |= end_mask | (end_mask - 1);
+        } else {
+            self.words[start_word_index] |= end_mask | (end_mask - start_mask);
+        }
+    }
+
     /// Sets all bits to true.
     pub fn insert_all(&mut self) {
         for word in &mut self.words {
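
The boundary-word masking in `insert_range` can be checked in isolation. A
small sketch on a single 64-bit word, using the same one-hot `start_mask` and
`end_mask` convention as `word_index_and_mask`:

    fn main() {
        type Word = u64;
        let (start, end) = (3u32, 10u32); // set bits 3..=10 within one word
        let start_mask: Word = 1 << start;
        let end_mask: Word = 1 << end;
        // Bits at or below `end`, intersected with bits at or above `start`:
        let expected = (end_mask | (end_mask - 1)) & !(start_mask - 1);
        assert_eq!(expected, 0b0111_1111_1000);
        // The equivalent single-word form used by `insert_range` above:
        assert_eq!(expected, end_mask | (end_mask - start_mask));
    }
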
@@ -227,6 +277,36 @@ impl<T: Idx> BitSet<T> {
         not_already
     }
 
+    /// Returns the greatest element of the bitset that falls within `range`,
+    /// if any.
+    fn last_set_in(&self, range: impl RangeBounds<T>) -> Option<T> {
+        let (start, end) = inclusive_start_end(range, self.domain_size)?;
+        let (start_word_index, _) = word_index_and_mask(start);
+        let (end_word_index, end_mask) = word_index_and_mask(end);
+
+        let end_word = self.words[end_word_index] & (end_mask | (end_mask - 1));
+        if end_word != 0 {
+            let pos = max_bit(end_word) + WORD_BITS * end_word_index;
+            if start <= pos {
+                return Some(T::new(pos));
+            }
+        }
+
+        // We exclude `end_word_index` from this scan because that word was
+        // already handled above (with its bits past `end` masked off); here we
+        // look for the last nonzero word strictly before it.
+        if let Some(offset) =
+            self.words[start_word_index..end_word_index].iter().rposition(|&w| w != 0)
+        {
+            let word_idx = start_word_index + offset;
+            let word = self.words[word_idx];
+            let pos = max_bit(word) + WORD_BITS * word_idx;
+            if start <= pos {
+                return Some(T::new(pos));
+            }
+        }
+
+        None
+    }
+
     bit_relations_inherent_impls! {}
 }
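
Semantically, `last_set_in` agrees with a straightforward linear scan. A
reference model of the contract (the test file below checks the real
implementation against this same idea):

    use std::ops::RangeBounds;

    // Reference model: the greatest element of `set` that falls in `range`.
    fn last_set_in_naive(set: &[usize], range: impl RangeBounds<usize>) -> Option<usize> {
        set.iter().copied().filter(|e| range.contains(e)).max()
    }

    fn main() {
        let set = [3, 64, 65, 200];
        assert_eq!(last_set_in_naive(&set, 0..=64), Some(64));
        assert_eq!(last_set_in_naive(&set, 66..100), None);
    }
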
 
@@ -595,7 +675,7 @@ impl<T: Idx> SparseBitSet<T> {
 
     fn insert(&mut self, elem: T) -> bool {
         assert!(elem.index() < self.domain_size);
-        let changed = if let Some(i) = self.elems.iter().position(|&e| e >= elem) {
+        let changed = if let Some(i) = self.elems.iter().position(|&e| e.index() >= elem.index()) {
             if self.elems[i] == elem {
                 // `elem` is already in the set.
                 false
@@ -638,6 +718,18 @@ impl<T: Idx> SparseBitSet<T> {
     bit_relations_inherent_impls! {}
 }
 
+impl<T: Idx + Ord> SparseBitSet<T> {
+    fn last_set_in(&self, range: impl RangeBounds<T>) -> Option<T> {
+        let mut last_leq = None;
+        for e in self.iter() {
+            if range.contains(e) {
+                last_leq = Some(*e);
+            }
+        }
+        last_leq
+    }
+}
+
 /// A fixed-size bitset type with a hybrid representation: sparse when there
 /// are up to a `SPARSE_MAX` elements in the set, but dense when there are more
 /// than `SPARSE_MAX`.
@@ -709,6 +801,19 @@ impl<T: Idx> HybridBitSet<T> {
         }
     }
 
+    /// Returns the last (greatest) element present in the bitset that falls
+    /// within `range`, if any. In particular, if `elem` is in the bitset,
+    /// `last_set_in(..=elem)` returns `Some(elem)`.
+    pub fn last_set_in(&self, range: impl RangeBounds<T>) -> Option<T>
+    where
+        T: Ord,
+    {
+        match self {
+            HybridBitSet::Sparse(sparse) => sparse.last_set_in(range),
+            HybridBitSet::Dense(dense) => dense.last_set_in(range),
+        }
+    }
+
     pub fn insert(&mut self, elem: T) -> bool {
         // No need to check `elem` against `self.domain_size` here because all
         // the match cases check it, one way or another.
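
A hedged usage sketch of `last_set_in` from a downstream crate (this assumes a
dependency on `rustc_index`; `usize` implements `Idx`, so it serves as the
element type):

    use rustc_index::bit_set::HybridBitSet;

    fn main() {
        let mut set: HybridBitSet<usize> = HybridBitSet::new_empty(300);
        set.insert(64);
        set.insert(67);
        assert_eq!(set.last_set_in(0..=65), Some(64));
        assert_eq!(set.last_set_in(0..64), None);
        assert_eq!(set.last_set_in(60..), Some(67));
    }
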
@@ -734,6 +839,41 @@ impl<T: Idx> HybridBitSet<T> {
         }
     }
 
+    pub fn insert_range(&mut self, elems: impl RangeBounds<T>) {
+        // No need to check `elems` against `self.domain_size` here because
+        // all the match cases check it, one way or another.
+        let start = match elems.start_bound().cloned() {
+            Bound::Included(start) => start.index(),
+            Bound::Excluded(start) => start.index() + 1,
+            Bound::Unbounded => 0,
+        };
+        // Note that `end` is exclusive here, unlike `start`.
+        let end = match elems.end_bound().cloned() {
+            Bound::Included(end) => end.index() + 1,
+            Bound::Excluded(end) => end.index(),
+            Bound::Unbounded => self.domain_size(),
+        };
+        let Some(len) = end.checked_sub(start) else { return };
+        match self {
+            HybridBitSet::Sparse(sparse) if sparse.len() + len < SPARSE_MAX => {
+                // The set is sparse and has space for `elems`.
+                for elem in start..end {
+                    sparse.insert(T::new(elem));
+                }
+            }
+            HybridBitSet::Sparse(sparse) => {
+                // The set is sparse and full. Convert to a dense set.
+                let mut dense = sparse.to_dense();
+                dense.insert_range(elems);
+                *self = HybridBitSet::Dense(dense);
+            }
+            HybridBitSet::Dense(dense) => dense.insert_range(elems),
+        }
+    }
+
     pub fn insert_all(&mut self) {
         let domain_size = self.domain_size();
         match self {
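
A matching sketch of the promotion `insert_range` performs (same `rustc_index`
assumption): a small range keeps the sparse representation, while a large one
converts the set to dense. The representation itself is private, so the sketch
only observes it through `contains`:

    use rustc_index::bit_set::HybridBitSet;

    fn main() {
        let mut set: HybridBitSet<usize> = HybridBitSet::new_empty(300);
        set.insert_range(0..4);   // small enough to stay sparse
        set.insert_range(0..100); // exceeds SPARSE_MAX, so converted to dense
        assert!(set.contains(0) && set.contains(99));
        assert!(!set.contains(100));
    }
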
@@ -852,7 +992,7 @@ impl<T: Idx> GrowableBitSet<T> {
 ///
 /// All operations that involve a row and/or column index will panic if the
 /// index exceeds the relevant bound.
-#[derive(Clone, Eq, PartialEq, Decodable, Encodable)]
+#[derive(Clone, Eq, PartialEq, Hash, Decodable, Encodable)]
 pub struct BitMatrix<R: Idx, C: Idx> {
     num_rows: usize,
     num_columns: usize,
@@ -1205,6 +1345,11 @@ fn word_index_and_mask<T: Idx>(elem: T) -> (usize, Word) {
     (word_index, mask)
 }
 
+#[inline]
+fn max_bit(word: Word) -> usize {
+    WORD_BITS - 1 - word.leading_zeros() as usize
+}
+
 /// Integral type used to represent the bit set.
 pub trait FiniteBitSetTy:
     BitAnd<Output = Self>
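
`max_bit` above returns the position of the highest set bit. A quick
standalone check of the arithmetic:

    fn main() {
        const WORD_BITS: usize = 64;
        fn max_bit(word: u64) -> usize {
            WORD_BITS - 1 - word.leading_zeros() as usize
        }
        assert_eq!(max_bit(0b1), 0);
        assert_eq!(max_bit(0b1010), 3);
        assert_eq!(max_bit(u64::MAX), 63);
        // `max_bit(0)` would underflow; callers check for a zero word first.
    }
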
diff --git a/compiler/rustc_index/src/bit_set/tests.rs b/compiler/rustc_index/src/bit_set/tests.rs
index aebc6d0ddd8..e2b07305c96 100644
--- a/compiler/rustc_index/src/bit_set/tests.rs
+++ b/compiler/rustc_index/src/bit_set/tests.rs
@@ -370,6 +370,101 @@ fn sparse_matrix_operations() {
     }
 }
 
+#[test]
+fn dense_insert_range() {
+    #[track_caller]
+    fn check<R>(domain: usize, range: R)
+    where
+        R: RangeBounds<usize> + Clone + IntoIterator<Item = usize> + std::fmt::Debug,
+    {
+        let mut set = BitSet::new_empty(domain);
+        set.insert_range(range.clone());
+        for i in set.iter() {
+            assert!(range.contains(&i));
+        }
+        for i in range.clone() {
+            assert!(set.contains(i), "{} in {:?}, inserted {:?}", i, set, range);
+        }
+    }
+    check(300, 10..10);
+    check(300, WORD_BITS..WORD_BITS * 2);
+    check(300, WORD_BITS - 1..WORD_BITS * 2);
+    check(300, WORD_BITS - 1..WORD_BITS);
+    check(300, 10..100);
+    check(300, 10..30);
+    check(300, 0..5);
+    check(300, 0..250);
+    check(300, 200..250);
+
+    check(300, 10..=10);
+    check(300, WORD_BITS..=WORD_BITS * 2);
+    check(300, WORD_BITS - 1..=WORD_BITS * 2);
+    check(300, WORD_BITS - 1..=WORD_BITS);
+    check(300, 10..=100);
+    check(300, 10..=30);
+    check(300, 0..=5);
+    check(300, 0..=250);
+    check(300, 200..=250);
+
+    for i in 0..WORD_BITS * 2 {
+        for j in i..WORD_BITS * 2 {
+            check(WORD_BITS * 2, i..j);
+            check(WORD_BITS * 2, i..=j);
+            check(300, i..j);
+            check(300, i..=j);
+        }
+    }
+}
+
+#[test]
+fn dense_last_set_before() {
+    fn easy(set: &BitSet<usize>, needle: impl RangeBounds<usize>) -> Option<usize> {
+        let mut last_leq = None;
+        for e in set.iter() {
+            if needle.contains(&e) {
+                last_leq = Some(e);
+            }
+        }
+        last_leq
+    }
+
+    #[track_caller]
+    fn cmp(set: &BitSet<usize>, needle: impl RangeBounds<usize> + Clone + std::fmt::Debug) {
+        assert_eq!(
+            set.last_set_in(needle.clone()),
+            easy(set, needle.clone()),
+            "{:?} in {:?}",
+            needle,
+            set
+        );
+    }
+    let mut set = BitSet::new_empty(300);
+    cmp(&set, 50..=50);
+    set.insert(WORD_BITS);
+    cmp(&set, WORD_BITS..=WORD_BITS);
+    set.insert(WORD_BITS - 1);
+    cmp(&set, 0..=WORD_BITS - 1);
+    cmp(&set, 0..=5);
+    cmp(&set, 10..100);
+    set.insert(100);
+    cmp(&set, 100..110);
+    cmp(&set, 99..100);
+    cmp(&set, 99..=100);
+
+    for i in 0..=WORD_BITS * 2 {
+        for j in i..=WORD_BITS * 2 {
+            for k in 0..WORD_BITS * 2 {
+                let mut set = BitSet::new_empty(300);
+                cmp(&set, i..j);
+                cmp(&set, i..=j);
+                set.insert(k);
+                cmp(&set, i..j);
+                cmp(&set, i..=j);
+            }
+        }
+    }
+}
+
 /// Merge dense hybrid set into empty sparse hybrid set.
 #[bench]
 fn union_hybrid_sparse_empty_to_dense(b: &mut Bencher) {
diff --git a/compiler/rustc_index/src/interval.rs b/compiler/rustc_index/src/interval.rs
new file mode 100644
index 00000000000..6da95053b11
--- /dev/null
+++ b/compiler/rustc_index/src/interval.rs
@@ -0,0 +1,269 @@
+use std::iter::Step;
+use std::marker::PhantomData;
+use std::ops::Bound;
+use std::ops::RangeBounds;
+
+use crate::vec::Idx;
+use crate::vec::IndexVec;
+use smallvec::SmallVec;
+
+#[cfg(test)]
+mod tests;
+
+/// Stores a set of intervals on the indices.
+#[derive(Debug, Clone)]
+pub struct IntervalSet<I> {
+    // Sorted, disjoint (start, end) pairs; both endpoints are inclusive.
+    map: SmallVec<[(u32, u32); 4]>,
+    domain: usize,
+    _data: PhantomData<I>,
+}
+
+#[inline]
+fn inclusive_start<T: Idx>(range: impl RangeBounds<T>) -> u32 {
+    match range.start_bound() {
+        Bound::Included(start) => start.index() as u32,
+        Bound::Excluded(start) => start.index() as u32 + 1,
+        Bound::Unbounded => 0,
+    }
+}
+
+#[inline]
+fn inclusive_end<T: Idx>(domain: usize, range: impl RangeBounds<T>) -> Option<u32> {
+    let end = match range.end_bound() {
+        Bound::Included(end) => end.index() as u32,
+        Bound::Excluded(end) => end.index().checked_sub(1)? as u32,
+        Bound::Unbounded => domain.checked_sub(1)? as u32,
+    };
+    Some(end)
+}
+
+impl<I: Idx> IntervalSet<I> {
+    pub fn new(domain: usize) -> IntervalSet<I> {
+        IntervalSet { map: SmallVec::new(), domain, _data: PhantomData }
+    }
+
+    pub fn clear(&mut self) {
+        self.map.clear();
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = I> + '_
+    where
+        I: Step,
+    {
+        self.iter_intervals().flatten()
+    }
+
+    /// Iterates through intervals stored in the set, in order.
+    pub fn iter_intervals(&self) -> impl Iterator<Item = std::ops::Range<I>> + '_
+    where
+        I: Step,
+    {
+        self.map.iter().map(|&(start, end)| I::new(start as usize)..I::new(end as usize + 1))
+    }
+
+    /// Returns true if we increased the number of elements present.
+    pub fn insert(&mut self, point: I) -> bool {
+        self.insert_range(point..=point)
+    }
+
+    /// Returns true if we increased the number of elements present.
+    pub fn insert_range(&mut self, range: impl RangeBounds<I> + Clone) -> bool {
+        let start = inclusive_start(range.clone());
+        let Some(mut end) = inclusive_end(self.domain, range) else {
+            // empty range
+            return false;
+        };
+        if start > end {
+            return false;
+        }
+
+        loop {
+            // This condition looks a bit weird, but actually makes sense.
+            //
+            // If `r.0 == end + 1`, that range is adjacent to the one we are
+            // inserting, so we still want to merge with it and keep going.
+            // We're looking for the first range which starts *non-adjacently*
+            // after our end.
+            let next = self.map.partition_point(|r| r.0 <= end + 1);
+            if let Some(last) = next.checked_sub(1) {
+                let (prev_start, prev_end) = &mut self.map[last];
+                if *prev_end + 1 >= start {
+                    // If the start for the inserted range is adjacent to the
+                    // end of the previous, we can extend the previous range.
+                    if start < *prev_start {
+                        // Our range starts before the one we found. We'll need
+                        // to *remove* it, and then try again.
+                        //
+                        // FIXME: This is not so efficient; we may need to
+                        // recurse a bunch of times here. Instead, it's probably
+                        // better to do something like drain_filter(...) on the
+                        // map to be able to delete or modify all the ranges in
+                        // start..=end and then potentially re-insert a new
+                        // range.
+                        end = std::cmp::max(end, *prev_end);
+                        self.map.remove(last);
+                    } else {
+                        // We overlap with the previous range, increase it to
+                        // include us.
+                        //
+                        // Make sure we're actually going to *increase* it though --
+                        // it may be that end is just inside the previously existing
+                        // set.
+                        return if end > *prev_end {
+                            *prev_end = end;
+                            true
+                        } else {
+                            false
+                        };
+                    }
+                } else {
+                    // Otherwise, we don't overlap, so just insert
+                    self.map.insert(last + 1, (start, end));
+                    return true;
+                }
+            } else {
+                if self.map.is_empty() {
+                    // Quite common in practice, and expensive to call memcpy
+                    // with length zero.
+                    self.map.push((start, end));
+                } else {
+                    self.map.insert(next, (start, end));
+                }
+                return true;
+            }
+        }
+    }
+
+    pub fn contains(&self, needle: I) -> bool {
+        let needle = needle.index() as u32;
+        let last = match self.map.partition_point(|r| r.0 <= needle).checked_sub(1) {
+            Some(idx) => idx,
+            None => {
+                // All ranges in the map start after `needle`.
+                return false;
+            }
+        };
+        let (_, prev_end) = &self.map[last];
+        needle <= *prev_end
+    }
+
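
The lookup in `contains` relies on `partition_point` over sorted, disjoint,
inclusive ranges. The same idea as a standalone sketch over a plain slice:

    // `map` holds sorted, disjoint (start, end) ranges, both ends inclusive.
    fn contains(map: &[(u32, u32)], needle: u32) -> bool {
        // Index of the last range whose start is <= needle, if any.
        match map.partition_point(|r| r.0 <= needle).checked_sub(1) {
            Some(idx) => needle <= map[idx].1,
            None => false, // every range starts after `needle`
        }
    }

    fn main() {
        let map = [(2, 5), (10, 12)];
        assert!(contains(&map, 4));
        assert!(!contains(&map, 7));
        assert!(contains(&map, 10));
    }
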
+    pub fn superset(&self, other: &IntervalSet<I>) -> bool
+    where
+        I: Step,
+    {
+        // FIXME: Performance here is probably not great. We will be doing a lot
+        // of pointless binary searches (one `contains` call per element).
+        other.iter().all(|elem| self.contains(elem))
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.map.is_empty()
+    }
+
+    /// Returns the maximum (last) element present in the set from `range`.
+    pub fn last_set_in(&self, range: impl RangeBounds<I> + Clone) -> Option<I> {
+        let start = inclusive_start(range.clone());
+        let Some(end) = inclusive_end(self.domain, range) else {
+            // empty range
+            return None;
+        };
+        if start > end {
+            return None;
+        }
+        let last = match self.map.partition_point(|r| r.0 <= end).checked_sub(1) {
+            Some(idx) => idx,
+            None => {
+                // All ranges in the map start after the queried range's end.
+                return None;
+            }
+        };
+        let (_, prev_end) = &self.map[last];
+        if start <= *prev_end { Some(I::new(std::cmp::min(*prev_end, end) as usize)) } else { None }
+    }
+
+    pub fn insert_all(&mut self) {
+        self.clear();
+        // Stored ranges are inclusive on both ends, so a full domain is the
+        // single range `(0, domain - 1)`; an empty domain stores nothing.
+        if let Some(end) = self.domain.checked_sub(1) {
+            self.map.push((0, end.try_into().unwrap()));
+        }
+    }
+
+    pub fn union(&mut self, other: &IntervalSet<I>) -> bool
+    where
+        I: Step,
+    {
+        assert_eq!(self.domain, other.domain);
+        let mut did_insert = false;
+        for range in other.iter_intervals() {
+            did_insert |= self.insert_range(range);
+        }
+        did_insert
+    }
+}
+
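
A hedged end-to-end sketch of `IntervalSet` (assuming a dependency on
`rustc_index`): adjacent inserts coalesce into a single stored interval.

    use rustc_index::interval::IntervalSet;

    fn main() {
        let mut set: IntervalSet<usize> = IntervalSet::new(300);
        set.insert_range(0..10);
        set.insert_range(10..=15); // adjacent, so merged with the previous interval
        assert!(set.contains(12));
        assert_eq!(set.last_set_in(0..100), Some(15));
        assert_eq!(set.iter_intervals().collect::<Vec<_>>(), [0..16]);
    }
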
+/// This data structure optimizes for cases where the stored bits in each row
+/// are expected to be highly contiguous (long ranges of 1s or 0s), in contrast
+/// to `BitMatrix` and `SparseBitMatrix`, which are optimized for
+/// "random"/non-contiguous bits and cheap(er) point queries at the expense of
+/// memory usage.
+#[derive(Clone)]
+pub struct SparseIntervalMatrix<R, C>
+where
+    R: Idx,
+    C: Idx,
+{
+    rows: IndexVec<R, IntervalSet<C>>,
+    column_size: usize,
+}
+
+impl<R: Idx, C: Step + Idx> SparseIntervalMatrix<R, C> {
+    pub fn new(column_size: usize) -> SparseIntervalMatrix<R, C> {
+        SparseIntervalMatrix { rows: IndexVec::new(), column_size }
+    }
+
+    pub fn rows(&self) -> impl Iterator<Item = R> {
+        self.rows.indices()
+    }
+
+    pub fn row(&self, row: R) -> Option<&IntervalSet<C>> {
+        self.rows.get(row)
+    }
+
+    fn ensure_row(&mut self, row: R) -> &mut IntervalSet<C> {
+        self.rows.ensure_contains_elem(row, || IntervalSet::new(self.column_size));
+        &mut self.rows[row]
+    }
+
+    pub fn union_row(&mut self, row: R, from: &IntervalSet<C>) -> bool
+    where
+        C: Step,
+    {
+        self.ensure_row(row).union(from)
+    }
+
+    pub fn union_rows(&mut self, read: R, write: R) -> bool
+    where
+        C: Step,
+    {
+        if read == write || self.rows.get(read).is_none() {
+            return false;
+        }
+        self.ensure_row(write);
+        let (read_row, write_row) = self.rows.pick2_mut(read, write);
+        write_row.union(read_row)
+    }
+
+    pub fn insert_all_into_row(&mut self, row: R) {
+        self.ensure_row(row).insert_all();
+    }
+
+    pub fn insert_range(&mut self, row: R, range: impl RangeBounds<C> + Clone) {
+        self.ensure_row(row).insert_range(range);
+    }
+
+    pub fn insert(&mut self, row: R, point: C) -> bool {
+        self.ensure_row(row).insert(point)
+    }
+
+    pub fn contains(&self, row: R, point: C) -> bool {
+        self.row(row).map_or(false, |r| r.contains(point))
+    }
+}
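
And a matching sketch for the matrix wrapper (same assumption): rows are
materialized lazily by `ensure_row`, so untouched rows simply read as empty.

    use rustc_index::interval::SparseIntervalMatrix;

    fn main() {
        let mut matrix: SparseIntervalMatrix<usize, usize> = SparseIntervalMatrix::new(100);
        matrix.insert_range(3, 10..20); // row 3 is created on demand
        assert!(matrix.contains(3, 15));
        assert!(!matrix.contains(2, 15)); // row 2 exists but is empty
        matrix.union_rows(3, 5); // row 5 |= row 3
        assert!(matrix.contains(5, 15));
    }
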
diff --git a/compiler/rustc_index/src/interval/tests.rs b/compiler/rustc_index/src/interval/tests.rs
new file mode 100644
index 00000000000..d90b449f326
--- /dev/null
+++ b/compiler/rustc_index/src/interval/tests.rs
@@ -0,0 +1,199 @@
+use super::*;
+
+#[test]
+fn insert_collapses() {
+    let mut set = IntervalSet::<u32>::new(3000);
+    set.insert_range(9831..=9837);
+    set.insert_range(43..=9830);
+    assert_eq!(set.iter_intervals().collect::<Vec<_>>(), [43..9838]);
+}
+
+#[test]
+fn contains() {
+    let mut set = IntervalSet::new(300);
+    set.insert(0u32);
+    assert!(set.contains(0));
+    set.insert_range(0..10);
+    assert!(set.contains(9));
+    assert!(!set.contains(10));
+    set.insert_range(10..11);
+    assert!(set.contains(10));
+}
+
+#[test]
+fn insert() {
+    for i in 0..30usize {
+        let mut set = IntervalSet::new(300);
+        for j in i..30usize {
+            set.insert(j);
+            for k in i..j {
+                assert!(set.contains(k));
+            }
+        }
+    }
+
+    let mut set = IntervalSet::new(300);
+    set.insert_range(0..1u32);
+    assert!(set.contains(0), "{:?}", set.map);
+    assert!(!set.contains(1));
+    set.insert_range(1..1);
+    assert!(set.contains(0));
+    assert!(!set.contains(1));
+
+    let mut set = IntervalSet::new(300);
+    set.insert_range(4..5u32);
+    set.insert_range(5..10);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [4, 5, 6, 7, 8, 9]);
+    set.insert_range(3..7);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [3, 4, 5, 6, 7, 8, 9]);
+
+    let mut set = IntervalSet::new(300);
+    set.insert_range(0..10u32);
+    set.insert_range(3..5);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+
+    let mut set = IntervalSet::new(300);
+    set.insert_range(0..10u32);
+    set.insert_range(0..3);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+
+    let mut set = IntervalSet::new(300);
+    set.insert_range(0..10u32);
+    set.insert_range(0..10);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+
+    let mut set = IntervalSet::new(300);
+    set.insert_range(0..10u32);
+    set.insert_range(5..10);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+
+    let mut set = IntervalSet::new(300);
+    set.insert_range(0..10u32);
+    set.insert_range(5..13);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]);
+}
+
+#[test]
+fn insert_range() {
+    #[track_caller]
+    fn check<R>(range: R)
+    where
+        R: RangeBounds<usize> + Clone + IntoIterator<Item = usize> + std::fmt::Debug,
+    {
+        let mut set = IntervalSet::new(300);
+        set.insert_range(range.clone());
+        for i in set.iter() {
+            assert!(range.contains(&i));
+        }
+        for i in range.clone() {
+            assert!(set.contains(i), "A: {} in {:?}, inserted {:?}", i, set, range);
+        }
+        set.insert_range(range.clone());
+        for i in set.iter() {
+            assert!(range.contains(&i), "{} in {:?}", i, set);
+        }
+        for i in range.clone() {
+            assert!(set.contains(i), "B: {} in {:?}, inserted {:?}", i, set, range);
+        }
+    }
+    check(10..10);
+    check(10..100);
+    check(10..30);
+    check(0..5);
+    check(0..250);
+    check(200..250);
+
+    check(10..=10);
+    check(10..=100);
+    check(10..=30);
+    check(0..=5);
+    check(0..=250);
+    check(200..=250);
+
+    for i in 0..30 {
+        for j in i..30 {
+            check(i..j);
+            check(i..=j);
+        }
+    }
+}
+
+#[test]
+fn insert_range_dual() {
+    let mut set = IntervalSet::<u32>::new(300);
+    set.insert_range(0..3);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2]);
+    set.insert_range(5..7);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 5, 6]);
+    set.insert_range(3..4);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 3, 5, 6]);
+    set.insert_range(3..5);
+    assert_eq!(set.iter().collect::<Vec<_>>(), [0, 1, 2, 3, 4, 5, 6]);
+}
+
+#[test]
+fn last_set_before_adjacent() {
+    let mut set = IntervalSet::<u32>::new(300);
+    set.insert_range(0..3);
+    set.insert_range(3..5);
+    assert_eq!(set.last_set_in(0..3), Some(2));
+    assert_eq!(set.last_set_in(0..5), Some(4));
+    assert_eq!(set.last_set_in(3..5), Some(4));
+    set.insert_range(2..5);
+    assert_eq!(set.last_set_in(0..3), Some(2));
+    assert_eq!(set.last_set_in(0..5), Some(4));
+    assert_eq!(set.last_set_in(3..5), Some(4));
+}
+
+#[test]
+fn last_set_in() {
+    fn easy(set: &IntervalSet<usize>, needle: impl RangeBounds<usize>) -> Option<usize> {
+        let mut last_leq = None;
+        for e in set.iter() {
+            if needle.contains(&e) {
+                last_leq = Some(e);
+            }
+        }
+        last_leq
+    }
+
+    #[track_caller]
+    fn cmp(set: &IntervalSet<usize>, needle: impl RangeBounds<usize> + Clone + std::fmt::Debug) {
+        assert_eq!(
+            set.last_set_in(needle.clone()),
+            easy(set, needle.clone()),
+            "{:?} in {:?}",
+            needle,
+            set
+        );
+    }
+    let mut set = IntervalSet::new(300);
+    cmp(&set, 50..=50);
+    set.insert(64);
+    cmp(&set, 64..=64);
+    set.insert(64 - 1);
+    cmp(&set, 0..=64 - 1);
+    cmp(&set, 0..=5);
+    cmp(&set, 10..100);
+    set.insert(100);
+    cmp(&set, 100..110);
+    cmp(&set, 99..100);
+    cmp(&set, 99..=100);
+
+    for i in 0..=30 {
+        for j in i..=30 {
+            for k in 0..30 {
+                let mut set = IntervalSet::new(100);
+                cmp(&set, ..j);
+                cmp(&set, i..);
+                cmp(&set, i..j);
+                cmp(&set, i..=j);
+                set.insert(k);
+                cmp(&set, ..j);
+                cmp(&set, i..);
+                cmp(&set, i..j);
+                cmp(&set, i..=j);
+            }
+        }
+    }
+}
diff --git a/compiler/rustc_index/src/lib.rs b/compiler/rustc_index/src/lib.rs
index a72a27e07bd..7919e409253 100644
--- a/compiler/rustc_index/src/lib.rs
+++ b/compiler/rustc_index/src/lib.rs
@@ -1,13 +1,11 @@
 #![feature(allow_internal_unstable)]
 #![feature(bench_black_box)]
 #![feature(extend_one)]
-#![feature(iter_zip)]
+#![feature(let_else)]
 #![feature(min_specialization)]
+#![feature(step_trait)]
 #![feature(test)]
 
 pub mod bit_set;
+pub mod interval;
 pub mod vec;
-
-// FIXME(#56935): Work around ICEs during cross-compilation.
-#[allow(unused)]
-extern crate rustc_macros;
diff --git a/compiler/rustc_index/src/vec.rs b/compiler/rustc_index/src/vec.rs
index 45639bad243..8b61530577d 100644
--- a/compiler/rustc_index/src/vec.rs
+++ b/compiler/rustc_index/src/vec.rs
@@ -12,7 +12,7 @@ use std::vec;
 /// Represents some newtyped `usize` wrapper.
 ///
 /// Purpose: avoid mixing indexes for different bitvector domains.
-pub trait Idx: Copy + 'static + Ord + Debug + Hash {
+pub trait Idx: Copy + 'static + Eq + Debug + Hash {
     fn new(idx: usize) -> Self;
 
     fn index(self) -> usize;
@@ -118,32 +118,54 @@ macro_rules! newtype_index {
         }
 
         impl $type {
+            /// Maximum value the index can take, as a `u32`.
             $v const MAX_AS_U32: u32 = $max;
 
+            /// Maximum value the index can take.
             $v const MAX: Self = Self::from_u32($max);
 
+            /// Creates a new index from a given `usize`.
+            ///
+            /// # Panics
+            ///
+            /// Will panic if `value` exceeds `MAX`.
             #[inline]
             $v const fn from_usize(value: usize) -> Self {
                 assert!(value <= ($max as usize));
+                // SAFETY: We just checked that `value <= max`.
                 unsafe {
                     Self::from_u32_unchecked(value as u32)
                 }
             }
 
+            /// Creates a new index from a given `u32`.
+            ///
+            /// # Panics
+            ///
+            /// Will panic if `value` exceeds `MAX`.
             #[inline]
             $v const fn from_u32(value: u32) -> Self {
                 assert!(value <= $max);
+                // SAFETY: We just checked that `value <= max`.
                 unsafe {
                     Self::from_u32_unchecked(value)
                 }
             }
 
+            /// Creates a new index from a given `u32`.
+            ///
+            /// # Safety
+            ///
+            /// The provided value must be less than or equal to the maximum value for the newtype.
+            /// Providing a value outside this range is undefined behavior due to layout restrictions.
+            ///
+            /// Prefer using `from_u32`.
             #[inline]
             $v const unsafe fn from_u32_unchecked(value: u32) -> Self {
                 Self { private: value }
             }
 
-            /// Extracts the value of this index as an integer.
+            /// Extracts the value of this index as a `usize`.
             #[inline]
             $v const fn index(self) -> usize {
                 self.as_usize()
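
For context, a hedged sketch of declaring and using such an index type from a
downstream crate. `WidgetIdx` is hypothetical, and the declaring crate must
also depend on `rustc_serialize`, which the derived `Encodable`/`Decodable`
impls reference:

    rustc_index::newtype_index! {
        pub struct WidgetIdx { DEBUG_FORMAT = "widget{}" }
    }

    fn main() {
        let w = WidgetIdx::from_u32(7);
        assert_eq!(w.index(), 7);
        assert_eq!(WidgetIdx::from_usize(7), w);
        // WidgetIdx::from_u32(u32::MAX) would panic: it exceeds `MAX`.
    }
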
@@ -373,8 +395,8 @@ macro_rules! newtype_index {
 
     (@serializable $type:ident) => (
         impl<D: ::rustc_serialize::Decoder> ::rustc_serialize::Decodable<D> for $type {
-            fn decode(d: &mut D) -> Result<Self, D::Error> {
-                d.read_u32().map(Self::from_u32)
+            fn decode(d: &mut D) -> Self {
+                Self::from_u32(d.read_u32())
             }
         }
         impl<E: ::rustc_serialize::Encoder> ::rustc_serialize::Encodable<E> for $type {
@@ -505,8 +527,8 @@ impl<S: Encoder, I: Idx, T: Encodable<S>> Encodable<S> for &IndexVec<I, T> {
 }
 
 impl<D: Decoder, I: Idx, T: Decodable<D>> Decodable<D> for IndexVec<I, T> {
-    fn decode(d: &mut D) -> Result<Self, D::Error> {
-        Decodable::decode(d).map(|v| IndexVec { raw: v, _marker: PhantomData })
+    fn decode(d: &mut D) -> Self {
+        IndexVec { raw: Decodable::decode(d), _marker: PhantomData }
     }
 }