| field | value | date |
|---|---|---|
| author | Camille GILLOT <gillot.camille@gmail.com> | 2023-08-28 18:28:43 +0000 |
| committer | Camille Gillot <gillot.camille@gmail.com> | 2025-09-07 16:06:40 +0000 |
| commit | 2ff92e83af6d646e05218374954c6ed2ebb67b3d | |
| tree | 55bb57893046fdfa0995b6febf32c6a9d6f13ec1 | |
| parent | bea625f3275e3c897dc965ed97a1d19ef7831f01 | |
Introduce fast insertion at extremities to IntervalSet.
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_index/src/interval.rs | 51 |
| -rw-r--r-- | compiler/rustc_mir_dataflow/src/points.rs | 66 |

2 files changed, 90 insertions(+), 27 deletions(-)
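The core idea of the patch: when points arrive in monotone order, only the first or last interval of the set can ever be affected, so insertion reduces to either growing that interval by one point or adding a fresh singleton at the corresponding end, without going through the general `insert` path. The sketch below models that coalescing logic on a plain `Vec<(u32, u32)>` of closed intervals; the `Intervals` type and its methods are illustrative stand-ins, not the `rustc_index::interval::IntervalSet` API.

```rust
/// Illustrative model of an interval set over `u32` points, stored as
/// disjoint, sorted, closed intervals `(start, end)`.
#[derive(Debug, Default)]
struct Intervals {
    map: Vec<(u32, u32)>,
}

impl Intervals {
    /// Fast path when `point` is greater than every contained point:
    /// either grow the last interval by one or push a new singleton.
    fn append(&mut self, point: u32) {
        if let Some((_, last_end)) = self.map.last_mut() {
            assert!(*last_end < point);
            if point == *last_end + 1 {
                *last_end = point;
                return;
            }
        }
        self.map.push((point, point));
    }

    /// Fast path when `point` is smaller than every contained point:
    /// either grow the first interval downwards or insert a new singleton at index 0.
    fn prepend(&mut self, point: u32) {
        if let Some((first_start, _)) = self.map.first_mut() {
            assert!(point < *first_start);
            if point + 1 == *first_start {
                *first_start = point;
            } else {
                self.map.insert(0, (point, point));
            }
        } else {
            // An empty map only needs a push.
            self.map.push((point, point));
        }
    }
}

fn main() {
    let mut set = Intervals::default();
    // Appending consecutive points coalesces them into one interval.
    for p in [3, 4, 5, 7] {
        set.append(p);
    }
    assert_eq!(set.map, vec![(3, 5), (7, 7)]);
    // Prepending a point adjacent to the first interval extends it downwards.
    set.prepend(2);
    assert_eq!(set.map, vec![(2, 5), (7, 7)]);
    println!("{set:?}");
}
```

With the inputs above, appending 3, 4, 5 coalesces into the single interval (3, 5), 7 starts a new singleton, and prepending 2 extends the first interval downward; this is the behaviour the new `append`/`prepend` fast paths offer to callers that supply points in monotone order.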
diff --git a/compiler/rustc_index/src/interval.rs b/compiler/rustc_index/src/interval.rs
index 0225c5c4f32..4af5bfcaee6 100644
--- a/compiler/rustc_index/src/interval.rs
+++ b/compiler/rustc_index/src/interval.rs
@@ -140,6 +140,49 @@ impl<I: Idx> IntervalSet<I> {
         result
     }
 
+    /// Specialized version of `insert` when we know that the inserted point is *before* any
+    /// contained.
+    pub fn prepend(&mut self, point: I) {
+        let point = point.index() as u32;
+
+        if let Some((first_start, _)) = self.map.first_mut() {
+            assert!(point < *first_start);
+            if point + 1 == *first_start {
+                *first_start = point;
+            } else {
+                self.map.insert(0, (point, point));
+            }
+        } else {
+            // If the map is empty, push is faster than insert.
+            self.map.push((point, point));
+        }
+
+        debug_assert!(
+            self.check_invariants(),
+            "wrong intervals after prepend {point:?} to {self:?}"
+        );
+    }
+
+    /// Specialized version of `insert` when we know that the inserted point is *after* any
+    /// contained.
+    pub fn append(&mut self, point: I) {
+        let point = point.index() as u32;
+
+        if let Some((_, last_end)) = self.map.last_mut()
+            && let _ = assert!(*last_end < point)
+            && point == *last_end + 1
+        {
+            *last_end = point;
+        } else {
+            self.map.push((point, point));
+        }
+
+        debug_assert!(
+            self.check_invariants(),
+            "wrong intervals after append {point:?} to {self:?}"
+        );
+    }
+
     pub fn contains(&self, needle: I) -> bool {
         let needle = needle.index() as u32;
         let Some(last) = self.map.partition_point(|r| r.0 <= needle).checked_sub(1) else {
@@ -325,6 +368,14 @@ impl<R: Idx, C: Step + Idx> SparseIntervalMatrix<R, C> {
         self.ensure_row(row).insert(point)
     }
 
+    pub fn prepend(&mut self, row: R, point: C) {
+        self.ensure_row(row).prepend(point)
+    }
+
+    pub fn append(&mut self, row: R, point: C) {
+        self.ensure_row(row).append(point)
+    }
+
     pub fn contains(&self, row: R, point: C) -> bool {
         self.row(row).is_some_and(|r| r.contains(point))
     }
diff --git a/compiler/rustc_mir_dataflow/src/points.rs b/compiler/rustc_mir_dataflow/src/points.rs
index 70d1a34b5fb..55a373d4c51 100644
--- a/compiler/rustc_mir_dataflow/src/points.rs
+++ b/compiler/rustc_mir_dataflow/src/points.rs
@@ -3,7 +3,7 @@ use rustc_index::interval::SparseIntervalMatrix;
 use rustc_index::{Idx, IndexVec};
 use rustc_middle::mir::{self, BasicBlock, Body, Location};
 
-use crate::framework::{Analysis, Results, ResultsVisitor, visit_results};
+use crate::framework::{Analysis, Direction, Results, ResultsVisitor, visit_results};
 
 /// Maps between a `Location` and a `PointIndex` (and vice versa).
 pub struct DenseLocationMap {
@@ -105,27 +105,47 @@ where
     N: Idx,
     A: Analysis<'tcx, Domain = DenseBitSet<N>>,
 {
-    let values = SparseIntervalMatrix::new(elements.num_points());
-    let mut visitor = Visitor { elements, values };
-    visit_results(
-        body,
-        body.basic_blocks.reverse_postorder().iter().copied(),
-        &mut analysis,
-        &results,
-        &mut visitor,
-    );
-    visitor.values
+    let mut values = SparseIntervalMatrix::new(elements.num_points());
+    let reachable_blocks = mir::traversal::reachable_as_bitset(body);
+    if A::Direction::IS_BACKWARD {
+        // Iterate blocks in decreasing order, to visit locations in decreasing order. This
+        // allows to use the more efficient `prepend` method to interval sets.
+        let callback = |state: &DenseBitSet<N>, location| {
+            let point = elements.point_from_location(location);
+            // Use internal iterator manually as it is much more efficient.
+            state.iter().for_each(|node| values.prepend(node, point));
+        };
+        let mut visitor = Visitor { callback };
+        visit_results(
+            body,
+            // Note the `.rev()`.
+            body.basic_blocks.indices().filter(|&bb| reachable_blocks.contains(bb)).rev(),
+            &mut analysis,
+            &results,
+            &mut visitor,
+        );
+    } else {
+        // Iterate blocks in increasing order, to visit locations in increasing order. This
+        // allows to use the more efficient `append` method to interval sets.
+        let callback = |state: &DenseBitSet<N>, location| {
+            let point = elements.point_from_location(location);
+            // Use internal iterator manually as it is much more efficient.
+            state.iter().for_each(|node| values.append(node, point));
+        };
+        let mut visitor = Visitor { callback };
+        visit_results(body, reachable_blocks.iter(), &mut analysis, &results, &mut visitor);
+    }
+    values
 }
 
-struct Visitor<'a, N: Idx> {
-    elements: &'a DenseLocationMap,
-    values: SparseIntervalMatrix<N, PointIndex>,
+struct Visitor<F> {
+    callback: F,
 }
 
-impl<'tcx, A, N> ResultsVisitor<'tcx, A> for Visitor<'_, N>
+impl<'tcx, A, F> ResultsVisitor<'tcx, A> for Visitor<F>
 where
-    A: Analysis<'tcx, Domain = DenseBitSet<N>>,
-    N: Idx,
+    A: Analysis<'tcx>,
+    F: FnMut(&A::Domain, Location),
 {
     fn visit_after_primary_statement_effect<'mir>(
         &mut self,
@@ -134,11 +154,7 @@ where
         _statement: &'mir mir::Statement<'tcx>,
         location: Location,
     ) {
-        let point = self.elements.point_from_location(location);
-        // Use internal iterator manually as it is much more efficient.
-        state.iter().for_each(|node| {
-            self.values.insert(node, point);
-        });
+        (self.callback)(state, location);
     }
     fn visit_after_primary_terminator_effect<'mir>(
         &mut self,
@@ -148,10 +164,6 @@ where
         _terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
     ) {
-        let point = self.elements.point_from_location(location);
-        // Use internal iterator manually as it is much more efficient.
-        state.iter().for_each(|node| {
-            self.values.insert(node, point);
-        });
+        (self.callback)(state, location);
     }
 }
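The second half of the patch reworks `save_as_intervals` so the visiting order matches the fast path: per the patch's own comments, a backward analysis is driven over blocks in decreasing order and every point is `prepend`ed, while a forward analysis runs in increasing order and `append`s. To share the visiting machinery between the two branches, the concrete visitor (which used to own `elements` and `values`) becomes generic over an `FnMut(&A::Domain, Location)` callback. Below is a reduced, standalone sketch of that callback-visitor pattern; `Visitor`, `visit_all`, `Domain`, and `Location` here are simplified stand-ins for the rustc types, not the real dataflow framework.

```rust
/// Simplified stand-ins for the dataflow types involved.
type Location = usize;
type Domain = Vec<u32>; // the set of live "nodes" at a location

/// Generic visitor: all it knows is how to hand each (state, location) pair
/// to a caller-supplied callback, mirroring `Visitor { callback }` in the patch.
struct Visitor<F> {
    callback: F,
}

impl<F: FnMut(&Domain, Location)> Visitor<F> {
    fn visit(&mut self, state: &Domain, location: Location) {
        (self.callback)(state, location);
    }
}

/// Drive the visitor over locations in the requested order. In the real code
/// this role is played by `visit_results`, fed blocks forward or in reverse.
fn visit_all<F: FnMut(&Domain, Location)>(
    states: &[Domain],
    backward: bool,
    visitor: &mut Visitor<F>,
) {
    if backward {
        for loc in (0..states.len()).rev() {
            visitor.visit(&states[loc], loc);
        }
    } else {
        for loc in 0..states.len() {
            visitor.visit(&states[loc], loc);
        }
    }
}

fn main() {
    let states: Vec<Domain> = vec![vec![0], vec![0, 1], vec![1]];
    let mut seen: Vec<(u32, Location)> = Vec::new();

    // A backward analysis visits locations in decreasing order, so every point
    // could be *prepended*; a forward one visits in increasing order and *appends*.
    let backward = true;
    let mut visitor = Visitor {
        callback: |state: &Domain, loc: Location| {
            for &node in state {
                seen.push((node, loc)); // stands in for values.prepend(node, point)
            }
        },
    };
    visit_all(&states, backward, &mut visitor);
    assert_eq!(seen, vec![(1, 2), (0, 1), (1, 1), (0, 0)]);
    println!("visited: {seen:?}");
}
```

The design point is that the two branches need closures that capture `values` mutably and differ only in which method they call, so making `Visitor<F>` generic over the callback lets the same visiting machinery serve both directions.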
