about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Nicholas Nethercote <n.nethercote@gmail.com>  2024-12-05 14:20:05 +1100
committer Nicholas Nethercote <n.nethercote@gmail.com>  2024-12-05 20:07:26 +1100
commit    a06547508a33e432ed45f814a29a1102d5c6c289 (patch)
tree      c1e1a2da8436ab20a89d3ee3dd41ac23afc7c88f
parent    6ee1a7aaa02f7e7713c8b60d785610983dc69ee7 (diff)
download  rust-a06547508a33e432ed45f814a29a1102d5c6c289.tar.gz
download  rust-a06547508a33e432ed45f814a29a1102d5c6c289.zip
Change `ChunkedBitSet<MovePathIndex>`s to `MixedBitSet`.
It's a performance win because `MixedBitSet` is faster and uses less
memory than `ChunkedBitSet`.

Also reflow some overlong comment lines in
`lint_tail_expr_drop_order.rs`.
-rw-r--r--  compiler/rustc_borrowck/src/lib.rs                                |  4
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/initialized.rs              | 22
-rw-r--r--  compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs     | 51
-rw-r--r--  compiler/rustc_mir_transform/src/remove_uninit_drops.rs           |  4
4 files changed, 42 insertions, 39 deletions
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs
index 9b7474c2208..d7db92da18f 100644
--- a/compiler/rustc_borrowck/src/lib.rs
+++ b/compiler/rustc_borrowck/src/lib.rs
@@ -26,7 +26,7 @@ use rustc_data_structures::graph::dominators::Dominators;
 use rustc_errors::Diag;
 use rustc_hir as hir;
 use rustc_hir::def_id::LocalDefId;
-use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_index::bit_set::{BitSet, MixedBitSet};
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_infer::infer::{
     InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin, TyCtxtInferExt,
@@ -1797,7 +1797,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
         location: Location,
         desired_action: InitializationRequiringAction,
         place_span: (PlaceRef<'tcx>, Span),
-        maybe_uninits: &ChunkedBitSet<MovePathIndex>,
+        maybe_uninits: &MixedBitSet<MovePathIndex>,
         from: u64,
         to: u64,
     ) {
diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
index 2c10d4b1cd3..91677657602 100644
--- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
@@ -1,7 +1,7 @@
 use std::assert_matches::assert_matches;
 
 use rustc_index::Idx;
-use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_index::bit_set::{BitSet, MixedBitSet};
 use rustc_middle::bug;
 use rustc_middle::mir::{self, Body, CallReturnPlaces, Location, TerminatorEdges};
 use rustc_middle::ty::{self, TyCtxt};
@@ -70,7 +70,7 @@ impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
     pub fn is_unwind_dead(
         &self,
         place: mir::Place<'tcx>,
-        state: &MaybeReachable<ChunkedBitSet<MovePathIndex>>,
+        state: &MaybeReachable<MixedBitSet<MovePathIndex>>,
     ) -> bool {
         if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) {
             let mut maybe_live = false;
@@ -244,8 +244,8 @@ impl<'tcx> MaybeUninitializedPlaces<'_, 'tcx> {
 
 impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
     /// There can be many more `MovePathIndex` than there are locals in a MIR body.
-    /// We use a chunked bitset to avoid paying too high a memory footprint.
-    type Domain = MaybeReachable<ChunkedBitSet<MovePathIndex>>;
+    /// We use a mixed bitset to avoid paying too high a memory footprint.
+    type Domain = MaybeReachable<MixedBitSet<MovePathIndex>>;
 
     const NAME: &'static str = "maybe_init";
 
@@ -256,7 +256,7 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
 
     fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
         *state =
-            MaybeReachable::Reachable(ChunkedBitSet::new_empty(self.move_data().move_paths.len()));
+            MaybeReachable::Reachable(MixedBitSet::new_empty(self.move_data().move_paths.len()));
         drop_flag_effects_for_function_entry(self.body, self.move_data, |path, s| {
             assert!(s == DropFlagState::Present);
             state.gen_(path);
@@ -371,14 +371,14 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
 
 impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
     /// There can be many more `MovePathIndex` than there are locals in a MIR body.
-    /// We use a chunked bitset to avoid paying too high a memory footprint.
-    type Domain = ChunkedBitSet<MovePathIndex>;
+    /// We use a mixed bitset to avoid paying too high a memory footprint.
+    type Domain = MixedBitSet<MovePathIndex>;
 
     const NAME: &'static str = "maybe_uninit";
 
     fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
         // bottom = initialized (start_block_effect counters this at outset)
-        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
+        MixedBitSet::new_empty(self.move_data().move_paths.len())
     }
 
     // sets on_entry bits for Arg places
@@ -492,14 +492,14 @@ impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
 
 impl<'tcx> Analysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
     /// There can be many more `InitIndex` than there are locals in a MIR body.
-    /// We use a chunked bitset to avoid paying too high a memory footprint.
-    type Domain = ChunkedBitSet<InitIndex>;
+    /// We use a mixed bitset to avoid paying too high a memory footprint.
+    type Domain = MixedBitSet<InitIndex>;
 
     const NAME: &'static str = "ever_init";
 
     fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
         // bottom = no initialized variables by default
-        ChunkedBitSet::new_empty(self.move_data().inits.len())
+        MixedBitSet::new_empty(self.move_data().inits.len())
     }
 
     fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
diff --git a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
index 732a9cd9890..7fb421dea0c 100644
--- a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
+++ b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
@@ -7,7 +7,7 @@ use rustc_data_structures::unord::{UnordMap, UnordSet};
 use rustc_errors::Subdiagnostic;
 use rustc_hir::CRATE_HIR_ID;
 use rustc_hir::def_id::{DefId, LocalDefId};
-use rustc_index::bit_set::ChunkedBitSet;
+use rustc_index::bit_set::MixedBitSet;
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_macros::{LintDiagnostic, Subdiagnostic};
 use rustc_middle::bug;
@@ -49,24 +49,24 @@ struct DropsReachable<'a, 'mir, 'tcx> {
     move_data: &'a MoveData<'tcx>,
     maybe_init: &'a mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
     block_drop_value_info: &'a mut IndexSlice<BasicBlock, MovePathIndexAtBlock>,
-    collected_drops: &'a mut ChunkedBitSet<MovePathIndex>,
-    visited: FxHashMap<BasicBlock, Rc<RefCell<ChunkedBitSet<MovePathIndex>>>>,
+    collected_drops: &'a mut MixedBitSet<MovePathIndex>,
+    visited: FxHashMap<BasicBlock, Rc<RefCell<MixedBitSet<MovePathIndex>>>>,
 }
 
 impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> {
     fn visit(&mut self, block: BasicBlock) {
         let move_set_size = self.move_data.move_paths.len();
-        let make_new_path_set = || Rc::new(RefCell::new(ChunkedBitSet::new_empty(move_set_size)));
+        let make_new_path_set = || Rc::new(RefCell::new(MixedBitSet::new_empty(move_set_size)));
 
         let data = &self.body.basic_blocks[block];
         let Some(terminator) = &data.terminator else { return };
-        // Given that we observe these dropped locals here at `block` so far,
-        // we will try to update the successor blocks.
-        // An occupied entry at `block` in `self.visited` signals that we have visited `block` before.
+        // Given that we observe these dropped locals here at `block` so far, we will try to update
+        // the successor blocks. An occupied entry at `block` in `self.visited` signals that we
+        // have visited `block` before.
         let dropped_local_here =
             Rc::clone(self.visited.entry(block).or_insert_with(make_new_path_set));
-        // We could have invoked reverse lookup for a `MovePathIndex` every time, but unfortunately it is expensive.
-        // Let's cache them in `self.block_drop_value_info`.
+        // We could have invoked reverse lookup for a `MovePathIndex` every time, but unfortunately
+        // it is expensive. Let's cache them in `self.block_drop_value_info`.
         match self.block_drop_value_info[block] {
             MovePathIndexAtBlock::Some(dropped) => {
                 dropped_local_here.borrow_mut().insert(dropped);
@@ -76,23 +76,24 @@ impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> {
                     && let LookupResult::Exact(idx) | LookupResult::Parent(Some(idx)) =
                         self.move_data.rev_lookup.find(place.as_ref())
                 {
-                    // Since we are working with MIRs at a very early stage,
-                    // observing a `drop` terminator is not indicative enough that
-                    // the drop will definitely happen.
-                    // That is decided in the drop elaboration pass instead.
-                    // Therefore, we need to consult with the maybe-initialization information.
+                    // Since we are working with MIRs at a very early stage, observing a `drop`
+                    // terminator is not indicative enough that the drop will definitely happen.
+                    // That is decided in the drop elaboration pass instead. Therefore, we need to
+                    // consult with the maybe-initialization information.
                     self.maybe_init.seek_before_primary_effect(Location {
                         block,
                         statement_index: data.statements.len(),
                     });
 
-                    // Check if the drop of `place` under inspection is really in effect.
-                    // This is true only when `place` may have been initialized along a control flow path from a BID to the drop program point today.
-                    // In other words, this is where the drop of `place` will happen in the future instead.
+                    // Check if the drop of `place` under inspection is really in effect. This is
+                    // true only when `place` may have been initialized along a control flow path
+                    // from a BID to the drop program point today. In other words, this is where
+                    // the drop of `place` will happen in the future instead.
                     if let MaybeReachable::Reachable(maybe_init) = self.maybe_init.get()
                         && maybe_init.contains(idx)
                     {
-                        // We also cache the drop information, so that we do not need to check on data-flow cursor again
+                        // We also cache the drop information, so that we do not need to check on
+                        // data-flow cursor again.
                         self.block_drop_value_info[block] = MovePathIndexAtBlock::Some(idx);
                         dropped_local_here.borrow_mut().insert(idx);
                     } else {
@@ -139,8 +140,9 @@ impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> {
                 // Let's check the observed dropped places in.
                 self.collected_drops.union(&*dropped_local_there.borrow());
                 if self.drop_span.is_none() {
-                    // FIXME(@dingxiangfei2009): it turns out that `self.body.source_scopes` are still a bit wonky.
-                    // There is a high chance that this span still points to a block rather than a statement semicolon.
+                    // FIXME(@dingxiangfei2009): it turns out that `self.body.source_scopes` are
+                    // still a bit wonky. There is a high chance that this span still points to a
+                    // block rather than a statement semicolon.
                     *self.drop_span = Some(terminator.source_info.span);
                 }
                 // Now we have discovered a simple control flow path from a future drop point
@@ -394,10 +396,10 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body<
     for (&block, candidates) in &bid_per_block {
         // We will collect drops on locals on paths between BID points to their actual drop locations
         // into `all_locals_dropped`.
-        let mut all_locals_dropped = ChunkedBitSet::new_empty(move_data.move_paths.len());
+        let mut all_locals_dropped = MixedBitSet::new_empty(move_data.move_paths.len());
         let mut drop_span = None;
         for &(_, place) in candidates.iter() {
-            let mut collected_drops = ChunkedBitSet::new_empty(move_data.move_paths.len());
+            let mut collected_drops = MixedBitSet::new_empty(move_data.move_paths.len());
             // ## On detecting change in relative drop order ##
             // Iterate through each BID-containing block `block`.
             // If the place `P` targeted by the BID is "maybe initialized",
@@ -425,8 +427,9 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body<
 
         // We shall now exclude some local bindings for the following cases.
         {
-            let mut to_exclude = ChunkedBitSet::new_empty(all_locals_dropped.domain_size());
-            // We will now do subtraction from the candidate dropped locals, because of the following reasons.
+            let mut to_exclude = MixedBitSet::new_empty(all_locals_dropped.domain_size());
+            // We will now do subtraction from the candidate dropped locals, because of the
+            // following reasons.
             for path_idx in all_locals_dropped.iter() {
                 let move_path = &move_data.move_paths[path_idx];
                 let dropped_local = move_path.place.local;
diff --git a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
index f786c676e9e..e955d8277a4 100644
--- a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
+++ b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
@@ -1,5 +1,5 @@
 use rustc_abi::FieldIdx;
-use rustc_index::bit_set::ChunkedBitSet;
+use rustc_index::bit_set::MixedBitSet;
 use rustc_middle::mir::{Body, TerminatorKind};
 use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, VariantDef};
 use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
@@ -67,7 +67,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveUninitDrops {
 fn is_needs_drop_and_init<'tcx>(
     tcx: TyCtxt<'tcx>,
     typing_env: ty::TypingEnv<'tcx>,
-    maybe_inits: &ChunkedBitSet<MovePathIndex>,
+    maybe_inits: &MixedBitSet<MovePathIndex>,
     move_data: &MoveData<'tcx>,
     ty: Ty<'tcx>,
     mpi: MovePathIndex,