| author | Boxy <rust@boxyuwu.dev> | 2025-02-25 21:27:44 +0000 |
|---|---|---|
| committer | Boxy <rust@boxyuwu.dev> | 2025-02-25 21:27:44 +0000 |
| commit | d9683df7c2f6d4141b1321e27635d2ce3167eaa4 | |
| tree | dce0d46d1b7d624ec9b9b09b2c1854f6245a5ff4 | /compiler/rustc_mir_transform/src/coverage |
| parent | 46392d1661540e256fd9573d8f06c2784a58c983 | |
| parent | 4ecd70ddd1039a3954056c1071e40278048476fa | |
Merge from rustc
Diffstat (limited to 'compiler/rustc_mir_transform/src/coverage')
8 files changed, 161 insertions, 408 deletions
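
At a high level, this merge pulls coverage-counter creation out of the `InstrumentCoverage` MIR pass and defers it to the `coverage_ids_info` query: the pass now records a `BcbCountersData` (node-flow data plus a priority list) in `FunctionCoverageInfo` and injects one `CoverageKind::VirtualCounter { bcb }` statement per coverage-graph node, and only after MIR optimizations have run does the query decide which surviving nodes get physical counters or counter expressions. The sketch below is a simplified, hypothetical model of that two-phase split (toy `Bcb` and `CounterTerm` types, not the actual rustc API); unlike the real `transcribe_counters`, it gives every surviving node a physical counter instead of building expressions.

```rust
use std::collections::HashSet;

/// Toy stand-in for `BasicCoverageBlock` (hypothetical; not the rustc type).
type Bcb = usize;

/// Phase 1 (MIR pass): data prepared while the coverage graph is still
/// available, and stashed alongside the MIR body.
struct BcbCountersData {
    /// Nodes in the order they should be considered for physical counters.
    priority_list: Vec<Bcb>,
}

/// Phase 2 (codegen-time query): the counter actually assigned to each node.
#[derive(Debug)]
enum CounterTerm {
    /// A real counter slot that will be incremented at runtime.
    Physical(u32),
    /// The node's statement did not survive MIR opts, so its count is zero.
    Zero,
}

/// Simplified transcription: every surviving node gets a physical counter;
/// the real algorithm would give many of them expressions instead.
fn transcribe(data: &BcbCountersData, bcbs_seen: &HashSet<Bcb>) -> Vec<(Bcb, CounterTerm)> {
    let mut next_id = 0u32;
    data.priority_list
        .iter()
        .map(|&bcb| {
            if bcbs_seen.contains(&bcb) {
                let id = next_id;
                next_id += 1;
                (bcb, CounterTerm::Physical(id))
            } else {
                (bcb, CounterTerm::Zero)
            }
        })
        .collect()
}

fn main() {
    let data = BcbCountersData { priority_list: vec![0, 1, 2, 3] };
    // Pretend MIR opts removed the block for bcb2.
    let bcbs_seen: HashSet<Bcb> = [0, 1, 3].into_iter().collect();
    for (bcb, term) in transcribe(&data, &bcbs_seen) {
        println!("bcb{bcb} -> {term:?}");
    }
}
```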
diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs
index adb99a75a9e..5568d42ab8f 100644
--- a/compiler/rustc_mir_transform/src/coverage/counters.rs
+++ b/compiler/rustc_mir_transform/src/coverage/counters.rs
@@ -2,7 +2,6 @@ use std::cmp::Ordering;
 
 use either::Either;
 use itertools::Itertools;
-use rustc_data_structures::captures::Captures;
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
 use rustc_data_structures::graph::DirectedGraph;
 use rustc_index::IndexVec;
@@ -11,31 +10,35 @@ use rustc_middle::mir::coverage::{CounterId, CovTerm, Expression, ExpressionId,
 use crate::coverage::counters::balanced_flow::BalancedFlowGraph;
 use crate::coverage::counters::node_flow::{
-    CounterTerm, NodeCounters, make_node_counters, node_flow_data_for_balanced_graph,
+    CounterTerm, NodeCounters, NodeFlowData, node_flow_data_for_balanced_graph,
 };
 use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
 
 mod balanced_flow;
-mod node_flow;
+pub(crate) mod node_flow;
 mod union_find;
 
-/// Ensures that each BCB node needing a counter has one, by creating physical
-/// counters or counter expressions for nodes as required.
-pub(super) fn make_bcb_counters(
-    graph: &CoverageGraph,
-    bcb_needs_counter: &DenseBitSet<BasicCoverageBlock>,
-) -> CoverageCounters {
+/// Struct containing the results of [`prepare_bcb_counters_data`].
+pub(crate) struct BcbCountersData {
+    pub(crate) node_flow_data: NodeFlowData<BasicCoverageBlock>,
+    pub(crate) priority_list: Vec<BasicCoverageBlock>,
+}
+
+/// Analyzes the coverage graph to create intermediate data structures that
+/// will later be used (during codegen) to create physical counters or counter
+/// expressions for each BCB node that needs one.
+pub(crate) fn prepare_bcb_counters_data(graph: &CoverageGraph) -> BcbCountersData {
     // Create the derived graphs that are necessary for subsequent steps.
     let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable);
     let node_flow_data = node_flow_data_for_balanced_graph(&balanced_graph);
 
-    // Use those graphs to determine which nodes get physical counters, and how
-    // to compute the execution counts of other nodes from those counters.
+    // Also create a "priority list" of coverage graph nodes, to help determine
+    // which ones get physical counters or counter expressions. This needs to
+    // be done now, because the later parts of the counter-creation process
+    // won't have access to the original coverage graph.
    let priority_list = make_node_flow_priority_list(graph, balanced_graph);
-    let node_counters = make_node_counters(&node_flow_data, &priority_list);
 
-    // Convert the counters into a form suitable for embedding into MIR.
-    transcribe_counters(&node_counters, bcb_needs_counter)
+    BcbCountersData { node_flow_data, priority_list }
 }
 
 /// Arranges the nodes in `balanced_graph` into a list, such that earlier nodes
@@ -47,7 +50,7 @@ fn make_node_flow_priority_list(
     // A "reloop" node has exactly one out-edge, which jumps back to the top
     // of an enclosing loop. Reloop nodes are typically visited more times
     // than loop-exit nodes, so try to avoid giving them physical counters.
-    let is_reloop_node = IndexVec::from_fn_n(
+    let is_reloop_node = IndexVec::<BasicCoverageBlock, _>::from_fn_n(
         |node| match graph.successors[node].as_slice() {
             &[succ] => graph.dominates(succ, node),
             _ => false,
@@ -74,31 +77,33 @@
 }
 
 // Converts node counters into a form suitable for embedding into MIR.
-fn transcribe_counters(
+pub(crate) fn transcribe_counters(
     old: &NodeCounters<BasicCoverageBlock>,
     bcb_needs_counter: &DenseBitSet<BasicCoverageBlock>,
+    bcbs_seen: &DenseBitSet<BasicCoverageBlock>,
 ) -> CoverageCounters {
     let mut new = CoverageCounters::with_num_bcbs(bcb_needs_counter.domain_size());
 
     for bcb in bcb_needs_counter.iter() {
+        if !bcbs_seen.contains(bcb) {
+            // This BCB's code was removed by MIR opts, so its counter is always zero.
+            new.set_node_counter(bcb, CovTerm::Zero);
+            continue;
+        }
+
         // Our counter-creation algorithm doesn't guarantee that a node's list
         // of terms starts or ends with a positive term, so partition the
         // counters into "positive" and "negative" lists for easier handling.
-        let (mut pos, mut neg): (Vec<_>, Vec<_>) =
-            old.counter_terms[bcb].iter().partition_map(|&CounterTerm { node, op }| match op {
+        let (mut pos, mut neg): (Vec<_>, Vec<_>) = old.counter_terms[bcb]
+            .iter()
+            // Filter out any BCBs that were removed by MIR opts;
+            // this treats them as having an execution count of 0.
+            .filter(|term| bcbs_seen.contains(term.node))
+            .partition_map(|&CounterTerm { node, op }| match op {
                 Op::Add => Either::Left(node),
                 Op::Subtract => Either::Right(node),
             });
 
-        if pos.is_empty() {
-            // If we somehow end up with no positive terms, fall back to
-            // creating a physical counter. There's no known way for this
-            // to happen, but we can avoid an ICE if it does.
-            debug_assert!(false, "{bcb:?} has no positive counter terms");
-            pos = vec![bcb];
-            neg = vec![];
-        }
-
         // These intermediate sorts are not strictly necessary, but were helpful
         // in reducing churn when switching to the current counter-creation scheme.
         // They also help to slightly decrease the overall size of the expression
@@ -109,14 +114,10 @@ fn transcribe_counters(
         let mut new_counters_for_sites = |sites: Vec<BasicCoverageBlock>| {
             sites.into_iter().map(|node| new.ensure_phys_counter(node)).collect::<Vec<_>>()
         };
-        let mut pos = new_counters_for_sites(pos);
-        let mut neg = new_counters_for_sites(neg);
-
-        // These sorts are also not strictly necessary; see above.
-        pos.sort();
-        neg.sort();
+        let pos = new_counters_for_sites(pos);
+        let neg = new_counters_for_sites(neg);
 
-        let pos_counter = new.make_sum(&pos).expect("`pos` should not be empty");
+        let pos_counter = new.make_sum(&pos).unwrap_or(CovTerm::Zero);
         let new_counter = new.make_subtracted_sum(pos_counter, &neg);
         new.set_node_counter(bcb, new_counter);
     }
@@ -129,15 +130,15 @@ fn transcribe_counters(
 pub(super) struct CoverageCounters {
     /// List of places where a counter-increment statement should be injected
     /// into MIR, each with its corresponding counter ID.
-    phys_counter_for_node: FxIndexMap<BasicCoverageBlock, CounterId>,
-    next_counter_id: CounterId,
+    pub(crate) phys_counter_for_node: FxIndexMap<BasicCoverageBlock, CounterId>,
+    pub(crate) next_counter_id: CounterId,
 
     /// Coverage counters/expressions that are associated with individual BCBs.
-    node_counters: IndexVec<BasicCoverageBlock, Option<CovTerm>>,
+    pub(crate) node_counters: IndexVec<BasicCoverageBlock, Option<CovTerm>>,
 
     /// Table of expression data, associating each expression ID with its
     /// corresponding operator (+ or -) and its LHS/RHS operands.
-    expressions: IndexVec<ExpressionId, Expression>,
+    pub(crate) expressions: IndexVec<ExpressionId, Expression>,
 
     /// Remember expressions that have already been created (or simplified),
     /// so that we don't create unnecessary duplicates.
     expressions_memo: FxHashMap<Expression, CovTerm>,
@@ -188,12 +189,6 @@ impl CoverageCounters {
         self.make_expression(lhs, Op::Subtract, rhs_sum)
     }
 
-    pub(super) fn num_counters(&self) -> usize {
-        let num_counters = self.phys_counter_for_node.len();
-        assert_eq!(num_counters, self.next_counter_id.as_usize());
-        num_counters
-    }
-
     fn set_node_counter(&mut self, bcb: BasicCoverageBlock, counter: CovTerm) -> CovTerm {
         let existing = self.node_counters[bcb].replace(counter);
         assert!(
@@ -202,34 +197,4 @@ impl CoverageCounters {
         );
         counter
     }
-
-    pub(super) fn term_for_bcb(&self, bcb: BasicCoverageBlock) -> Option<CovTerm> {
-        self.node_counters[bcb]
-    }
-
-    /// Returns an iterator over all the nodes in the coverage graph that
-    /// should have a counter-increment statement injected into MIR, along with
-    /// each site's corresponding counter ID.
-    pub(super) fn counter_increment_sites(
-        &self,
-    ) -> impl Iterator<Item = (CounterId, BasicCoverageBlock)> + Captures<'_> {
-        self.phys_counter_for_node.iter().map(|(&site, &id)| (id, site))
-    }
-
-    /// Returns an iterator over the subset of BCB nodes that have been associated
-    /// with a counter *expression*, along with the ID of that expression.
-    pub(super) fn bcb_nodes_with_coverage_expressions(
-        &self,
-    ) -> impl Iterator<Item = (BasicCoverageBlock, ExpressionId)> + Captures<'_> {
-        self.node_counters.iter_enumerated().filter_map(|(bcb, &counter)| match counter {
-            // Yield the BCB along with its associated expression ID.
-            Some(CovTerm::Expression(id)) => Some((bcb, id)),
-            // This BCB is associated with a counter or nothing, so skip it.
-            Some(CovTerm::Counter { .. } | CovTerm::Zero) | None => None,
-        })
-    }
-
-    pub(super) fn into_expressions(self) -> IndexVec<ExpressionId, Expression> {
-        self.expressions
-    }
 }
diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs
index 9d80b3af42d..91ed54b8b59 100644
--- a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs
+++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs
@@ -9,6 +9,7 @@ use rustc_data_structures::graph;
 use rustc_index::bit_set::DenseBitSet;
 use rustc_index::{Idx, IndexSlice, IndexVec};
+pub(crate) use rustc_middle::mir::coverage::NodeFlowData;
 use rustc_middle::mir::coverage::Op;
 
 use crate::coverage::counters::union_find::UnionFind;
@@ -16,30 +17,6 @@ use crate::coverage::counters::union_find::UnionFind;
 #[cfg(test)]
 mod tests;
 
-/// Data representing a view of some underlying graph, in which each node's
-/// successors have been merged into a single "supernode".
-///
-/// The resulting supernodes have no obvious meaning on their own.
-/// However, merging successor nodes means that a node's out-edges can all
-/// be combined into a single out-edge, whose flow is the same as the flow
-/// (execution count) of its corresponding node in the original graph.
-///
-/// With all node flows now in the original graph now represented as edge flows
-/// in the merged graph, it becomes possible to analyze the original node flows
-/// using techniques for analyzing edge flows.
-#[derive(Debug)]
-pub(crate) struct NodeFlowData<Node: Idx> {
-    /// Maps each node to the supernode that contains it, indicated by some
-    /// arbitrary "root" node that is part of that supernode.
-    supernodes: IndexVec<Node, Node>,
-    /// For each node, stores the single supernode that all of its successors
-    /// have been merged into.
-    ///
-    /// (Note that each node in a supernode can potentially have a _different_
-    /// successor supernode from its peers.)
-    succ_supernodes: IndexVec<Node, Node>,
-}
-
 /// Creates a "merged" view of an underlying graph.
 ///
 /// The given graph is assumed to have [“balanced flow”](balanced-flow),
diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs
index 392b54c8d81..dcc7c5b91d7 100644
--- a/compiler/rustc_mir_transform/src/coverage/graph.rs
+++ b/compiler/rustc_mir_transform/src/coverage/graph.rs
@@ -2,12 +2,12 @@ use std::cmp::Ordering;
 use std::ops::{Index, IndexMut};
 use std::{mem, slice};
 
-use rustc_data_structures::captures::Captures;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::graph::dominators::Dominators;
 use rustc_data_structures::graph::{self, DirectedGraph, StartNode};
 use rustc_index::IndexVec;
 use rustc_index::bit_set::DenseBitSet;
+pub(crate) use rustc_middle::mir::coverage::{BasicCoverageBlock, START_BCB};
 use rustc_middle::mir::{self, BasicBlock, Terminator, TerminatorKind};
 use tracing::debug;
 
@@ -42,7 +42,7 @@ impl CoverageGraph {
         // `SwitchInt` to have multiple targets to the same destination `BasicBlock`, so
         // de-duplication is required. This is done without reordering the successors.
-        let successors = IndexVec::from_fn_n(
+        let successors = IndexVec::<BasicCoverageBlock, _>::from_fn_n(
             |bcb| {
                 let mut seen_bcbs = FxHashSet::default();
                 let terminator = mir_body[bcbs[bcb].last_bb()].terminator();
@@ -217,7 +217,7 @@ impl CoverageGraph {
     pub(crate) fn reloop_predecessors(
         &self,
         to_bcb: BasicCoverageBlock,
-    ) -> impl Iterator<Item = BasicCoverageBlock> + Captures<'_> {
+    ) -> impl Iterator<Item = BasicCoverageBlock> {
         self.predecessors[to_bcb].iter().copied().filter(move |&pred| self.dominates(to_bcb, pred))
     }
 }
@@ -269,15 +269,6 @@ impl graph::Predecessors for CoverageGraph {
     }
 }
 
-rustc_index::newtype_index! {
-    /// A node in the control-flow graph of CoverageGraph.
-    #[orderable]
-    #[debug_format = "bcb{}"]
-    pub(crate) struct BasicCoverageBlock {
-        const START_BCB = 0;
-    }
-}
-
 /// `BasicCoverageBlockData` holds the data indexed by a `BasicCoverageBlock`.
 ///
 /// A `BasicCoverageBlock` (BCB) represents the maximal-length sequence of MIR `BasicBlock`s without
diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs
index 8d0d92dc367..d83c0d40a7e 100644
--- a/compiler/rustc_mir_transform/src/coverage/mappings.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs
@@ -1,9 +1,7 @@
 use std::collections::BTreeSet;
 
 use rustc_data_structures::fx::FxIndexMap;
-use rustc_data_structures::graph::DirectedGraph;
 use rustc_index::IndexVec;
-use rustc_index::bit_set::DenseBitSet;
 use rustc_middle::mir::coverage::{
     BlockMarkerId, BranchSpan, ConditionId, ConditionInfo, CoverageInfoHi, CoverageKind,
 };
@@ -63,10 +61,6 @@ const MCDC_MAX_BITMAP_SIZE: usize = i32::MAX as usize;
 #[derive(Default)]
 pub(super) struct ExtractedMappings {
-    /// Store our own copy of [`CoverageGraph::num_nodes`], so that we don't
-    /// need access to the whole graph when allocating per-BCB data. This is
-    /// only public so that other code can still use exhaustive destructuring.
-    pub(super) num_bcbs: usize,
     pub(super) code_mappings: Vec<CodeMapping>,
     pub(super) branch_pairs: Vec<BranchPair>,
     pub(super) mcdc_bitmap_bits: usize,
@@ -118,7 +112,6 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>(
     );
 
     ExtractedMappings {
-        num_bcbs: graph.num_nodes(),
         code_mappings,
         branch_pairs,
         mcdc_bitmap_bits,
@@ -127,60 +120,6 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>(
     }
 }
 
-impl ExtractedMappings {
-    pub(super) fn all_bcbs_with_counter_mappings(&self) -> DenseBitSet<BasicCoverageBlock> {
-        // Fully destructure self to make sure we don't miss any fields that have mappings.
-        let Self {
-            num_bcbs,
-            code_mappings,
-            branch_pairs,
-            mcdc_bitmap_bits: _,
-            mcdc_degraded_branches,
-            mcdc_mappings,
-        } = self;
-
-        // Identify which BCBs have one or more mappings.
-        let mut bcbs_with_counter_mappings = DenseBitSet::new_empty(*num_bcbs);
-        let mut insert = |bcb| {
-            bcbs_with_counter_mappings.insert(bcb);
-        };
-
-        for &CodeMapping { span: _, bcb } in code_mappings {
-            insert(bcb);
-        }
-        for &BranchPair { true_bcb, false_bcb, .. } in branch_pairs {
-            insert(true_bcb);
-            insert(false_bcb);
-        }
-        for &MCDCBranch { true_bcb, false_bcb, .. } in mcdc_degraded_branches
-            .iter()
-            .chain(mcdc_mappings.iter().map(|(_, branches)| branches.into_iter()).flatten())
-        {
-            insert(true_bcb);
-            insert(false_bcb);
-        }
-
-        // MC/DC decisions refer to BCBs, but don't require those BCBs to have counters.
-        if bcbs_with_counter_mappings.is_empty() {
-            debug_assert!(
-                mcdc_mappings.is_empty(),
-                "A function with no counter mappings shouldn't have any decisions: {mcdc_mappings:?}",
-            );
-        }
-
-        bcbs_with_counter_mappings
-    }
-
-    /// Returns the set of BCBs that have one or more `Code` mappings.
-    pub(super) fn bcbs_with_ordinary_code_mappings(&self) -> DenseBitSet<BasicCoverageBlock> {
-        let mut bcbs = DenseBitSet::new_empty(self.num_bcbs);
-        for &CodeMapping { span: _, bcb } in &self.code_mappings {
-            bcbs.insert(bcb);
-        }
-        bcbs
-    }
-}
-
 fn resolve_block_markers(
     coverage_info_hi: &CoverageInfoHi,
     mir_body: &mir::Body<'_>,
diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs
index 15487d05a30..1ccae0fd7fe 100644
--- a/compiler/rustc_mir_transform/src/coverage/mod.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mod.rs
@@ -10,7 +10,6 @@ mod unexpand;
 use rustc_hir as hir;
 use rustc_hir::intravisit::{Visitor, walk_expr};
-use rustc_middle::hir::map::Map;
 use rustc_middle::hir::nested_filter;
 use rustc_middle::mir::coverage::{
     CoverageKind, DecisionInfo, FunctionCoverageInfo, Mapping, MappingKind,
@@ -21,7 +20,7 @@ use rustc_span::Span;
 use rustc_span::def_id::LocalDefId;
 use tracing::{debug, debug_span, trace};
 
-use crate::coverage::counters::CoverageCounters;
+use crate::coverage::counters::BcbCountersData;
 use crate::coverage::graph::CoverageGraph;
 use crate::coverage::mappings::ExtractedMappings;
@@ -82,28 +81,21 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:
     let extracted_mappings =
         mappings::extract_all_mapping_info_from_mir(tcx, mir_body, &hir_info, &graph);
 
-    ////////////////////////////////////////////////////
-    // Create an optimized mix of `Counter`s and `Expression`s for the `CoverageGraph`. Ensure
-    // every coverage span has a `Counter` or `Expression` assigned to its `BasicCoverageBlock`
-    // and all `Expression` dependencies (operands) are also generated, for any other
-    // `BasicCoverageBlock`s not already associated with a coverage span.
-    let bcbs_with_counter_mappings = extracted_mappings.all_bcbs_with_counter_mappings();
-    if bcbs_with_counter_mappings.is_empty() {
-        // No relevant spans were found in MIR, so skip instrumenting this function.
-        return;
-    }
-
-    let coverage_counters = counters::make_bcb_counters(&graph, &bcbs_with_counter_mappings);
-
-    let mappings = create_mappings(&extracted_mappings, &coverage_counters);
+    let mappings = create_mappings(&extracted_mappings);
     if mappings.is_empty() {
         // No spans could be converted into valid mappings, so skip this function.
         debug!("no spans could be converted into valid mappings; skipping");
         return;
     }
 
-    inject_coverage_statements(mir_body, &graph, &extracted_mappings, &coverage_counters);
+    // Use the coverage graph to prepare intermediate data that will eventually
+    // be used to assign physical counters and counter expressions to points in
+    // the control-flow graph
+    let BcbCountersData { node_flow_data, priority_list } =
+        counters::prepare_bcb_counters_data(&graph);
+
+    // Inject coverage statements into MIR.
+    inject_coverage_statements(mir_body, &graph);
     inject_mcdc_statements(mir_body, &graph, &extracted_mappings);
 
     let mcdc_num_condition_bitmaps = extracted_mappings
@@ -116,29 +108,25 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:
     mir_body.function_coverage_info = Some(Box::new(FunctionCoverageInfo {
         function_source_hash: hir_info.function_source_hash,
         body_span: hir_info.body_span,
-        num_counters: coverage_counters.num_counters(),
-        mcdc_bitmap_bits: extracted_mappings.mcdc_bitmap_bits,
-        expressions: coverage_counters.into_expressions(),
+
+        node_flow_data,
+        priority_list,
+
         mappings,
+
+        mcdc_bitmap_bits: extracted_mappings.mcdc_bitmap_bits,
         mcdc_num_condition_bitmaps,
     }));
 }
 
-/// For each coverage span extracted from MIR, create a corresponding
-/// mapping.
+/// For each coverage span extracted from MIR, create a corresponding mapping.
 ///
-/// Precondition: All BCBs corresponding to those spans have been given
-/// coverage counters.
-fn create_mappings(
-    extracted_mappings: &ExtractedMappings,
-    coverage_counters: &CoverageCounters,
-) -> Vec<Mapping> {
-    let term_for_bcb =
-        |bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters");
-
+/// FIXME(Zalathar): This used to be where BCBs in the extracted mappings were
+/// resolved to a `CovTerm`. But that is now handled elsewhere, so this
+/// function can potentially be simplified even further.
+fn create_mappings(extracted_mappings: &ExtractedMappings) -> Vec<Mapping> {
     // Fully destructure the mappings struct to make sure we don't miss any kinds.
     let ExtractedMappings {
-        num_bcbs: _,
         code_mappings,
         branch_pairs,
         mcdc_bitmap_bits: _,
@@ -150,23 +138,18 @@ fn create_mappings(
     mappings.extend(code_mappings.iter().map(
         // Ordinary code mappings are the simplest kind.
         |&mappings::CodeMapping { span, bcb }| {
-            let kind = MappingKind::Code(term_for_bcb(bcb));
+            let kind = MappingKind::Code { bcb };
             Mapping { kind, span }
         },
     ));
 
     mappings.extend(branch_pairs.iter().map(
         |&mappings::BranchPair { span, true_bcb, false_bcb }| {
-            let true_term = term_for_bcb(true_bcb);
-            let false_term = term_for_bcb(false_bcb);
-            let kind = MappingKind::Branch { true_term, false_term };
+            let kind = MappingKind::Branch { true_bcb, false_bcb };
             Mapping { kind, span }
         },
     ));
 
-    let term_for_bcb =
-        |bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters");
-
     // MCDC branch mappings are appended with their decisions in case decisions were ignored.
     mappings.extend(mcdc_degraded_branches.iter().map(
         |&mappings::MCDCBranch {
             span,
             true_bcb,
             false_bcb,
             condition_info: _,
             true_index: _,
             false_index: _,
-        }| {
-            let true_term = term_for_bcb(true_bcb);
-            let false_term = term_for_bcb(false_bcb);
-            Mapping { kind: MappingKind::Branch { true_term, false_term }, span }
-        },
+        }| { Mapping { kind: MappingKind::Branch { true_bcb, false_bcb }, span } },
     ));
 
     for (decision, branches) in mcdc_mappings {
@@ -201,12 +180,10 @@ fn create_mappings(
                     true_index: _,
                     false_index: _,
                 }| {
-                    let true_term = term_for_bcb(true_bcb);
-                    let false_term = term_for_bcb(false_bcb);
                     Mapping {
                         kind: MappingKind::MCDCBranch {
-                            true_term,
-                            false_term,
+                            true_bcb,
+                            false_bcb,
                             mcdc_params: condition_info,
                         },
                         span,
@@ -227,41 +204,11 @@ fn create_mappings(
     mappings
 }
 
-/// For each BCB node or BCB edge that has an associated coverage counter,
-/// inject any necessary coverage statements into MIR.
-fn inject_coverage_statements<'tcx>(
-    mir_body: &mut mir::Body<'tcx>,
-    graph: &CoverageGraph,
-    extracted_mappings: &ExtractedMappings,
-    coverage_counters: &CoverageCounters,
-) {
-    // Inject counter-increment statements into MIR.
-    for (id, bcb) in coverage_counters.counter_increment_sites() {
-        let target_bb = graph[bcb].leader_bb();
-        inject_statement(mir_body, CoverageKind::CounterIncrement { id }, target_bb);
-    }
-
-    // For each counter expression that is directly associated with at least one
-    // span, we inject an "expression-used" statement, so that coverage codegen
-    // can check whether the injected statement survived MIR optimization.
-    // (BCB edges can't have spans, so we only need to process BCB nodes here.)
-    //
-    // We only do this for ordinary `Code` mappings, because branch and MC/DC
-    // mappings might have expressions that don't correspond to any single
-    // point in the control-flow graph.
-    //
-    // See the code in `rustc_codegen_llvm::coverageinfo::map_data` that deals
-    // with "expressions seen" and "zero terms".
-    let eligible_bcbs = extracted_mappings.bcbs_with_ordinary_code_mappings();
-    for (bcb, expression_id) in coverage_counters
-        .bcb_nodes_with_coverage_expressions()
-        .filter(|&(bcb, _)| eligible_bcbs.contains(bcb))
-    {
-        inject_statement(
-            mir_body,
-            CoverageKind::ExpressionUsed { id: expression_id },
-            graph[bcb].leader_bb(),
-        );
+/// Inject any necessary coverage statements into MIR, so that they influence codegen.
+fn inject_coverage_statements<'tcx>(mir_body: &mut mir::Body<'tcx>, graph: &CoverageGraph) {
+    for (bcb, data) in graph.iter_enumerated() {
+        let target_bb = data.leader_bb();
+        inject_statement(mir_body, CoverageKind::VirtualCounter { bcb }, target_bb);
     }
 }
 
@@ -343,7 +290,7 @@ fn extract_hir_info<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> ExtractedHir
     let hir_node = tcx.hir_node_by_def_id(def_id);
     let fn_body_id = hir_node.body_id().expect("HIR node is a function with body");
 
-    let hir_body = tcx.hir().body(fn_body_id);
+    let hir_body = tcx.hir_body(fn_body_id);
 
     let maybe_fn_sig = hir_node.fn_sig();
     let is_async_fn = maybe_fn_sig.is_some_and(|fn_sig| fn_sig.header.is_async());
@@ -399,35 +346,37 @@ fn extract_hole_spans_from_hir<'tcx>(
     body_span: Span, // Usually `hir_body.value.span`, but not always
     hir_body: &hir::Body<'tcx>,
 ) -> Vec<Span> {
-    struct HolesVisitor<'hir, F> {
-        hir: Map<'hir>,
-        visit_hole_span: F,
+    struct HolesVisitor<'tcx> {
+        tcx: TyCtxt<'tcx>,
+        body_span: Span,
+        hole_spans: Vec<Span>,
     }
 
-    impl<'hir, F: FnMut(Span)> Visitor<'hir> for HolesVisitor<'hir, F> {
-        /// - We need `NestedFilter::INTRA = true` so that `visit_item` will be called.
-        /// - Bodies of nested items don't actually get visited, because of the
-        ///   `visit_item` override.
-        /// - For nested bodies that are not part of an item, we do want to visit any
-        ///   items contained within them.
-        type NestedFilter = nested_filter::All;
+    impl<'tcx> Visitor<'tcx> for HolesVisitor<'tcx> {
+        /// We have special handling for nested items, but we still want to
+        /// traverse into nested bodies of things that are not considered items,
+        /// such as "anon consts" (e.g. array lengths).
+        type NestedFilter = nested_filter::OnlyBodies;
 
-        fn nested_visit_map(&mut self) -> Self::Map {
-            self.hir
+        fn maybe_tcx(&mut self) -> TyCtxt<'tcx> {
+            self.tcx
         }
 
-        fn visit_item(&mut self, item: &'hir hir::Item<'hir>) {
-            (self.visit_hole_span)(item.span);
+        /// We override `visit_nested_item` instead of `visit_item` because we
+        /// only need the item's span, not the item itself.
+        fn visit_nested_item(&mut self, id: hir::ItemId) -> Self::Result {
+            let span = self.tcx.def_span(id.owner_id.def_id);
+            self.visit_hole_span(span);
             // Having visited this item, we don't care about its children,
             // so don't call `walk_item`.
         }
 
         // We override `visit_expr` instead of the more specific expression
         // visitors, so that we have direct access to the expression span.
-        fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
+        fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
             match expr.kind {
                 hir::ExprKind::Closure(_) | hir::ExprKind::ConstBlock(_) => {
-                    (self.visit_hole_span)(expr.span);
+                    self.visit_hole_span(expr.span);
                     // Having visited this expression, we don't care about its
                     // children, so don't call `walk_expr`.
                 }
@@ -437,18 +386,17 @@ fn extract_hole_spans_from_hir<'tcx>(
             }
         }
     }
-
-    let mut hole_spans = vec![];
-    let mut visitor = HolesVisitor {
-        hir: tcx.hir(),
-        visit_hole_span: |hole_span| {
+    impl HolesVisitor<'_> {
+        fn visit_hole_span(&mut self, hole_span: Span) {
             // Discard any holes that aren't directly visible within the body span.
-            if body_span.contains(hole_span) && body_span.eq_ctxt(hole_span) {
-                hole_spans.push(hole_span);
+            if self.body_span.contains(hole_span) && self.body_span.eq_ctxt(hole_span) {
+                self.hole_spans.push(hole_span);
             }
-        },
-    };
+        }
+    }
+
+    let mut visitor = HolesVisitor { tcx, body_span, hole_spans: vec![] };
 
     visitor.visit_body(hir_body);
-    hole_spans
+    visitor.hole_spans
 }
diff --git a/compiler/rustc_mir_transform/src/coverage/query.rs b/compiler/rustc_mir_transform/src/coverage/query.rs
index a849ed4c3e2..ccf76dc7108 100644
--- a/compiler/rustc_mir_transform/src/coverage/query.rs
+++ b/compiler/rustc_mir_transform/src/coverage/query.rs
@@ -1,10 +1,6 @@
-use rustc_data_structures::captures::Captures;
 use rustc_index::bit_set::DenseBitSet;
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
-use rustc_middle::mir::coverage::{
-    CounterId, CovTerm, CoverageIdsInfo, CoverageKind, Expression, ExpressionId,
-    FunctionCoverageInfo, MappingKind, Op,
-};
+use rustc_middle::mir::coverage::{BasicCoverageBlock, CoverageIdsInfo, CoverageKind, MappingKind};
 use rustc_middle::mir::{Body, Statement, StatementKind};
 use rustc_middle::ty::{self, TyCtxt};
 use rustc_middle::util::Providers;
@@ -12,6 +8,9 @@ use rustc_span::def_id::LocalDefId;
 use rustc_span::sym;
 use tracing::trace;
 
+use crate::coverage::counters::node_flow::make_node_counters;
+use crate::coverage::counters::{CoverageCounters, transcribe_counters};
+
 /// Registers query/hook implementations related to coverage.
 pub(crate) fn provide(providers: &mut Providers) {
     providers.hooks.is_eligible_for_coverage = is_eligible_for_coverage;
@@ -66,7 +65,7 @@ fn coverage_attr_on(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
             Some(_) | None => {
                 // Other possibilities should have been rejected by `rustc_parse::validate_attr`.
                 // Use `span_delayed_bug` to avoid an ICE in failing builds (#127880).
-                tcx.dcx().span_delayed_bug(attr.span, "unexpected value of coverage attribute");
+                tcx.dcx().span_delayed_bug(attr.span(), "unexpected value of coverage attribute");
             }
         }
     }
@@ -89,44 +88,71 @@ fn coverage_ids_info<'tcx>(
     let mir_body = tcx.instance_mir(instance_def);
     let fn_cov_info = mir_body.function_coverage_info.as_deref()?;
 
-    let mut counters_seen = DenseBitSet::new_empty(fn_cov_info.num_counters);
-    let mut expressions_seen = DenseBitSet::new_filled(fn_cov_info.expressions.len());
-
-    // For each expression ID that is directly used by one or more mappings,
-    // mark it as not-yet-seen. This indicates that we expect to see a
-    // corresponding `ExpressionUsed` statement during MIR traversal.
-    for mapping in fn_cov_info.mappings.iter() {
-        // Currently we only worry about ordinary code mappings.
-        // For branch and MC/DC mappings, expressions might not correspond
-        // to any particular point in the control-flow graph.
-        // (Keep this in sync with the injection of `ExpressionUsed`
-        // statements in the `InstrumentCoverage` MIR pass.)
-        if let MappingKind::Code(CovTerm::Expression(id)) = mapping.kind {
-            expressions_seen.remove(id);
-        }
-    }
-
+    // Scan through the final MIR to see which BCBs survived MIR opts.
+    // Any BCB not in this set was optimized away.
+    let mut bcbs_seen = DenseBitSet::new_empty(fn_cov_info.priority_list.len());
     for kind in all_coverage_in_mir_body(mir_body) {
         match *kind {
-            CoverageKind::CounterIncrement { id } => {
-                counters_seen.insert(id);
-            }
-            CoverageKind::ExpressionUsed { id } => {
-                expressions_seen.insert(id);
+            CoverageKind::VirtualCounter { bcb } => {
+                bcbs_seen.insert(bcb);
             }
             _ => {}
         }
     }
 
-    let zero_expressions =
-        identify_zero_expressions(fn_cov_info, &counters_seen, &expressions_seen);
+    // Determine the set of BCBs that are referred to by mappings, and therefore
+    // need a counter. Any node not in this set will only get a counter if it
+    // is part of the counter expression for a node that is in the set.
+    let mut bcb_needs_counter =
+        DenseBitSet::<BasicCoverageBlock>::new_empty(fn_cov_info.priority_list.len());
+    for mapping in &fn_cov_info.mappings {
+        match mapping.kind {
+            MappingKind::Code { bcb } => {
+                bcb_needs_counter.insert(bcb);
+            }
+            MappingKind::Branch { true_bcb, false_bcb } => {
+                bcb_needs_counter.insert(true_bcb);
+                bcb_needs_counter.insert(false_bcb);
+            }
+            MappingKind::MCDCBranch { true_bcb, false_bcb, mcdc_params: _ } => {
+                bcb_needs_counter.insert(true_bcb);
+                bcb_needs_counter.insert(false_bcb);
+            }
+            MappingKind::MCDCDecision(_) => {}
+        }
+    }
 
-    Some(CoverageIdsInfo { counters_seen, zero_expressions })
+    // Clone the priority list so that we can re-sort it.
+    let mut priority_list = fn_cov_info.priority_list.clone();
+    // The first ID in the priority list represents the synthetic "sink" node,
+    // and must remain first so that it _never_ gets a physical counter.
+    debug_assert_eq!(priority_list[0], priority_list.iter().copied().max().unwrap());
+    assert!(!bcbs_seen.contains(priority_list[0]));
+    // Partition the priority list, so that unreachable nodes (removed by MIR opts)
+    // are sorted later and therefore are _more_ likely to get a physical counter.
+    // This is counter-intuitive, but it means that `transcribe_counters` can
+    // easily skip those unused physical counters and replace them with zero.
+    // (The original ordering remains in effect within both partitions.)
+    priority_list[1..].sort_by_key(|&bcb| !bcbs_seen.contains(bcb));
+
+    let node_counters = make_node_counters(&fn_cov_info.node_flow_data, &priority_list);
+    let coverage_counters = transcribe_counters(&node_counters, &bcb_needs_counter, &bcbs_seen);
+
+    let CoverageCounters {
+        phys_counter_for_node, next_counter_id, node_counters, expressions, ..
+    } = coverage_counters;
+
+    Some(CoverageIdsInfo {
+        num_counters: next_counter_id.as_u32(),
+        phys_counter_for_node,
+        term_for_bcb: node_counters,
+        expressions,
+    })
 }
 
 fn all_coverage_in_mir_body<'a, 'tcx>(
     body: &'a Body<'tcx>,
-) -> impl Iterator<Item = &'a CoverageKind> + Captures<'tcx> {
+) -> impl Iterator<Item = &'a CoverageKind> {
     body.basic_blocks.iter().flat_map(|bb_data| &bb_data.statements).filter_map(|statement| {
         match statement.kind {
             StatementKind::Coverage(ref kind) if !is_inlined(body, statement) => Some(kind),
@@ -139,94 +165,3 @@ fn is_inlined(body: &Body<'_>, statement: &Statement<'_>) -> bool {
     let scope_data = &body.source_scopes[statement.source_info.scope];
     scope_data.inlined.is_some() || scope_data.inlined_parent_scope.is_some()
 }
-
-/// Identify expressions that will always have a value of zero, and note their
-/// IDs in a `DenseBitSet`. Mappings that refer to a zero expression can instead
-/// become mappings to a constant zero value.
-///
-/// This function mainly exists to preserve the simplifications that were
-/// already being performed by the Rust-side expression renumbering, so that
-/// the resulting coverage mappings don't get worse.
-fn identify_zero_expressions(
-    fn_cov_info: &FunctionCoverageInfo,
-    counters_seen: &DenseBitSet<CounterId>,
-    expressions_seen: &DenseBitSet<ExpressionId>,
-) -> DenseBitSet<ExpressionId> {
-    // The set of expressions that either were optimized out entirely, or
-    // have zero as both of their operands, and will therefore always have
-    // a value of zero. Other expressions that refer to these as operands
-    // can have those operands replaced with `CovTerm::Zero`.
-    let mut zero_expressions = DenseBitSet::new_empty(fn_cov_info.expressions.len());
-
-    // Simplify a copy of each expression based on lower-numbered expressions,
-    // and then update the set of always-zero expressions if necessary.
-    // (By construction, expressions can only refer to other expressions
-    // that have lower IDs, so one pass is sufficient.)
-    for (id, expression) in fn_cov_info.expressions.iter_enumerated() {
-        if !expressions_seen.contains(id) {
-            // If an expression was not seen, it must have been optimized away,
-            // so any operand that refers to it can be replaced with zero.
-            zero_expressions.insert(id);
-            continue;
-        }
-
-        // We don't need to simplify the actual expression data in the
-        // expressions list; we can just simplify a temporary copy and then
-        // use that to update the set of always-zero expressions.
-        let Expression { mut lhs, op, mut rhs } = *expression;
-
-        // If an expression has an operand that is also an expression, the
-        // operand's ID must be strictly lower. This is what lets us find
-        // all zero expressions in one pass.
-        let assert_operand_expression_is_lower = |operand_id: ExpressionId| {
-            assert!(
-                operand_id < id,
-                "Operand {operand_id:?} should be less than {id:?} in {expression:?}",
-            )
-        };
-
-        // If an operand refers to a counter or expression that is always
-        // zero, then that operand can be replaced with `CovTerm::Zero`.
-        let maybe_set_operand_to_zero = |operand: &mut CovTerm| {
-            if let CovTerm::Expression(id) = *operand {
-                assert_operand_expression_is_lower(id);
-            }
-
-            if is_zero_term(&counters_seen, &zero_expressions, *operand) {
-                *operand = CovTerm::Zero;
-            }
-        };
-        maybe_set_operand_to_zero(&mut lhs);
-        maybe_set_operand_to_zero(&mut rhs);
-
-        // Coverage counter values cannot be negative, so if an expression
-        // involves subtraction from zero, assume that its RHS must also be zero.
-        // (Do this after simplifications that could set the LHS to zero.)
-        if lhs == CovTerm::Zero && op == Op::Subtract {
-            rhs = CovTerm::Zero;
-        }
-
-        // After the above simplifications, if both operands are zero, then
-        // we know that this expression is always zero too.
-        if lhs == CovTerm::Zero && rhs == CovTerm::Zero {
-            zero_expressions.insert(id);
-        }
-    }
-
-    zero_expressions
-}
-
-/// Returns `true` if the given term is known to have a value of zero, taking
-/// into account knowledge of which counters are unused and which expressions
-/// are always zero.
-fn is_zero_term(
-    counters_seen: &DenseBitSet<CounterId>,
-    zero_expressions: &DenseBitSet<ExpressionId>,
-    term: CovTerm,
-) -> bool {
-    match term {
-        CovTerm::Zero => true,
-        CovTerm::Counter(id) => !counters_seen.contains(id),
-        CovTerm::Expression(id) => zero_expressions.contains(id),
-    }
-}
diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs
index 314a86ea52f..b9ed6984ddb 100644
--- a/compiler/rustc_mir_transform/src/coverage/spans.rs
+++ b/compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -1,6 +1,5 @@
 use std::collections::VecDeque;
 
-use rustc_data_structures::captures::Captures;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::mir;
 use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span};
@@ -182,7 +181,7 @@ fn divide_spans_into_buckets(input_covspans: Vec<Covspan>, holes: &[Hole]) -> Ve
 fn drain_front_while<'a, T>(
     queue: &'a mut VecDeque<T>,
     mut pred_fn: impl FnMut(&T) -> bool,
-) -> impl Iterator<Item = T> + Captures<'a> {
+) -> impl Iterator<Item = T> {
     std::iter::from_fn(move || if pred_fn(queue.front()?) { queue.pop_front() } else { None })
 }
 
diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs
index 26ce743be36..73b68d7155c 100644
--- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs
+++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs
@@ -137,8 +137,7 @@ fn filtered_statement_span(statement: &Statement<'_>) -> Option<Span> {
         // These coverage statements should not exist prior to coverage instrumentation.
         StatementKind::Coverage(
-            CoverageKind::CounterIncrement { .. }
-            | CoverageKind::ExpressionUsed { .. }
+            CoverageKind::VirtualCounter { .. }
             | CoverageKind::CondBitmapUpdate { .. }
             | CoverageKind::TestVectorBitmapUpdate { .. },
         ) => bug!(
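
A detail worth calling out from the new `coverage_ids_info` query above: `priority_list[1..].sort_by_key(|&bcb| !bcbs_seen.contains(bcb))` relies on `sort_by_key` being a stable sort, so it acts as a stable partition that moves nodes removed by MIR opts to the back of the list (where they are most likely to be handed the physical counters that `transcribe_counters` then replaces with zero), while preserving the relative order of the surviving nodes. A minimal, self-contained illustration, with plain `usize` values standing in for `BasicCoverageBlock` and a hypothetical `bcbs_seen` closure:

```rust
/// Demonstrates the stable-partition trick used by the new `coverage_ids_info`
/// query: sorting by a boolean key moves "not seen" nodes to the back while
/// preserving the original relative order within each partition.
fn main() {
    type Bcb = usize;

    // Hypothetical priority list; index 0 is the synthetic sink node and stays put.
    let mut priority_list: Vec<Bcb> = vec![9, 3, 1, 4, 2, 7];
    // Pretend MIR opts removed the blocks for bcb1 and bcb4.
    let bcbs_seen = |bcb: Bcb| matches!(bcb, 3 | 2 | 7);

    // `sort_by_key` is stable, so this is a stable partition:
    // seen nodes (key = false) first, unseen nodes (key = true) last.
    priority_list[1..].sort_by_key(|&bcb| !bcbs_seen(bcb));

    assert_eq!(priority_list, vec![9, 3, 2, 7, 1, 4]);
    println!("{priority_list:?}");
}
```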
