Diffstat (limited to 'compiler/rustc_mir_transform/src')
 compiler/rustc_mir_transform/src/lib.rs            | 15
 compiler/rustc_mir_transform/src/promote_consts.rs | 98
 2 files changed, 87 insertions(+), 26 deletions(-)
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index e477c068229..9bd5d1a2a97 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -333,13 +333,6 @@ fn mir_promoted(
         body.tainted_by_errors = Some(error_reported);
     }
 
-    let mut required_consts = Vec::new();
-    let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
-    for (bb, bb_data) in traversal::reverse_postorder(&body) {
-        required_consts_visitor.visit_basic_block_data(bb, bb_data);
-    }
-    body.required_consts = required_consts;
-
     // What we need to run borrowck etc.
     let promote_pass = promote_consts::PromoteTemps::default();
     pm::run_passes(
@@ -349,6 +342,14 @@ fn mir_promoted(
         Some(MirPhase::Analysis(AnalysisPhase::Initial)),
     );
 
+    // Promotion generates new consts; we run this after promotion to ensure they are accounted for.
+    let mut required_consts = Vec::new();
+    let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
+    for (bb, bb_data) in traversal::reverse_postorder(&body) {
+        required_consts_visitor.visit_basic_block_data(bb, bb_data);
+    }
+    body.required_consts = required_consts;
+
     let promoted = promote_pass.promoted_fragments.into_inner();
     (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
 }
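
For context on the lib.rs change above: promotion creates new anonymous constants (the "promoted fragments"), so collecting `required_consts` before the `PromoteTemps` pass would miss them; running the `RequiredConstsVisitor` afterwards ensures they are accounted for. A minimal, hypothetical illustration of promotion in user code (not taken from this diff):

    fn answer() -> &'static i32 {
        // The borrowed temporary `40 + 2` is promoted: the compiler lifts it
        // into a new anonymous constant and borrows that instead, which is why
        // the reference can be `'static`. That new constant is exactly the kind
        // of const a pre-promotion `required_consts` collection would miss.
        &(40 + 2)
    }

    fn main() {
        assert_eq!(*answer(), 42);
    }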
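
The promote_consts.rs changes below restrict the backwards-compatibility promotion of `const fn` calls in `const`/`static` initializers to blocks that are guaranteed to execute. A hypothetical sketch of the kind of code this is meant to protect; the names `div` and `S` are invented for illustration and do not appear in the diff:

    const fn div(a: u32, b: u32) -> u32 {
        a / b // fails to evaluate when b == 0
    }

    static S: u32 = {
        if false {
            // Dead code: if calls here were still unconditionally promoted,
            // borrowing the call result could force `div(1, 0)` to be evaluated
            // at compile time, turning code that never runs into a hard error.
            // Limiting promotion to "promotion-safe" blocks avoids that.
            let _x: &u32 = &div(1, 0);
        }
        7
    };

    fn main() {
        assert_eq!(S, 7);
    }
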
diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs
index c14d4ceb091..03ca322e4db 100644
--- a/compiler/rustc_mir_transform/src/promote_consts.rs
+++ b/compiler/rustc_mir_transform/src/promote_consts.rs
@@ -13,6 +13,7 @@
 //! move analysis runs after promotion on broken MIR.
 
 use either::{Left, Right};
+use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_middle::mir;
 use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
@@ -175,6 +176,12 @@ fn collect_temps_and_candidates<'tcx>(
 struct Validator<'a, 'tcx> {
     ccx: &'a ConstCx<'a, 'tcx>,
     temps: &'a mut IndexSlice<Local, TempState>,
+    /// For backwards compatibility, we are promoting function calls in `const`/`static`
+    /// initializers. But we want to avoid evaluating code that might panic and that otherwise would
+    /// not have been evaluated, so we only promote such calls in basic blocks that are guaranteed
+    /// to execute. In other words, we only promote such calls in basic blocks that are definitely
+    /// not dead code. Here we cache the result of computing that set of basic blocks.
+    promotion_safe_blocks: Option<FxHashSet<BasicBlock>>,
 }
 
 impl<'a, 'tcx> std::ops::Deref for Validator<'a, 'tcx> {
@@ -260,7 +267,9 @@ impl<'tcx> Validator<'_, 'tcx> {
                     self.validate_rvalue(rhs)
                 }
                 Right(terminator) => match &terminator.kind {
-                    TerminatorKind::Call { func, args, .. } => self.validate_call(func, args),
+                    TerminatorKind::Call { func, args, .. } => {
+                        self.validate_call(func, args, loc.block)
+                    }
                     TerminatorKind::Yield { .. } => Err(Unpromotable),
                     kind => {
                         span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
@@ -588,29 +597,79 @@ impl<'tcx> Validator<'_, 'tcx> {
         Ok(())
     }
 
+    /// Computes the set of blocks of this MIR that are definitely going to be executed
+    /// if the function returns successfully. That makes it safe to promote calls in them
+    /// that might fail.
+    fn promotion_safe_blocks(body: &mir::Body<'tcx>) -> FxHashSet<BasicBlock> {
+        let mut safe_blocks = FxHashSet::default();
+        let mut safe_block = START_BLOCK;
+        loop {
+            safe_blocks.insert(safe_block);
+            // Let's see if we can find another safe block.
+            safe_block = match body.basic_blocks[safe_block].terminator().kind {
+                TerminatorKind::Goto { target } => target,
+                TerminatorKind::Call { target: Some(target), .. }
+                | TerminatorKind::Drop { target, .. } => {
+                    // This calls a function or the destructor. `target` does not get executed if
+                    // the callee loops or panics. But in both cases the const already fails to
+                    // evaluate, so we are fine considering `target` a safe block for promotion.
+                    target
+                }
+                TerminatorKind::Assert { target, .. } => {
+                    // Similar to above, we only consider successful execution.
+                    target
+                }
+                _ => {
+                    // No next safe block.
+                    break;
+                }
+            };
+        }
+        safe_blocks
+    }
+
+    /// Returns whether the block is "safe" for promotion, which means it cannot be dead code.
+    /// We use this to avoid promoting operations that can fail in dead code.
+    fn is_promotion_safe_block(&mut self, block: BasicBlock) -> bool {
+        let body = self.body;
+        let safe_blocks =
+            self.promotion_safe_blocks.get_or_insert_with(|| Self::promotion_safe_blocks(body));
+        safe_blocks.contains(&block)
+    }
+
     fn validate_call(
         &mut self,
         callee: &Operand<'tcx>,
         args: &[Spanned<Operand<'tcx>>],
+        block: BasicBlock,
     ) -> Result<(), Unpromotable> {
+        // Validate the operands. If they fail, there's no question -- we cannot promote.
+        self.validate_operand(callee)?;
+        for arg in args {
+            self.validate_operand(&arg.node)?;
+        }
+
+        // Functions marked `#[rustc_promotable]` are explicitly allowed to be promoted, so we can
+        // accept them at this point.
         let fn_ty = callee.ty(self.body, self.tcx);
+        if let ty::FnDef(def_id, _) = *fn_ty.kind() {
+            if self.tcx.is_promotable_const_fn(def_id) {
+                return Ok(());
+            }
+        }
 
-        // Inside const/static items, we promote all (eligible) function calls.
-        // Everywhere else, we require `#[rustc_promotable]` on the callee.
-        let promote_all_const_fn = matches!(
+        // Ideally, we'd stop here and reject the rest.
+        // But for backward compatibility, we have to accept some promotion in const/static
+        // initializers. Inline consts are explicitly excluded; they are more recent, so we have no
+        // backwards compatibility reason to allow more promotion inside of them.
+        let promote_all_fn = matches!(
             self.const_kind,
             Some(hir::ConstContext::Static(_) | hir::ConstContext::Const { inline: false })
         );
-        if !promote_all_const_fn {
-            if let ty::FnDef(def_id, _) = *fn_ty.kind() {
-                // Never promote runtime `const fn` calls of
-                // functions without `#[rustc_promotable]`.
-                if !self.tcx.is_promotable_const_fn(def_id) {
-                    return Err(Unpromotable);
-                }
-            }
+        if !promote_all_fn {
+            return Err(Unpromotable);
         }
-
+        // Make sure the callee is a `const fn`.
         let is_const_fn = match *fn_ty.kind() {
             ty::FnDef(def_id, _) => self.tcx.is_const_fn_raw(def_id),
             _ => false,
@@ -618,12 +677,13 @@ impl<'tcx> Validator<'_, 'tcx> {
         if !is_const_fn {
             return Err(Unpromotable);
         }
-
-        self.validate_operand(callee)?;
-        for arg in args {
-            self.validate_operand(&arg.node)?;
+        // The problem is, this may promote calls to functions that panic.
+        // We don't want to introduce compilation errors if there's a panic in a call in dead code.
+        // So we ensure that this is not dead code.
+        if !self.is_promotion_safe_block(block) {
+            return Err(Unpromotable);
         }
-
+        // This passed all checks, so let's accept.
         Ok(())
     }
 }
@@ -634,7 +694,7 @@ fn validate_candidates(
     temps: &mut IndexSlice<Local, TempState>,
     candidates: &[Candidate],
 ) -> Vec<Candidate> {
-    let mut validator = Validator { ccx, temps };
+    let mut validator = Validator { ccx, temps, promotion_safe_blocks: None };
 
     candidates
         .iter()