Diffstat (limited to 'compiler/rustc_mir_build')
-rw-r--r--  compiler/rustc_mir_build/src/build/expr/stmt.rs | 39
-rw-r--r--  compiler/rustc_mir_build/src/build/scope.rs     | 86
-rw-r--r--  compiler/rustc_mir_build/src/lints.rs           | 16
3 files changed, 139 insertions(+), 2 deletions(-)
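
For orientation before the hunks below: `become` is the surface syntax this patch lowers, requesting a guaranteed tail call so the callee reuses the caller's stack frame instead of pushing a new one. A minimal usage sketch, assuming a nightly toolchain with the incomplete `explicit_tail_calls` feature gate (the example recurses into the same function, which sidesteps the feature's requirement that caller and callee signatures match):

    #![allow(incomplete_features)]
    #![feature(explicit_tail_calls)]

    fn factorial(n: u64, acc: u64) -> u64 {
        if n == 0 {
            acc
        } else {
            // `become` replaces the current frame rather than growing the stack.
            become factorial(n - 1, acc * n)
        }
    }

    fn main() {
        assert_eq!(factorial(5, 1), 120);
    }
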
diff --git a/compiler/rustc_mir_build/src/build/expr/stmt.rs b/compiler/rustc_mir_build/src/build/expr/stmt.rs
index 2bdeb579a02..88b76c46c90 100644
--- a/compiler/rustc_mir_build/src/build/expr/stmt.rs
+++ b/compiler/rustc_mir_build/src/build/expr/stmt.rs
@@ -2,7 +2,9 @@ use crate::build::scope::BreakableTarget;
 use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
 use rustc_middle::middle::region;
 use rustc_middle::mir::*;
+use rustc_middle::span_bug;
 use rustc_middle::thir::*;
+use rustc_span::source_map::Spanned;
 use tracing::debug;
 
 impl<'a, 'tcx> Builder<'a, 'tcx> {
@@ -91,9 +93,42 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             ExprKind::Return { value } => {
                 this.break_scope(block, value, BreakableTarget::Return, source_info)
             }
-            // FIXME(explicit_tail_calls): properly lower tail calls here
             ExprKind::Become { value } => {
-                this.break_scope(block, Some(value), BreakableTarget::Return, source_info)
+                let v = &this.thir[value];
+                let ExprKind::Scope { value, lint_level, region_scope } = v.kind else {
+                    span_bug!(v.span, "`thir_check_tail_calls` should have disallowed this {v:?}")
+                };
+
+                let v = &this.thir[value];
+                let ExprKind::Call { ref args, fun, fn_span, .. } = v.kind else {
+                    span_bug!(v.span, "`thir_check_tail_calls` should have disallowed this {v:?}")
+                };
+
+                this.in_scope((region_scope, source_info), lint_level, |this| {
+                    let fun = unpack!(block = this.as_local_operand(block, fun));
+                    let args: Box<[_]> = args
+                        .into_iter()
+                        .copied()
+                        .map(|arg| Spanned {
+                            node: unpack!(block = this.as_local_call_operand(block, arg)),
+                            span: this.thir.exprs[arg].span,
+                        })
+                        .collect();
+
+                    this.record_operands_moved(&args);
+
+                    debug!("expr_into_dest: fn_span={:?}", fn_span);
+
+                    unpack!(block = this.break_for_tail_call(block, &args, source_info));
+
+                    this.cfg.terminate(
+                        block,
+                        source_info,
+                        TerminatorKind::TailCall { func: fun, args, fn_span },
+                    );
+
+                    this.cfg.start_new_block().unit()
+                })
             }
             _ => {
                 assert!(
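
The `span_bug!`s in the hunk above encode an invariant rather than handle errors: by the time MIR building sees `ExprKind::Become`, its operand must be a `Scope` wrapping a plain `Call`, and anything else was already rejected by `thir_check_tail_calls`. A rough source-level sketch of what does and does not reach this lowering (hypothetical functions, assuming the nightly `explicit_tail_calls` feature):

    #![allow(incomplete_features)]
    #![feature(explicit_tail_calls)]

    fn id(x: u32) -> u32 {
        x
    }

    fn ok(x: u32) -> u32 {
        // Accepted: the operand of `become` is a direct call, which THIR
        // represents as `Scope { Call { .. } }`, matching the lowering above.
        become id(x)
    }

    // fn rejected(x: u32) -> u32 {
    //     become x + 1 // not a call; `thir_check_tail_calls` reports this,
    //                  // so it never reaches MIR building
    // }

    fn main() {
        assert_eq!(ok(1), 1);
    }
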
diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs
index 5b6de39bb2e..948301e2ece 100644
--- a/compiler/rustc_mir_build/src/build/scope.rs
+++ b/compiler/rustc_mir_build/src/build/scope.rs
@@ -745,6 +745,91 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
     }
 
+    /// Sets up the drops for explicit tail calls.
+    ///
+    /// Unlike other kinds of early exits, tail calls do not go through the drop tree.
+    /// Instead, all scheduled drops are immediately added to the CFG.
+    pub(crate) fn break_for_tail_call(
+        &mut self,
+        mut block: BasicBlock,
+        args: &[Spanned<Operand<'tcx>>],
+        source_info: SourceInfo,
+    ) -> BlockAnd<()> {
+        let arg_drops: Vec<_> = args
+            .iter()
+            .rev()
+            .filter_map(|arg| match &arg.node {
+                Operand::Copy(_) => bug!("copy op in tail call args"),
+                Operand::Move(place) => {
+                    let local =
+                        place.as_local().unwrap_or_else(|| bug!("projection in tail call args"));
+
+                    Some(DropData { source_info, local, kind: DropKind::Value })
+                }
+                Operand::Constant(_) => None,
+            })
+            .collect();
+
+        let mut unwind_to = self.diverge_cleanup_target(
+            self.scopes.scopes.iter().rev().nth(1).unwrap().region_scope,
+            DUMMY_SP,
+        );
+        let unwind_drops = &mut self.scopes.unwind_drops;
+
+        // The innermost scope contains only the destructors for the tail call arguments.
+        // We only want to drop these in case of a panic, so we skip it.
+        for scope in self.scopes.scopes[1..].iter().rev().skip(1) {
+            // FIXME(explicit_tail_calls) code duplication with `build_scope_drops`
+            for drop_data in scope.drops.iter().rev() {
+                let source_info = drop_data.source_info;
+                let local = drop_data.local;
+
+                match drop_data.kind {
+                    DropKind::Value => {
+                        // `unwind_to` should drop the value that we're about to
+                        // schedule. If dropping this value panics, then we continue
+                        // with the *next* value on the unwind path.
+                        debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
+                        debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
+                        unwind_to = unwind_drops.drops[unwind_to].next;
+
+                        let mut unwind_entry_point = unwind_to;
+
+                        // the tail call arguments must be dropped if any of these drops panic
+                        for drop in arg_drops.iter().copied() {
+                            unwind_entry_point = unwind_drops.add_drop(drop, unwind_entry_point);
+                        }
+
+                        unwind_drops.add_entry_point(block, unwind_entry_point);
+
+                        let next = self.cfg.start_new_block();
+                        self.cfg.terminate(
+                            block,
+                            source_info,
+                            TerminatorKind::Drop {
+                                place: local.into(),
+                                target: next,
+                                unwind: UnwindAction::Continue,
+                                replace: false,
+                            },
+                        );
+                        block = next;
+                    }
+                    DropKind::Storage => {
+                        // Only temps and vars need their storage dead.
+                        assert!(local.index() > self.arg_count);
+                        self.cfg.push(
+                            block,
+                            Statement { source_info, kind: StatementKind::StorageDead(local) },
+                        );
+                    }
+                }
+            }
+        }
+
+        block.unit()
+    }
+
     fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock {
         // If we are emitting a `drop` statement, we need to have the cached
         // diverge cleanup pads ready in case that drop panics.
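
One observable consequence of `break_for_tail_call` as written above: drops scheduled in enclosing scopes are emitted into the CFG before the `TailCall` terminator, while the moved argument operands are left to the callee on the normal path. A sketch of the drop order that implies (assuming the nightly `explicit_tail_calls` feature; the expected output is inferred from the code above, not captured from a compiler run):

    #![allow(incomplete_features)]
    #![feature(explicit_tail_calls)]

    struct Loud(&'static str);

    impl Drop for Loud {
        fn drop(&mut self) {
            println!("dropping {}", self.0);
        }
    }

    fn f(guard: Loud, recurse: bool) {
        if recurse {
            let _local = Loud("local"); // dropped *before* the tail call below
            become f(guard, false); // `guard` is moved into the call, not dropped here
        }
        // Non-recursive path: `guard` is dropped when this (reused) frame exits.
    }

    fn main() {
        f(Loud("guard"), true);
        // Expected: "dropping local", then "dropping guard". With an ordinary
        // call the order would be reversed, because `_local` would outlive the
        // nested activation of `f`.
    }
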
@@ -1523,6 +1608,7 @@ impl<'tcx> DropTreeBuilder<'tcx> for Unwind {
             | TerminatorKind::UnwindResume
             | TerminatorKind::UnwindTerminate(_)
             | TerminatorKind::Return
+            | TerminatorKind::TailCall { .. }
             | TerminatorKind::Unreachable
             | TerminatorKind::Yield { .. }
             | TerminatorKind::CoroutineDrop
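
Adding `TailCall` next to `Return` and `Unreachable` above groups it with terminators that carry no unwind edge of their own; the panics that matter happen in the drops `break_for_tail_call` emits before the call, and per its comment ("the tail call arguments must be dropped if any of these drops panic") the already-evaluated argument operands sit on the unwind path of those drops. A sketch of the situation that path exists for (again assuming the nightly `explicit_tail_calls` feature and an unwinding panic runtime; the behavior noted in the comments is an expectation from reading the code, not verified output):

    #![allow(incomplete_features)]
    #![feature(explicit_tail_calls)]

    use std::panic::{catch_unwind, AssertUnwindSafe};

    struct Arg;

    impl Drop for Arg {
        fn drop(&mut self) {
            println!("argument dropped while unwinding");
        }
    }

    struct Boom;

    impl Drop for Boom {
        fn drop(&mut self) {
            panic!("drop emitted before the tail call panics");
        }
    }

    fn f(arg: Arg, recurse: bool) {
        if recurse {
            let _boom = Boom; // dropped before the tail call; its drop panics
            become f(arg, false); // `arg` has already been moved into the operand
        }
    }

    fn main() {
        // The panic from `Boom`'s drop unwinds past the pending tail call, and
        // the moved `Arg` operand is dropped on that unwind path.
        let result = catch_unwind(AssertUnwindSafe(|| f(Arg, true)));
        assert!(result.is_err());
    }
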
diff --git a/compiler/rustc_mir_build/src/lints.rs b/compiler/rustc_mir_build/src/lints.rs
index 1c7aa9f9ed0..263e777d03a 100644
--- a/compiler/rustc_mir_build/src/lints.rs
+++ b/compiler/rustc_mir_build/src/lints.rs
@@ -217,12 +217,28 @@ impl<'mir, 'tcx, C: TerminatorClassifier<'tcx>> TriColorVisitor<BasicBlocks<'tcx
             | TerminatorKind::FalseUnwind { .. }
             | TerminatorKind::Goto { .. }
             | TerminatorKind::SwitchInt { .. } => ControlFlow::Continue(()),
+
+            // Note that a tail call terminator technically returns to the caller,
+            // but for the purposes of this lint it makes sense to count it as
+            // possibly recursive, since it is still a call.
+            //
+            // If this is ever repurposed for something else, it might need to be changed.
+            TerminatorKind::TailCall { .. } => ControlFlow::Continue(()),
         }
     }
 
     fn node_settled(&mut self, bb: BasicBlock) -> ControlFlow<Self::BreakVal> {
         // When we examine a node for the last time, remember it if it is a recursive call.
         let terminator = self.body[bb].terminator();
+
+        // FIXME(explicit_tail_calls): highlight tail calls as "recursive call site"
+        //
+        // We don't want to lint functions that recurse only through tail calls
+        // (such as `fn g() { become g() }`), so just adding `| TailCall { ... }`
+        // here won't work.
+        //
+        // But at the same time we would like to highlight both calls in a function like
+        // `fn f() { if false { become f() } else { f() } }`, so we need to figure something out.
         if self.classifier.is_recursive_terminator(self.tcx, self.body, terminator) {
             self.reachable_recursive_calls.push(terminator.source_info.span);
         }
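
The FIXME in the last hunk is easier to see with two small examples (a sketch assuming the nightly `explicit_tail_calls` feature): a function that recurses only through `become` never grows the stack, so simply treating `TailCall` as a recursive terminator would lint code the comment wants to leave alone, while in mixed code both call sites would ideally be highlighted.

    #![allow(incomplete_features)]
    #![allow(dead_code)]
    #![feature(explicit_tail_calls)]

    // Recurses only through a tail call: no stack growth, so simply adding
    // `TailCall` to `is_recursive_terminator` would flag exactly the kind of
    // function the FIXME says should not be linted.
    fn only_tail() {
        become only_tail();
    }

    // Mixed recursion, mirroring the `fn f()` example in the FIXME: ideally both
    // the `become` call and the plain call would be reported as recursive call
    // sites, which the current classification does not yet do.
    fn mixed(flag: bool) {
        if flag { become mixed(false) } else { mixed(true) }
    }

    fn main() {}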