author     bors <bors@rust-lang.org>  2025-07-31 05:43:55 +0000
committer  bors <bors@rust-lang.org>  2025-07-31 05:43:55 +0000
commit     cc0a5b73053c62a3df5f84b3ee85079c9b65fa87 (patch)
tree       fff3600fcabfaa5b18abf03493ccab74d9f5c1fc /compiler/rustc_mir_transform/src
parent     32e7a4b92b109c24e9822c862a7c74436b50e564 (diff)
parent     9c6a618a7b54eacad05e3d3659c61e3cbdc75963 (diff)
Auto merge of #144718 - Zalathar:rollup-76lrtf2, r=Zalathar
Rollup of 6 pull requests

Successful merges:

 - rust-lang/rust#135975 (Implement `push_mut`)
 - rust-lang/rust#143672 (Fix Box allocator drop elaboration)
 - rust-lang/rust#144232 (Implement support for `become` and explicit tail call codegen for the LLVM backend)
 - rust-lang/rust#144663 (coverage: Re-land "Enlarge empty spans during MIR instrumentation")
 - rust-lang/rust#144685 (Only extract lang items once in codegen_fn_attrs)
 - rust-lang/rust#144717 (Move `rustc_middle::parameterized`)

r? `@ghost`
`@rustbot` modify labels: rollup
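
One of the rollup's items, rust-lang/rust#144232, adds LLVM codegen support for explicit tail calls. As a minimal sketch of the surface syntax that feature targets (a nightly, feature-gated experiment; the gate name `explicit_tail_calls` and the exact rules are assumptions based on the RFC, not something this diff shows):

    // Sketch only: `become` requests guaranteed tail-call codegen, so the
    // callee reuses the caller's stack frame and deep recursion cannot
    // overflow the stack.
    #![feature(explicit_tail_calls)] // assumed nightly feature gate

    fn gcd(a: u64, b: u64) -> u64 {
        if b == 0 {
            return a;
        }
        become gcd(b, a % b)
    }

    fn main() {
        assert_eq!(gcd(48, 18), 6);
    }
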
Diffstat (limited to 'compiler/rustc_mir_transform/src')
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/spans.rs   | 38
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_drop.rs   | 54
2 files changed, 64 insertions(+), 28 deletions(-)
diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs
index ec76076020e..ddeae093df5 100644
--- a/compiler/rustc_mir_transform/src/coverage/spans.rs
+++ b/compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -1,7 +1,8 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::mir;
 use rustc_middle::ty::TyCtxt;
-use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span};
+use rustc_span::source_map::SourceMap;
+use rustc_span::{BytePos, DesugaringKind, ExpnKind, MacroKind, Span};
 use tracing::instrument;
 
 use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
@@ -83,8 +84,18 @@ pub(super) fn extract_refined_covspans<'tcx>(
     // Discard any span that overlaps with a hole.
     discard_spans_overlapping_holes(&mut covspans, &holes);
 
-    // Perform more refinement steps after holes have been dealt with.
+    // Discard spans that overlap in unwanted ways.
     let mut covspans = remove_unwanted_overlapping_spans(covspans);
+
+    // For all empty spans, either enlarge them to be non-empty, or discard them.
+    let source_map = tcx.sess.source_map();
+    covspans.retain_mut(|covspan| {
+        let Some(span) = ensure_non_empty_span(source_map, covspan.span) else { return false };
+        covspan.span = span;
+        true
+    });
+
+    // Merge covspans that can be merged.
     covspans.dedup_by(|b, a| a.merge_if_eligible(b));
 
     code_mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| {
@@ -230,3 +241,26 @@ fn compare_spans(a: Span, b: Span) -> std::cmp::Ordering {
         // - Both have the same start and span A extends further right
         .then_with(|| Ord::cmp(&a.hi(), &b.hi()).reverse())
 }
+
+fn ensure_non_empty_span(source_map: &SourceMap, span: Span) -> Option<Span> {
+    if !span.is_empty() {
+        return Some(span);
+    }
+
+    // The span is empty, so try to enlarge it to cover an adjacent '{' or '}'.
+    source_map
+        .span_to_source(span, |src, start, end| try {
+            // Adjusting span endpoints by `BytePos(1)` is normally a bug,
+            // but in this case we have specifically checked that the character
+            // we're skipping over is one of two specific ASCII characters, so
+            // adjusting by exactly 1 byte is correct.
+            if src.as_bytes().get(end).copied() == Some(b'{') {
+                Some(span.with_hi(span.hi() + BytePos(1)))
+            } else if start > 0 && src.as_bytes()[start - 1] == b'}' {
+                Some(span.with_lo(span.lo() - BytePos(1)))
+            } else {
+                None
+            }
+        })
+        .ok()?
+}
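
The new `ensure_non_empty_span` above can be modeled outside the compiler as a pure function over a source string and byte offsets. This is a hypothetical standalone sketch, not rustc's API; in the real code, `SourceMap::span_to_source` performs the source lookup and the endpoints are `BytePos` values rather than `usize` indices:

    /// Standalone model of `ensure_non_empty_span`: given an empty range
    /// `start..end` into `src`, widen it by one byte to cover an adjacent
    /// '{' or '}', or return None so the caller can discard it.
    fn widen_empty_range(src: &str, start: usize, end: usize) -> Option<(usize, usize)> {
        if start != end {
            return Some((start, end)); // already non-empty, keep as-is
        }
        let bytes = src.as_bytes();
        if bytes.get(end).copied() == Some(b'{') {
            // Adjusting by exactly one byte is safe only because '{' and
            // '}' are single-byte ASCII characters, as the patch's comment
            // explains.
            Some((start, end + 1))
        } else if start > 0 && bytes[start - 1] == b'}' {
            Some((start - 1, end))
        } else {
            None
        }
    }

    fn main() {
        let src = "fn f() {}";
        // An empty range just before the '{' grows to cover it...
        assert_eq!(widen_empty_range(src, 7, 7), Some((7, 8)));
        // ...while an empty range next to neither brace is discarded.
        assert_eq!(widen_empty_range(src, 3, 3), None);
    }

The effect, per the re-landed change in rust-lang/rust#144663, is that empty coverage spans either become visible one-byte regions anchored to a brace or are dropped before merging.
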
diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs
index de96b1f255a..df4853c1dcb 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drop.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs
@@ -761,24 +761,37 @@ where
 
         let skip_contents = adt.is_union() || adt.is_manually_drop();
         let contents_drop = if skip_contents {
+            if adt.has_dtor(self.tcx()) && self.elaborator.get_drop_flag(self.path).is_some() {
+                // the top-level drop flag is usually cleared by open_drop_for_adt_contents
+                // types with destructors would still need an empty drop ladder to clear it
+
+                // however, these types are only open dropped in `DropShimElaborator`
+                // which does not have drop flags
+                // a future box-like "DerefMove" trait would allow for this case to happen
+                span_bug!(self.source_info.span, "open dropping partially moved union");
+            }
+
             (self.succ, self.unwind, self.dropline)
         } else {
             self.open_drop_for_adt_contents(adt, args)
         };
 
-        if adt.is_box() {
-            // we need to drop the inside of the box before running the destructor
-            let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
-            let unwind = contents_drop
-                .1
-                .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
-            let dropline = contents_drop
-                .2
-                .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
-
-            self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
-        } else if adt.has_dtor(self.tcx()) {
-            self.destructor_call_block(contents_drop)
+        if adt.has_dtor(self.tcx()) {
+            let destructor_block = if adt.is_box() {
+                // we need to drop the inside of the box before running the destructor
+                let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
+                let unwind = contents_drop
+                    .1
+                    .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
+                let dropline = contents_drop
+                    .2
+                    .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
+                self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
+            } else {
+                self.destructor_call_block(contents_drop)
+            };
+
+            self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
         } else {
             contents_drop.0
         }
@@ -982,12 +995,7 @@ where
             unwind.is_cleanup(),
         );
 
-        let destructor_block = self.elaborator.patch().new_block(result);
-
-        let block_start = Location { block: destructor_block, statement_index: 0 };
-        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
-
-        self.drop_flag_test_block(destructor_block, succ, unwind)
+        self.elaborator.patch().new_block(result)
     }
 
     fn destructor_call_block(
@@ -1002,13 +1010,7 @@ where
             && !unwind.is_cleanup()
             && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
         {
-            let destructor_block =
-                self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);
-
-            let block_start = Location { block: destructor_block, statement_index: 0 };
-            self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
-
-            self.drop_flag_test_block(destructor_block, succ, unwind)
+            self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
         } else {
             self.destructor_call_block_sync((succ, unwind))
         }