-rw-r--r--  compiler/rustc_codegen_gcc/messages.ftl | 2
-rw-r--r--  compiler/rustc_codegen_gcc/src/builder.rs | 15
-rw-r--r--  compiler/rustc_codegen_gcc/src/errors.rs | 4
-rw-r--r--  compiler/rustc_codegen_llvm/src/builder.rs | 25
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs | 28
-rw-r--r--  compiler/rustc_codegen_llvm/src/llvm/ffi.rs | 11
-rw-r--r--  compiler/rustc_codegen_ssa/src/codegen_attrs.rs | 8
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/block.rs | 73
-rw-r--r--  compiler/rustc_codegen_ssa/src/traits/builder.rs | 12
-rw-r--r--  compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp | 26
-rw-r--r--  compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs | 5
-rw-r--r--  compiler/rustc_metadata/src/rmeta/mod.rs | 31
-rw-r--r--  compiler/rustc_metadata/src/rmeta/parameterized.rs | 166
-rw-r--r--  compiler/rustc_middle/src/ty/mod.rs | 2
-rw-r--r--  compiler/rustc_middle/src/ty/parameterized.rs | 155
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/spans.rs | 38
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_drop.rs | 54
-rw-r--r--  library/alloc/src/collections/linked_list.rs | 54
-rw-r--r--  library/alloc/src/collections/vec_deque/mod.rs | 96
-rw-r--r--  library/alloc/src/vec/mod.rs | 122
-rw-r--r--  tests/codegen-llvm/become-musttail.rs | 18
-rw-r--r--  tests/coverage/async_closure.cov-map | 21
-rw-r--r--  tests/coverage/try-in-macro.attr.cov-map | 9
-rw-r--r--  tests/coverage/try-in-macro.bang.cov-map | 9
-rw-r--r--  tests/coverage/try-in-macro.derive.cov-map | 9
-rw-r--r--  tests/mir-opt/box_conditional_drop_allocator.main.ElaborateDrops.diff | 186
-rw-r--r--  tests/mir-opt/box_conditional_drop_allocator.rs | 39
-rw-r--r--  tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff | 2
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff | 2
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff | 4
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff | 4
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff | 4
-rw-r--r--  tests/ui/async-await/async-drop/async-drop-box-allocator.rs | 134
-rw-r--r--  tests/ui/async-await/async-drop/async-drop-box-allocator.run.stdout | 6
-rw-r--r--  tests/ui/async-await/async-drop/async-drop-box.rs | 109
-rw-r--r--  tests/ui/async-await/async-drop/async-drop-box.run.stdout | 6
-rw-r--r--  tests/ui/drop/box-conditional-drop-allocator.rs | 43
-rw-r--r--  tests/ui/explicit-tail-calls/recursion-etc.rs | 17
38 files changed, 1215 insertions, 334 deletions
diff --git a/compiler/rustc_codegen_gcc/messages.ftl b/compiler/rustc_codegen_gcc/messages.ftl
index a70ac08f01a..b9b77b7d18c 100644
--- a/compiler/rustc_codegen_gcc/messages.ftl
+++ b/compiler/rustc_codegen_gcc/messages.ftl
@@ -4,3 +4,5 @@ codegen_gcc_unwinding_inline_asm =
 codegen_gcc_copy_bitcode = failed to copy bitcode to object file: {$err}
 
 codegen_gcc_lto_bitcode_from_rlib = failed to get bitcode from object file for LTO ({$gcc_err})
+
+codegen_gcc_explicit_tail_calls_unsupported = explicit tail calls with the 'become' keyword are not implemented in the GCC backend
diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs
index a4ec4bf8dea..4aee211e2ef 100644
--- a/compiler/rustc_codegen_gcc/src/builder.rs
+++ b/compiler/rustc_codegen_gcc/src/builder.rs
@@ -34,6 +34,7 @@ use rustc_target::spec::{HasTargetSpec, HasX86AbiOpt, Target, X86Abi};
 
 use crate::common::{SignType, TypeReflection, type_is_pointer};
 use crate::context::CodegenCx;
+use crate::errors;
 use crate::intrinsic::llvm;
 use crate::type_of::LayoutGccExt;
 
@@ -1742,6 +1743,20 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
         call
     }
 
+    fn tail_call(
+        &mut self,
+        _llty: Self::Type,
+        _fn_attrs: Option<&CodegenFnAttrs>,
+        _fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
+        _llfn: Self::Value,
+        _args: &[Self::Value],
+        _funclet: Option<&Self::Funclet>,
+        _instance: Option<Instance<'tcx>>,
+    ) {
+        // FIXME: implement support for explicit tail calls like rustc_codegen_llvm.
+        self.tcx.dcx().emit_fatal(errors::ExplicitTailCallsUnsupported);
+    }
+
     fn zext(&mut self, value: RValue<'gcc>, dest_typ: Type<'gcc>) -> RValue<'gcc> {
         // FIXME(antoyo): this does not zero-extend.
         self.gcc_int_cast(value, dest_typ)
diff --git a/compiler/rustc_codegen_gcc/src/errors.rs b/compiler/rustc_codegen_gcc/src/errors.rs
index 0aa16bd88b4..b252c39c0c0 100644
--- a/compiler/rustc_codegen_gcc/src/errors.rs
+++ b/compiler/rustc_codegen_gcc/src/errors.rs
@@ -19,3 +19,7 @@ pub(crate) struct CopyBitcode {
 pub(crate) struct LtoBitcodeFromRlib {
     pub gcc_err: String,
 }
+
+#[derive(Diagnostic)]
+#[diag(codegen_gcc_explicit_tail_calls_unsupported)]
+pub(crate) struct ExplicitTailCallsUnsupported;
diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index f712b3b83fa..da2a153d819 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -15,6 +15,7 @@ use rustc_codegen_ssa::mir::place::PlaceRef;
 use rustc_codegen_ssa::traits::*;
 use rustc_data_structures::small_c_str::SmallCStr;
 use rustc_hir::def_id::DefId;
+use rustc_middle::bug;
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
 use rustc_middle::ty::layout::{
     FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTypingEnv, LayoutError, LayoutOfHelpers,
@@ -24,7 +25,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use rustc_sanitizers::{cfi, kcfi};
 use rustc_session::config::OptLevel;
 use rustc_span::Span;
-use rustc_target::callconv::FnAbi;
+use rustc_target::callconv::{FnAbi, PassMode};
 use rustc_target::spec::{HasTargetSpec, SanitizerSet, Target};
 use smallvec::SmallVec;
 use tracing::{debug, instrument};
@@ -1431,6 +1432,28 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         call
     }
 
+    fn tail_call(
+        &mut self,
+        llty: Self::Type,
+        fn_attrs: Option<&CodegenFnAttrs>,
+        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
+        llfn: Self::Value,
+        args: &[Self::Value],
+        funclet: Option<&Self::Funclet>,
+        instance: Option<Instance<'tcx>>,
+    ) {
+        let call = self.call(llty, fn_attrs, Some(fn_abi), llfn, args, funclet, instance);
+        llvm::LLVMRustSetTailCallKind(call, llvm::TailCallKind::MustTail);
+
+        match &fn_abi.ret.mode {
+            PassMode::Ignore | PassMode::Indirect { .. } => self.ret_void(),
+            PassMode::Direct(_) | PassMode::Pair { .. } => self.ret(call),
+            mode @ PassMode::Cast { .. } => {
+                bug!("Encountered `PassMode::{mode:?}` during codegen")
+            }
+        }
+    }
+
     fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
         unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) }
     }
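
The new `tail_call` hook above emits the call with LLVM's `musttail` marker and then immediately returns its result (or `ret void` when the return value is ignored or passed indirectly). As a hedged sketch of the surface feature this lowers — illustrative only, not taken from this diff — a self-recursive function using `become` on a nightly compiler would look like:

#![feature(explicit_tail_calls)]

pub fn gcd(a: u64, b: u64) -> u64 {
    match b {
        0 => a,
        // `become` guarantees a tail call; the builder above lowers it to a
        // `musttail call` immediately followed by `ret`.
        _ => become gcd(b, a % b),
    }
}
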
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs
index 39a59560c9d..574463be7ff 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs
@@ -39,7 +39,10 @@ impl Coords {
 /// or other expansions), and if it does happen then skipping a span or function is
 /// better than an ICE or `llvm-cov` failure that the user might have no way to avoid.
 pub(crate) fn make_coords(source_map: &SourceMap, file: &SourceFile, span: Span) -> Option<Coords> {
-    let span = ensure_non_empty_span(source_map, span)?;
+    if span.is_empty() {
+        debug_assert!(false, "can't make coords from empty span: {span:?}");
+        return None;
+    }
 
     let lo = span.lo();
     let hi = span.hi();
@@ -70,29 +73,6 @@ pub(crate) fn make_coords(source_map: &SourceMap, file: &SourceFile, span: Span)
     })
 }
 
-fn ensure_non_empty_span(source_map: &SourceMap, span: Span) -> Option<Span> {
-    if !span.is_empty() {
-        return Some(span);
-    }
-
-    // The span is empty, so try to enlarge it to cover an adjacent '{' or '}'.
-    source_map
-        .span_to_source(span, |src, start, end| try {
-            // Adjusting span endpoints by `BytePos(1)` is normally a bug,
-            // but in this case we have specifically checked that the character
-            // we're skipping over is one of two specific ASCII characters, so
-            // adjusting by exactly 1 byte is correct.
-            if src.as_bytes().get(end).copied() == Some(b'{') {
-                Some(span.with_hi(span.hi() + BytePos(1)))
-            } else if start > 0 && src.as_bytes()[start - 1] == b'}' {
-                Some(span.with_lo(span.lo() - BytePos(1)))
-            } else {
-                None
-            }
-        })
-        .ok()?
-}
-
 /// If `llvm-cov` sees a source region that is improperly ordered (end < start),
 /// it will immediately exit with a fatal error. To prevent that from happening,
 /// discard regions that are improperly ordered, or might be interpreted in a
diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
index 0d0cb5f139e..2443194ff48 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -97,6 +97,16 @@ pub(crate) enum ModuleFlagMergeBehavior {
 
 // Consts for the LLVM CallConv type, pre-cast to usize.
 
+#[derive(Copy, Clone, PartialEq, Debug)]
+#[repr(C)]
+#[allow(dead_code)]
+pub(crate) enum TailCallKind {
+    None = 0,
+    Tail = 1,
+    MustTail = 2,
+    NoTail = 3,
+}
+
 /// LLVM CallingConv::ID. Should we wrap this?
 ///
 /// See <https://github.com/llvm/llvm-project/blob/main/llvm/include/llvm/IR/CallingConv.h>
@@ -1186,6 +1196,7 @@ unsafe extern "C" {
     pub(crate) safe fn LLVMIsGlobalConstant(GlobalVar: &Value) -> Bool;
     pub(crate) safe fn LLVMSetGlobalConstant(GlobalVar: &Value, IsConstant: Bool);
     pub(crate) safe fn LLVMSetTailCall(CallInst: &Value, IsTailCall: Bool);
+    pub(crate) safe fn LLVMRustSetTailCallKind(CallInst: &Value, Kind: TailCallKind);
 
     // Operations on attributes
     pub(crate) fn LLVMCreateStringAttribute(
diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs
index 5c54bce6e03..78edc69b237 100644
--- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs
+++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs
@@ -527,14 +527,14 @@ fn handle_lang_items(
     attrs: &[Attribute],
     codegen_fn_attrs: &mut CodegenFnAttrs,
 ) {
+    let lang_item = lang_items::extract(attrs).and_then(|(name, _)| LangItem::from_name(name));
+
     // Weak lang items have the same semantics as "std internal" symbols in the
     // sense that they're preserved through all our LTO passes and only
     // strippable by the linker.
     //
     // Additionally weak lang items have predetermined symbol names.
-    if let Some((name, _)) = lang_items::extract(attrs)
-        && let Some(lang_item) = LangItem::from_name(name)
-    {
+    if let Some(lang_item) = lang_item {
         if WEAK_LANG_ITEMS.contains(&lang_item) {
             codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL;
         }
@@ -548,8 +548,6 @@ fn handle_lang_items(
     if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
         && codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE)
     {
-        let lang_item =
-            lang_items::extract(attrs).map_or(None, |(name, _span)| LangItem::from_name(name));
         let mut err = tcx
             .dcx()
             .struct_span_err(
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index bde63fd501a..e96590441fa 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -35,6 +35,14 @@ enum MergingSucc {
     True,
 }
 
+/// Indicates to the call terminator codegen whether a call
+/// is a normal call or an explicit tail call.
+#[derive(Debug, PartialEq)]
+enum CallKind {
+    Normal,
+    Tail,
+}
+
 /// Used by `FunctionCx::codegen_terminator` for emitting common patterns
 /// e.g., creating a basic block, calling a function, etc.
 struct TerminatorCodegenHelper<'tcx> {
@@ -160,6 +168,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
         mut unwind: mir::UnwindAction,
         lifetime_ends_after_call: &[(Bx::Value, Size)],
         instance: Option<Instance<'tcx>>,
+        kind: CallKind,
         mergeable_succ: bool,
     ) -> MergingSucc {
         let tcx = bx.tcx();
@@ -221,6 +230,11 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
             }
         };
 
+        if kind == CallKind::Tail {
+            bx.tail_call(fn_ty, fn_attrs, fn_abi, fn_ptr, llargs, self.funclet(fx), instance);
+            return MergingSucc::False;
+        }
+
         if let Some(unwind_block) = unwind_block {
             let ret_llbb = if let Some((_, target)) = destination {
                 fx.llbb(target)
@@ -659,6 +673,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             unwind,
             &[],
             Some(drop_instance),
+            CallKind::Normal,
             !maybe_null && mergeable_succ,
         )
     }
@@ -747,8 +762,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         let (fn_abi, llfn, instance) = common::build_langcall(bx, span, lang_item);
 
         // Codegen the actual panic invoke/call.
-        let merging_succ =
-            helper.do_call(self, bx, fn_abi, llfn, &args, None, unwind, &[], Some(instance), false);
+        let merging_succ = helper.do_call(
+            self,
+            bx,
+            fn_abi,
+            llfn,
+            &args,
+            None,
+            unwind,
+            &[],
+            Some(instance),
+            CallKind::Normal,
+            false,
+        );
         assert_eq!(merging_succ, MergingSucc::False);
         MergingSucc::False
     }
@@ -777,6 +803,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             mir::UnwindAction::Unreachable,
             &[],
             Some(instance),
+            CallKind::Normal,
             false,
         );
         assert_eq!(merging_succ, MergingSucc::False);
@@ -845,6 +872,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             unwind,
             &[],
             Some(instance),
+            CallKind::Normal,
             mergeable_succ,
         ))
     }
@@ -860,6 +888,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         target: Option<mir::BasicBlock>,
         unwind: mir::UnwindAction,
         fn_span: Span,
+        kind: CallKind,
         mergeable_succ: bool,
     ) -> MergingSucc {
         let source_info = mir::SourceInfo { span: fn_span, ..terminator.source_info };
@@ -1003,8 +1032,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // We still need to call `make_return_dest` even if there's no `target`, since
         // `fn_abi.ret` could be `PassMode::Indirect`, even if it is uninhabited,
         // and `make_return_dest` adds the return-place indirect pointer to `llargs`.
-        let return_dest = self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs);
-        let destination = target.map(|target| (return_dest, target));
+        let destination = match kind {
+            CallKind::Normal => {
+                let return_dest = self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs);
+                target.map(|target| (return_dest, target))
+            }
+            CallKind::Tail => None,
+        };
 
         // Split the rust-call tupled arguments off.
         let (first_args, untuple) = if sig.abi() == ExternAbi::RustCall
@@ -1020,6 +1054,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // to generate `lifetime_end` when the call returns.
         let mut lifetime_ends_after_call: Vec<(Bx::Value, Size)> = Vec::new();
         'make_args: for (i, arg) in first_args.iter().enumerate() {
+            if kind == CallKind::Tail && matches!(fn_abi.args[i].mode, PassMode::Indirect { .. }) {
+                // FIXME: https://github.com/rust-lang/rust/pull/144232#discussion_r2218543841
+                span_bug!(
+                    fn_span,
+                    "arguments using PassMode::Indirect are currently not supported for tail calls"
+                );
+            }
+
             let mut op = self.codegen_operand(bx, &arg.node);
 
             if let (0, Some(ty::InstanceKind::Virtual(_, idx))) = (i, instance.map(|i| i.def)) {
@@ -1147,6 +1189,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             unwind,
             &lifetime_ends_after_call,
             instance,
+            kind,
             mergeable_succ,
         )
     }
@@ -1388,15 +1431,23 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 target,
                 unwind,
                 fn_span,
+                CallKind::Normal,
                 mergeable_succ(),
             ),
-            mir::TerminatorKind::TailCall { .. } => {
-                // FIXME(explicit_tail_calls): implement tail calls in ssa backend
-                span_bug!(
-                    terminator.source_info.span,
-                    "`TailCall` terminator is not yet supported by `rustc_codegen_ssa`"
-                )
-            }
+            mir::TerminatorKind::TailCall { ref func, ref args, fn_span } => self
+                .codegen_call_terminator(
+                    helper,
+                    bx,
+                    terminator,
+                    func,
+                    args,
+                    mir::Place::from(mir::RETURN_PLACE),
+                    None,
+                    mir::UnwindAction::Unreachable,
+                    fn_span,
+                    CallKind::Tail,
+                    mergeable_succ(),
+                ),
             mir::TerminatorKind::CoroutineDrop | mir::TerminatorKind::Yield { .. } => {
                 bug!("coroutine ops in codegen")
             }
diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs
index 979456a6ba7..4b18146863b 100644
--- a/compiler/rustc_codegen_ssa/src/traits/builder.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs
@@ -595,6 +595,18 @@ pub trait BuilderMethods<'a, 'tcx>:
         funclet: Option<&Self::Funclet>,
         instance: Option<Instance<'tcx>>,
     ) -> Self::Value;
+
+    fn tail_call(
+        &mut self,
+        llty: Self::Type,
+        fn_attrs: Option<&CodegenFnAttrs>,
+        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
+        llfn: Self::Value,
+        args: &[Self::Value],
+        funclet: Option<&Self::Funclet>,
+        instance: Option<Instance<'tcx>>,
+    );
+
     fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
 
     fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value);
diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
index c9814beedd6..588d867bbbf 100644
--- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
@@ -1986,3 +1986,29 @@ extern "C" void LLVMRustSetNoSanitizeHWAddress(LLVMValueRef Global) {
   MD.NoHWAddress = true;
   GV.setSanitizerMetadata(MD);
 }
+
+enum class LLVMRustTailCallKind {
+  None = 0,
+  Tail = 1,
+  MustTail = 2,
+  NoTail = 3
+};
+
+extern "C" void LLVMRustSetTailCallKind(LLVMValueRef Call,
+                                        LLVMRustTailCallKind Kind) {
+  CallInst *CI = unwrap<CallInst>(Call);
+  switch (Kind) {
+  case LLVMRustTailCallKind::None:
+    CI->setTailCallKind(CallInst::TCK_None);
+    break;
+  case LLVMRustTailCallKind::Tail:
+    CI->setTailCallKind(CallInst::TCK_Tail);
+    break;
+  case LLVMRustTailCallKind::MustTail:
+    CI->setTailCallKind(CallInst::TCK_MustTail);
+    break;
+  case LLVMRustTailCallKind::NoTail:
+    CI->setTailCallKind(CallInst::TCK_NoTail);
+    break;
+  }
+}
diff --git a/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs b/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs
index 19369425f81..f3917b55782 100644
--- a/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs
+++ b/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs
@@ -1,6 +1,5 @@
 use rustc_data_structures::owned_slice::OwnedSlice;
 use rustc_hir::def_path_hash_map::{Config as HashMapConfig, DefPathHashMap};
-use rustc_middle::parameterized_over_tcx;
 use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 use rustc_span::def_id::{DefIndex, DefPathHash};
 
@@ -11,10 +10,6 @@ pub(crate) enum DefPathHashMapRef<'tcx> {
     BorrowedFromTcx(&'tcx DefPathHashMap),
 }
 
-parameterized_over_tcx! {
-    DefPathHashMapRef,
-}
-
 impl DefPathHashMapRef<'_> {
     #[inline]
     pub(crate) fn def_path_hash_to_def_index(&self, def_path_hash: &DefPathHash) -> DefIndex {
diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs
index 915c9731688..9de5e12d5bb 100644
--- a/compiler/rustc_metadata/src/rmeta/mod.rs
+++ b/compiler/rustc_metadata/src/rmeta/mod.rs
@@ -6,6 +6,7 @@ use decoder::{DecodeContext, Metadata};
 use def_path_hash_map::DefPathHashMapRef;
 use encoder::EncodeContext;
 pub use encoder::{EncodedMetadata, encode_metadata, rendered_const};
+pub(crate) use parameterized::ParameterizedOverTcx;
 use rustc_abi::{FieldIdx, ReprOptions, VariantIdx};
 use rustc_attr_data_structures::StrippedCfgItem;
 use rustc_data_structures::fx::FxHashMap;
@@ -26,12 +27,10 @@ use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile;
 use rustc_middle::middle::exported_symbols::{ExportedSymbol, SymbolExportInfo};
 use rustc_middle::middle::lib_features::FeatureStability;
 use rustc_middle::middle::resolve_bound_vars::ObjectLifetimeDefault;
+use rustc_middle::mir;
 use rustc_middle::ty::fast_reject::SimplifiedType;
-use rustc_middle::ty::{
-    self, DeducedParamAttrs, ParameterizedOverTcx, Ty, TyCtxt, UnusedGenericParams,
-};
+use rustc_middle::ty::{self, DeducedParamAttrs, Ty, TyCtxt, UnusedGenericParams};
 use rustc_middle::util::Providers;
-use rustc_middle::{mir, trivially_parameterized_over_tcx};
 use rustc_serialize::opaque::FileEncoder;
 use rustc_session::config::{SymbolManglingVersion, TargetModifier};
 use rustc_session::cstore::{CrateDepKind, ForeignModule, LinkagePreference, NativeLib};
@@ -47,6 +46,7 @@ use crate::creader::CrateMetadataRef;
 mod decoder;
 mod def_path_hash_map;
 mod encoder;
+mod parameterized;
 mod table;
 
 pub(crate) fn rustc_version(cfg_version: &'static str) -> String {
@@ -86,10 +86,6 @@ struct LazyValue<T> {
     _marker: PhantomData<fn() -> T>,
 }
 
-impl<T: ParameterizedOverTcx> ParameterizedOverTcx for LazyValue<T> {
-    type Value<'tcx> = LazyValue<T::Value<'tcx>>;
-}
-
 impl<T> LazyValue<T> {
     fn from_position(position: NonZero<usize>) -> LazyValue<T> {
         LazyValue { position, _marker: PhantomData }
@@ -112,10 +108,6 @@ struct LazyArray<T> {
     _marker: PhantomData<fn() -> T>,
 }
 
-impl<T: ParameterizedOverTcx> ParameterizedOverTcx for LazyArray<T> {
-    type Value<'tcx> = LazyArray<T::Value<'tcx>>;
-}
-
 impl<T> Default for LazyArray<T> {
     fn default() -> LazyArray<T> {
         LazyArray::from_position_and_num_elems(NonZero::new(1).unwrap(), 0)
@@ -143,10 +135,6 @@ struct LazyTable<I, T> {
     _marker: PhantomData<fn(I) -> T>,
 }
 
-impl<I: 'static, T: ParameterizedOverTcx> ParameterizedOverTcx for LazyTable<I, T> {
-    type Value<'tcx> = LazyTable<I, T::Value<'tcx>>;
-}
-
 impl<I, T> LazyTable<I, T> {
     fn from_position_and_encoded_size(
         position: NonZero<usize>,
@@ -594,14 +582,3 @@ pub fn provide(providers: &mut Providers) {
     encoder::provide(providers);
     decoder::provide(providers);
 }
-
-trivially_parameterized_over_tcx! {
-    VariantData,
-    RawDefId,
-    TraitImpls,
-    IncoherentImpls,
-    CrateHeader,
-    CrateRoot,
-    CrateDep,
-    AttrFlags,
-}
diff --git a/compiler/rustc_metadata/src/rmeta/parameterized.rs b/compiler/rustc_metadata/src/rmeta/parameterized.rs
new file mode 100644
index 00000000000..7cd399baa42
--- /dev/null
+++ b/compiler/rustc_metadata/src/rmeta/parameterized.rs
@@ -0,0 +1,166 @@
+use std::hash::Hash;
+
+use rustc_data_structures::unord::UnordMap;
+use rustc_hir::def_id::DefIndex;
+use rustc_index::{Idx, IndexVec};
+use rustc_middle::ty::{Binder, EarlyBinder};
+use rustc_span::Symbol;
+
+use crate::rmeta::{LazyArray, LazyTable, LazyValue};
+
+pub(crate) trait ParameterizedOverTcx: 'static {
+    type Value<'tcx>;
+}
+
+impl<T: ParameterizedOverTcx> ParameterizedOverTcx for Option<T> {
+    type Value<'tcx> = Option<T::Value<'tcx>>;
+}
+
+impl<A: ParameterizedOverTcx, B: ParameterizedOverTcx> ParameterizedOverTcx for (A, B) {
+    type Value<'tcx> = (A::Value<'tcx>, B::Value<'tcx>);
+}
+
+impl<T: ParameterizedOverTcx> ParameterizedOverTcx for Vec<T> {
+    type Value<'tcx> = Vec<T::Value<'tcx>>;
+}
+
+impl<I: Idx + 'static, T: ParameterizedOverTcx> ParameterizedOverTcx for IndexVec<I, T> {
+    type Value<'tcx> = IndexVec<I, T::Value<'tcx>>;
+}
+
+impl<I: Hash + Eq + 'static, T: ParameterizedOverTcx> ParameterizedOverTcx for UnordMap<I, T> {
+    type Value<'tcx> = UnordMap<I, T::Value<'tcx>>;
+}
+
+impl<T: ParameterizedOverTcx> ParameterizedOverTcx for Binder<'static, T> {
+    type Value<'tcx> = Binder<'tcx, T::Value<'tcx>>;
+}
+
+impl<T: ParameterizedOverTcx> ParameterizedOverTcx for EarlyBinder<'static, T> {
+    type Value<'tcx> = EarlyBinder<'tcx, T::Value<'tcx>>;
+}
+
+impl<T: ParameterizedOverTcx> ParameterizedOverTcx for LazyValue<T> {
+    type Value<'tcx> = LazyValue<T::Value<'tcx>>;
+}
+
+impl<T: ParameterizedOverTcx> ParameterizedOverTcx for LazyArray<T> {
+    type Value<'tcx> = LazyArray<T::Value<'tcx>>;
+}
+
+impl<I: 'static, T: ParameterizedOverTcx> ParameterizedOverTcx for LazyTable<I, T> {
+    type Value<'tcx> = LazyTable<I, T::Value<'tcx>>;
+}
+
+macro_rules! trivially_parameterized_over_tcx {
+    ($($ty:ty),+ $(,)?) => {
+        $(
+            impl ParameterizedOverTcx for $ty {
+                #[allow(unused_lifetimes)]
+                type Value<'tcx> = $ty;
+            }
+        )*
+    }
+}
+
+trivially_parameterized_over_tcx! {
+    bool,
+    u64,
+    usize,
+    std::string::String,
+    // tidy-alphabetical-start
+    crate::rmeta::AttrFlags,
+    crate::rmeta::CrateDep,
+    crate::rmeta::CrateHeader,
+    crate::rmeta::CrateRoot,
+    crate::rmeta::IncoherentImpls,
+    crate::rmeta::RawDefId,
+    crate::rmeta::TraitImpls,
+    crate::rmeta::VariantData,
+    rustc_abi::ReprOptions,
+    rustc_ast::DelimArgs,
+    rustc_attr_data_structures::ConstStability,
+    rustc_attr_data_structures::DefaultBodyStability,
+    rustc_attr_data_structures::Deprecation,
+    rustc_attr_data_structures::Stability,
+    rustc_attr_data_structures::StrippedCfgItem<rustc_hir::def_id::DefIndex>,
+    rustc_hir::Attribute,
+    rustc_hir::Constness,
+    rustc_hir::CoroutineKind,
+    rustc_hir::Defaultness,
+    rustc_hir::LangItem,
+    rustc_hir::OpaqueTyOrigin<rustc_hir::def_id::DefId>,
+    rustc_hir::PreciseCapturingArgKind<Symbol, Symbol>,
+    rustc_hir::Safety,
+    rustc_hir::def::DefKind,
+    rustc_hir::def::DocLinkResMap,
+    rustc_hir::def_id::DefId,
+    rustc_hir::def_id::DefIndex,
+    rustc_hir::definitions::DefKey,
+    rustc_index::bit_set::DenseBitSet<u32>,
+    rustc_middle::metadata::ModChild,
+    rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs,
+    rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile,
+    rustc_middle::middle::exported_symbols::SymbolExportInfo,
+    rustc_middle::middle::lib_features::FeatureStability,
+    rustc_middle::middle::resolve_bound_vars::ObjectLifetimeDefault,
+    rustc_middle::mir::ConstQualifs,
+    rustc_middle::ty::AnonConstKind,
+    rustc_middle::ty::AssocItemContainer,
+    rustc_middle::ty::AsyncDestructor,
+    rustc_middle::ty::Asyncness,
+    rustc_middle::ty::DeducedParamAttrs,
+    rustc_middle::ty::Destructor,
+    rustc_middle::ty::Generics,
+    rustc_middle::ty::ImplTraitInTraitData,
+    rustc_middle::ty::IntrinsicDef,
+    rustc_middle::ty::TraitDef,
+    rustc_middle::ty::Variance,
+    rustc_middle::ty::Visibility<DefIndex>,
+    rustc_middle::ty::adjustment::CoerceUnsizedInfo,
+    rustc_middle::ty::fast_reject::SimplifiedType,
+    rustc_session::config::TargetModifier,
+    rustc_session::cstore::ForeignModule,
+    rustc_session::cstore::LinkagePreference,
+    rustc_session::cstore::NativeLib,
+    rustc_span::ExpnData,
+    rustc_span::ExpnHash,
+    rustc_span::ExpnId,
+    rustc_span::Ident,
+    rustc_span::SourceFile,
+    rustc_span::Span,
+    rustc_span::Symbol,
+    rustc_span::hygiene::SyntaxContextKey,
+    // tidy-alphabetical-end
+}
+
+// HACK(compiler-errors): This macro rule can only take a fake path,
+// not a real one, due to parsing ambiguity reasons.
+macro_rules! parameterized_over_tcx {
+    ($($( $fake_path:ident )::+ ),+ $(,)?) => {
+        $(
+            impl ParameterizedOverTcx for $( $fake_path )::+ <'static> {
+                type Value<'tcx> = $( $fake_path )::+ <'tcx>;
+            }
+        )*
+    }
+}
+
+parameterized_over_tcx! {
+    // tidy-alphabetical-start
+    crate::rmeta::DefPathHashMapRef,
+    rustc_middle::middle::exported_symbols::ExportedSymbol,
+    rustc_middle::mir::Body,
+    rustc_middle::mir::CoroutineLayout,
+    rustc_middle::mir::interpret::ConstAllocation,
+    rustc_middle::ty::Clause,
+    rustc_middle::ty::ClauseKind,
+    rustc_middle::ty::Const,
+    rustc_middle::ty::ConstConditions,
+    rustc_middle::ty::FnSig,
+    rustc_middle::ty::GenericPredicates,
+    rustc_middle::ty::ImplTraitHeader,
+    rustc_middle::ty::TraitRef,
+    rustc_middle::ty::Ty,
+    // tidy-alphabetical-end
+}
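
The trait and its impls now live in `rustc_metadata`; the pattern lets metadata tables store types at `'static` and re-instantiate them at the borrowing crate's `'tcx` on decode. A minimal self-contained sketch of that pattern, using simplified stand-ins (`Lazy`, `Ty`, and `decode` below are illustrative, not rustc's real types):

use std::marker::PhantomData;

trait ParameterizedOverTcx: 'static {
    type Value<'tcx>;
}

// A deferred-decoding handle, stored in metadata with a `'static` type parameter.
struct Lazy<T> {
    position: usize,
    _marker: PhantomData<fn() -> T>,
}

#[allow(dead_code)]
struct Ty<'tcx>(&'tcx str);

impl ParameterizedOverTcx for Ty<'static> {
    type Value<'tcx> = Ty<'tcx>;
}

impl<T: ParameterizedOverTcx> ParameterizedOverTcx for Lazy<T> {
    type Value<'tcx> = Lazy<T::Value<'tcx>>;
}

// On decode, the `'static` handle is re-instantiated at the caller's `'tcx`.
fn decode<'tcx, T: ParameterizedOverTcx>(lazy: &Lazy<T>) -> Option<T::Value<'tcx>> {
    let _ = lazy.position; // a real decoder would read from the metadata blob here
    None
}

fn main() {
    let lazy_ty: Lazy<Ty<'static>> = Lazy { position: 0, _marker: PhantomData };
    // `decoded` has type Option<Ty<'tcx>> for whatever 'tcx the caller picks.
    let decoded: Option<Ty<'_>> = decode(&lazy_ty);
    assert!(decoded.is_none());
}
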
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index bb70c61cd14..5b7f34e0e77 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -85,7 +85,6 @@ pub use self::fold::*;
 pub use self::instance::{Instance, InstanceKind, ReifyReason, ShortInstance, UnusedGenericParams};
 pub use self::list::{List, ListWithCachedTypeInfo};
 pub use self::opaque_types::OpaqueTypeKey;
-pub use self::parameterized::ParameterizedOverTcx;
 pub use self::pattern::{Pattern, PatternKind};
 pub use self::predicate::{
     AliasTerm, ArgOutlivesPredicate, Clause, ClauseKind, CoercePredicate, ExistentialPredicate,
@@ -158,7 +157,6 @@ mod instance;
 mod intrinsic;
 mod list;
 mod opaque_types;
-mod parameterized;
 mod predicate;
 mod region;
 mod rvalue_scopes;
diff --git a/compiler/rustc_middle/src/ty/parameterized.rs b/compiler/rustc_middle/src/ty/parameterized.rs
deleted file mode 100644
index dbacbe21edb..00000000000
--- a/compiler/rustc_middle/src/ty/parameterized.rs
+++ /dev/null
@@ -1,155 +0,0 @@
-use std::hash::Hash;
-
-use rustc_data_structures::unord::UnordMap;
-use rustc_hir::def_id::DefIndex;
-use rustc_index::{Idx, IndexVec};
-use rustc_span::Symbol;
-
-use crate::ty;
-
-pub trait ParameterizedOverTcx: 'static {
-    type Value<'tcx>;
-}
-
-impl<T: ParameterizedOverTcx> ParameterizedOverTcx for &'static [T] {
-    type Value<'tcx> = &'tcx [T::Value<'tcx>];
-}
-
-impl<T: ParameterizedOverTcx> ParameterizedOverTcx for Option<T> {
-    type Value<'tcx> = Option<T::Value<'tcx>>;
-}
-
-impl<A: ParameterizedOverTcx, B: ParameterizedOverTcx> ParameterizedOverTcx for (A, B) {
-    type Value<'tcx> = (A::Value<'tcx>, B::Value<'tcx>);
-}
-
-impl<T: ParameterizedOverTcx> ParameterizedOverTcx for Vec<T> {
-    type Value<'tcx> = Vec<T::Value<'tcx>>;
-}
-
-impl<I: Idx + 'static, T: ParameterizedOverTcx> ParameterizedOverTcx for IndexVec<I, T> {
-    type Value<'tcx> = IndexVec<I, T::Value<'tcx>>;
-}
-
-impl<I: Hash + Eq + 'static, T: ParameterizedOverTcx> ParameterizedOverTcx for UnordMap<I, T> {
-    type Value<'tcx> = UnordMap<I, T::Value<'tcx>>;
-}
-
-impl<T: ParameterizedOverTcx> ParameterizedOverTcx for ty::Binder<'static, T> {
-    type Value<'tcx> = ty::Binder<'tcx, T::Value<'tcx>>;
-}
-
-impl<T: ParameterizedOverTcx> ParameterizedOverTcx for ty::EarlyBinder<'static, T> {
-    type Value<'tcx> = ty::EarlyBinder<'tcx, T::Value<'tcx>>;
-}
-
-#[macro_export]
-macro_rules! trivially_parameterized_over_tcx {
-    ($($ty:ty),+ $(,)?) => {
-        $(
-            impl $crate::ty::ParameterizedOverTcx for $ty {
-                #[allow(unused_lifetimes)]
-                type Value<'tcx> = $ty;
-            }
-        )*
-    }
-}
-
-trivially_parameterized_over_tcx! {
-    usize,
-    (),
-    u32,
-    u64,
-    bool,
-    std::string::String,
-    crate::metadata::ModChild,
-    crate::middle::codegen_fn_attrs::CodegenFnAttrs,
-    crate::middle::debugger_visualizer::DebuggerVisualizerFile,
-    crate::middle::exported_symbols::SymbolExportInfo,
-    crate::middle::lib_features::FeatureStability,
-    crate::middle::resolve_bound_vars::ObjectLifetimeDefault,
-    crate::mir::ConstQualifs,
-    ty::AsyncDestructor,
-    ty::AssocItemContainer,
-    ty::Asyncness,
-    ty::AnonConstKind,
-    ty::DeducedParamAttrs,
-    ty::Destructor,
-    ty::Generics,
-    ty::ImplPolarity,
-    ty::ImplTraitInTraitData,
-    ty::ReprOptions,
-    ty::TraitDef,
-    ty::UnusedGenericParams,
-    ty::Visibility<DefIndex>,
-    ty::adjustment::CoerceUnsizedInfo,
-    ty::fast_reject::SimplifiedType,
-    ty::IntrinsicDef,
-    rustc_ast::Attribute,
-    rustc_ast::DelimArgs,
-    rustc_attr_data_structures::StrippedCfgItem<rustc_hir::def_id::DefIndex>,
-    rustc_attr_data_structures::ConstStability,
-    rustc_attr_data_structures::DefaultBodyStability,
-    rustc_attr_data_structures::Deprecation,
-    rustc_attr_data_structures::Stability,
-    rustc_hir::Constness,
-    rustc_hir::Defaultness,
-    rustc_hir::Safety,
-    rustc_hir::CoroutineKind,
-    rustc_hir::IsAsync,
-    rustc_hir::LangItem,
-    rustc_hir::def::DefKind,
-    rustc_hir::def::DocLinkResMap,
-    rustc_hir::def_id::DefId,
-    rustc_hir::def_id::DefIndex,
-    rustc_hir::definitions::DefKey,
-    rustc_hir::OpaqueTyOrigin<rustc_hir::def_id::DefId>,
-    rustc_hir::PreciseCapturingArgKind<Symbol, Symbol>,
-    rustc_index::bit_set::DenseBitSet<u32>,
-    rustc_index::bit_set::FiniteBitSet<u32>,
-    rustc_session::cstore::ForeignModule,
-    rustc_session::cstore::LinkagePreference,
-    rustc_session::cstore::NativeLib,
-    rustc_session::config::TargetModifier,
-    rustc_span::ExpnData,
-    rustc_span::ExpnHash,
-    rustc_span::ExpnId,
-    rustc_span::SourceFile,
-    rustc_span::Span,
-    rustc_span::Symbol,
-    rustc_span::def_id::DefPathHash,
-    rustc_span::hygiene::SyntaxContextKey,
-    rustc_span::Ident,
-    rustc_type_ir::Variance,
-    rustc_hir::Attribute,
-}
-
-// HACK(compiler-errors): This macro rule can only take a fake path,
-// not a real, due to parsing ambiguity reasons.
-#[macro_export]
-macro_rules! parameterized_over_tcx {
-    ($($($fake_path:ident)::+),+ $(,)?) => {
-        $(
-            impl $crate::ty::ParameterizedOverTcx for $($fake_path)::+<'static> {
-                type Value<'tcx> = $($fake_path)::+<'tcx>;
-            }
-        )*
-    }
-}
-
-parameterized_over_tcx! {
-    crate::middle::exported_symbols::ExportedSymbol,
-    crate::mir::Body,
-    crate::mir::CoroutineLayout,
-    crate::mir::interpret::ConstAllocation,
-    ty::Ty,
-    ty::FnSig,
-    ty::GenericPredicates,
-    ty::ConstConditions,
-    ty::TraitRef,
-    ty::Const,
-    ty::Predicate,
-    ty::Clause,
-    ty::ClauseKind,
-    ty::ImplTraitHeader,
-}
diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs
index ec76076020e..ddeae093df5 100644
--- a/compiler/rustc_mir_transform/src/coverage/spans.rs
+++ b/compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -1,7 +1,8 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::mir;
 use rustc_middle::ty::TyCtxt;
-use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span};
+use rustc_span::source_map::SourceMap;
+use rustc_span::{BytePos, DesugaringKind, ExpnKind, MacroKind, Span};
 use tracing::instrument;
 
 use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
@@ -83,8 +84,18 @@ pub(super) fn extract_refined_covspans<'tcx>(
     // Discard any span that overlaps with a hole.
     discard_spans_overlapping_holes(&mut covspans, &holes);
 
-    // Perform more refinement steps after holes have been dealt with.
+    // Discard spans that overlap in unwanted ways.
     let mut covspans = remove_unwanted_overlapping_spans(covspans);
+
+    // For all empty spans, either enlarge them to be non-empty, or discard them.
+    let source_map = tcx.sess.source_map();
+    covspans.retain_mut(|covspan| {
+        let Some(span) = ensure_non_empty_span(source_map, covspan.span) else { return false };
+        covspan.span = span;
+        true
+    });
+
+    // Merge covspans that can be merged.
     covspans.dedup_by(|b, a| a.merge_if_eligible(b));
 
     code_mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| {
@@ -230,3 +241,26 @@ fn compare_spans(a: Span, b: Span) -> std::cmp::Ordering {
         // - Both have the same start and span A extends further right
         .then_with(|| Ord::cmp(&a.hi(), &b.hi()).reverse())
 }
+
+fn ensure_non_empty_span(source_map: &SourceMap, span: Span) -> Option<Span> {
+    if !span.is_empty() {
+        return Some(span);
+    }
+
+    // The span is empty, so try to enlarge it to cover an adjacent '{' or '}'.
+    source_map
+        .span_to_source(span, |src, start, end| try {
+            // Adjusting span endpoints by `BytePos(1)` is normally a bug,
+            // but in this case we have specifically checked that the character
+            // we're skipping over is one of two specific ASCII characters, so
+            // adjusting by exactly 1 byte is correct.
+            if src.as_bytes().get(end).copied() == Some(b'{') {
+                Some(span.with_hi(span.hi() + BytePos(1)))
+            } else if start > 0 && src.as_bytes()[start - 1] == b'}' {
+                Some(span.with_lo(span.lo() - BytePos(1)))
+            } else {
+                None
+            }
+        })
+        .ok()?
+}
diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs
index de96b1f255a..df4853c1dcb 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drop.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs
@@ -761,24 +761,37 @@ where
 
         let skip_contents = adt.is_union() || adt.is_manually_drop();
         let contents_drop = if skip_contents {
+            if adt.has_dtor(self.tcx()) && self.elaborator.get_drop_flag(self.path).is_some() {
+                // The top-level drop flag is usually cleared by `open_drop_for_adt_contents`;
+                // types with destructors would still need an empty drop ladder to clear it.
+
+                // However, these types are only open-dropped in `DropShimElaborator`,
+                // which does not have drop flags.
+                // A future box-like "DerefMove" trait would allow for this case to happen.
+                span_bug!(self.source_info.span, "open dropping partially moved union");
+            }
+
             (self.succ, self.unwind, self.dropline)
         } else {
             self.open_drop_for_adt_contents(adt, args)
         };
 
-        if adt.is_box() {
-            // we need to drop the inside of the box before running the destructor
-            let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
-            let unwind = contents_drop
-                .1
-                .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
-            let dropline = contents_drop
-                .2
-                .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
-
-            self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
-        } else if adt.has_dtor(self.tcx()) {
-            self.destructor_call_block(contents_drop)
+        if adt.has_dtor(self.tcx()) {
+            let destructor_block = if adt.is_box() {
+                // we need to drop the inside of the box before running the destructor
+                let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
+                let unwind = contents_drop
+                    .1
+                    .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
+                let dropline = contents_drop
+                    .2
+                    .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
+                self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
+            } else {
+                self.destructor_call_block(contents_drop)
+            };
+
+            self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
         } else {
             contents_drop.0
         }
@@ -982,12 +995,7 @@ where
             unwind.is_cleanup(),
         );
 
-        let destructor_block = self.elaborator.patch().new_block(result);
-
-        let block_start = Location { block: destructor_block, statement_index: 0 };
-        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
-
-        self.drop_flag_test_block(destructor_block, succ, unwind)
+        self.elaborator.patch().new_block(result)
     }
 
     fn destructor_call_block(
@@ -1002,13 +1010,7 @@ where
             && !unwind.is_cleanup()
             && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
         {
-            let destructor_block =
-                self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);
-
-            let block_start = Location { block: destructor_block, statement_index: 0 };
-            self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
-
-            self.drop_flag_test_block(destructor_block, succ, unwind)
+            self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
         } else {
             self.destructor_call_block_sync((succ, unwind))
         }
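
The elaboration change above guards the box's destructor block behind a drop-flag test, so a box that is only conditionally dropped no longer runs its destructor path unconditionally. A hedged sketch of the kind of program this matters for, assuming a nightly compiler with `allocator_api`; `NoisyAlloc` is illustrative and not the allocator used by the tests added in this diff:

#![feature(allocator_api)]
use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

// A custom allocator with drop glue, so dropping the Box must also drop the allocator.
struct NoisyAlloc;

unsafe impl Allocator for NoisyAlloc {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { Global.deallocate(ptr, layout) }
    }
}

impl Drop for NoisyAlloc {
    fn drop(&mut self) {
        println!("dropping allocator");
    }
}

fn main() {
    let b = Box::new_in(String::from("hi"), NoisyAlloc);
    if std::hint::black_box(true) {
        // Conditionally moved: a drop flag now guards the drop at the end of `main`.
        drop(b);
    }
    // The elaborated drop for `b` here must test the flag before running the
    // box's destructor path (contents, then deallocation, then the allocator).
}
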
diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs
index 70c344e49b7..31dfe73fc79 100644
--- a/library/alloc/src/collections/linked_list.rs
+++ b/library/alloc/src/collections/linked_list.rs
@@ -825,7 +825,7 @@ impl<T, A: Allocator> LinkedList<T, A> {
         unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) }
     }
 
-    /// Adds an element first in the list.
+    /// Adds an element to the front of the list.
     ///
     /// This operation should compute in *O*(1) time.
     ///
@@ -844,11 +844,34 @@ impl<T, A: Allocator> LinkedList<T, A> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn push_front(&mut self, elt: T) {
+        let _ = self.push_front_mut(elt);
+    }
+
+    /// Adds an element to the front of the list, returning a reference to it.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(push_mut)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::from([1, 2, 3]);
+    ///
+    /// let ptr = dl.push_front_mut(2);
+    /// *ptr += 4;
+    /// assert_eq!(dl.front().unwrap(), &6);
+    /// ```
+    #[unstable(feature = "push_mut", issue = "135974")]
+    #[must_use = "if you don't need a reference to the value, use `LinkedList::push_front` instead"]
+    pub fn push_front_mut(&mut self, elt: T) -> &mut T {
         let node = Box::new_in(Node::new(elt), &self.alloc);
-        let node_ptr = NonNull::from(Box::leak(node));
+        let mut node_ptr = NonNull::from(Box::leak(node));
         // SAFETY: node_ptr is a unique pointer to a node we boxed with self.alloc and leaked
         unsafe {
             self.push_front_node(node_ptr);
+            &mut node_ptr.as_mut().element
         }
     }
 
@@ -876,7 +899,7 @@ impl<T, A: Allocator> LinkedList<T, A> {
         self.pop_front_node().map(Node::into_element)
     }
 
-    /// Appends an element to the back of a list.
+    /// Adds an element to the back of the list.
     ///
     /// This operation should compute in *O*(1) time.
     ///
@@ -893,11 +916,34 @@ impl<T, A: Allocator> LinkedList<T, A> {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[rustc_confusables("push", "append")]
     pub fn push_back(&mut self, elt: T) {
+        let _ = self.push_back_mut(elt);
+    }
+
+    /// Adds an element to the back of the list, returning a reference to it.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(push_mut)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::from([1, 2, 3]);
+    ///
+    /// let ptr = dl.push_back_mut(2);
+    /// *ptr += 4;
+    /// assert_eq!(dl.back().unwrap(), &6);
+    /// ```
+    #[unstable(feature = "push_mut", issue = "135974")]
+    #[must_use = "if you don't need a reference to the value, use `LinkedList::push_back` instead"]
+    pub fn push_back_mut(&mut self, elt: T) -> &mut T {
         let node = Box::new_in(Node::new(elt), &self.alloc);
-        let node_ptr = NonNull::from(Box::leak(node));
+        let mut node_ptr = NonNull::from(Box::leak(node));
         // SAFETY: node_ptr is a unique pointer to a node we boxed with self.alloc and leaked
         unsafe {
             self.push_back_node(node_ptr);
+            &mut node_ptr.as_mut().element
         }
     }
 
diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs
index 08b1828ff00..2fce5c3e737 100644
--- a/library/alloc/src/collections/vec_deque/mod.rs
+++ b/library/alloc/src/collections/vec_deque/mod.rs
@@ -182,11 +182,16 @@ impl<T, A: Allocator> VecDeque<T, A> {
         unsafe { ptr::read(self.ptr().add(off)) }
     }
 
-    /// Writes an element into the buffer, moving it.
+    /// Writes an element into the buffer, moving it and returning a reference to it.
+    /// # Safety
+    ///
+    /// May only be called if `off < self.capacity()`.
     #[inline]
-    unsafe fn buffer_write(&mut self, off: usize, value: T) {
+    unsafe fn buffer_write(&mut self, off: usize, value: T) -> &mut T {
         unsafe {
-            ptr::write(self.ptr().add(off), value);
+            let ptr = self.ptr().add(off);
+            ptr::write(ptr, value);
+            &mut *ptr
         }
     }
 
@@ -1888,16 +1893,34 @@ impl<T, A: Allocator> VecDeque<T, A> {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[track_caller]
     pub fn push_front(&mut self, value: T) {
+        let _ = self.push_front_mut(value);
+    }
+
+    /// Prepends an element to the deque, returning a reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(push_mut)]
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::from([1, 2, 3]);
+    /// let x = d.push_front_mut(8);
+    /// *x -= 1;
+    /// assert_eq!(d.front(), Some(&7));
+    /// ```
+    #[unstable(feature = "push_mut", issue = "135974")]
+    #[track_caller]
+    #[must_use = "if you don't need a reference to the value, use `VecDeque::push_front` instead"]
+    pub fn push_front_mut(&mut self, value: T) -> &mut T {
         if self.is_full() {
             self.grow();
         }
 
         self.head = self.wrap_sub(self.head, 1);
         self.len += 1;
-
-        unsafe {
-            self.buffer_write(self.head, value);
-        }
+        // SAFETY: We know that self.head is within range of the deque.
+        unsafe { self.buffer_write(self.head, value) }
     }
 
     /// Appends an element to the back of the deque.
@@ -1916,12 +1939,33 @@ impl<T, A: Allocator> VecDeque<T, A> {
     #[rustc_confusables("push", "put", "append")]
     #[track_caller]
     pub fn push_back(&mut self, value: T) {
+        let _ = self.push_back_mut(value);
+    }
+
+    /// Appends an element to the back of the deque, returning a reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(push_mut)]
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::from([1, 2, 3]);
+    /// let x = d.push_back_mut(9);
+    /// *x += 1;
+    /// assert_eq!(d.back(), Some(&10));
+    /// ```
+    #[unstable(feature = "push_mut", issue = "135974")]
+    #[track_caller]
+    #[must_use = "if you don't need a reference to the value, use `VecDeque::push_back` instead"]
+    pub fn push_back_mut(&mut self, value: T) -> &mut T {
         if self.is_full() {
             self.grow();
         }
 
-        unsafe { self.buffer_write(self.to_physical_idx(self.len), value) }
+        let len = self.len;
         self.len += 1;
+        unsafe { self.buffer_write(self.to_physical_idx(len), value) }
     }
 
     #[inline]
@@ -2007,7 +2051,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
     ///
     /// # Panics
     ///
-    /// Panics if `index` is strictly greater than deque's length
+    /// Panics if `index` is strictly greater than the deque's length.
     ///
     /// # Examples
     ///
@@ -2029,7 +2073,37 @@ impl<T, A: Allocator> VecDeque<T, A> {
     #[stable(feature = "deque_extras_15", since = "1.5.0")]
     #[track_caller]
     pub fn insert(&mut self, index: usize, value: T) {
+        let _ = self.insert_mut(index, value);
+    }
+
+    /// Inserts an element at `index` within the deque, shifting all elements
+    /// with indices greater than or equal to `index` towards the back, and
+    /// returning a reference to it.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index` is strictly greater than the deque's length.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(push_mut)]
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut vec_deque = VecDeque::from([1, 2, 3]);
+    ///
+    /// let x = vec_deque.insert_mut(1, 5);
+    /// *x += 7;
+    /// assert_eq!(vec_deque, &[1, 12, 2, 3]);
+    /// ```
+    #[unstable(feature = "push_mut", issue = "135974")]
+    #[track_caller]
+    #[must_use = "if you don't need a reference to the value, use `VecDeque::insert` instead"]
+    pub fn insert_mut(&mut self, index: usize, value: T) -> &mut T {
         assert!(index <= self.len(), "index out of bounds");
+
         if self.is_full() {
             self.grow();
         }
@@ -2042,16 +2116,16 @@ impl<T, A: Allocator> VecDeque<T, A> {
             unsafe {
                 // see `remove()` for explanation why this wrap_copy() call is safe.
                 self.wrap_copy(self.to_physical_idx(index), self.to_physical_idx(index + 1), k);
-                self.buffer_write(self.to_physical_idx(index), value);
                 self.len += 1;
+                self.buffer_write(self.to_physical_idx(index), value)
             }
         } else {
             let old_head = self.head;
             self.head = self.wrap_sub(self.head, 1);
             unsafe {
                 self.wrap_copy(old_head, self.head, index);
-                self.buffer_write(self.to_physical_idx(index), value);
                 self.len += 1;
+                self.buffer_write(self.to_physical_idx(index), value)
             }
         }
     }
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 9856e9c18ec..ce74615dbcc 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -2046,6 +2046,38 @@ impl<T, A: Allocator> Vec<T, A> {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[track_caller]
     pub fn insert(&mut self, index: usize, element: T) {
+        let _ = self.insert_mut(index, element);
+    }
+
+    /// Inserts an element at position `index` within the vector, shifting all
+    /// elements after it to the right, and returning a reference to the new
+    /// element.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index > len`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(push_mut)]
+    /// let mut vec = vec![1, 3, 5, 9];
+    /// let x = vec.insert_mut(3, 6);
+    /// *x += 1;
+    /// assert_eq!(vec, [1, 3, 5, 7, 9]);
+    /// ```
+    ///
+    /// # Time complexity
+    ///
+    /// Takes *O*([`Vec::len`]) time. All items after the insertion index must be
+    /// shifted to the right. In the worst case, all elements are shifted when
+    /// the insertion index is 0.
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    #[unstable(feature = "push_mut", issue = "135974")]
+    #[track_caller]
+    #[must_use = "if you don't need a reference to the value, use `Vec::insert` instead"]
+    pub fn insert_mut(&mut self, index: usize, element: T) -> &mut T {
         #[cold]
         #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
         #[track_caller]
@@ -2067,8 +2099,8 @@ impl<T, A: Allocator> Vec<T, A> {
         unsafe {
             // infallible
             // The spot to put the new value
+            let p = self.as_mut_ptr().add(index);
             {
-                let p = self.as_mut_ptr().add(index);
                 if index < len {
                     // Shift everything over to make space. (Duplicating the
                     // `index`th element into two consecutive places.)
@@ -2079,6 +2111,7 @@ impl<T, A: Allocator> Vec<T, A> {
                 ptr::write(p, element);
             }
             self.set_len(len + 1);
+            &mut *p
         }
     }
 
@@ -2486,18 +2519,7 @@ impl<T, A: Allocator> Vec<T, A> {
     #[rustc_confusables("push_back", "put", "append")]
     #[track_caller]
     pub fn push(&mut self, value: T) {
-        // Inform codegen that the length does not change across grow_one().
-        let len = self.len;
-        // This will panic or abort if we would allocate > isize::MAX bytes
-        // or if the length increment would overflow for zero-sized types.
-        if len == self.buf.capacity() {
-            self.buf.grow_one();
-        }
-        unsafe {
-            let end = self.as_mut_ptr().add(len);
-            ptr::write(end, value);
-            self.len = len + 1;
-        }
+        let _ = self.push_mut(value);
     }
 
     /// Appends an element if there is sufficient spare capacity, otherwise an error is returned
@@ -2538,6 +2560,77 @@ impl<T, A: Allocator> Vec<T, A> {
     #[inline]
     #[unstable(feature = "vec_push_within_capacity", issue = "100486")]
     pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> {
+        self.push_mut_within_capacity(value).map(|_| ())
+    }
+
+    /// Appends an element to the back of a collection, returning a reference to it.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds `isize::MAX` _bytes_.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(push_mut)]
+    ///
+    ///
+    /// let mut vec = vec![1, 2];
+    /// let last = vec.push_mut(3);
+    /// assert_eq!(*last, 3);
+    /// assert_eq!(vec, [1, 2, 3]);
+    ///
+    /// let last = vec.push_mut(3);
+    /// *last += 1;
+    /// assert_eq!(vec, [1, 2, 3, 4]);
+    /// ```
+    ///
+    /// # Time complexity
+    ///
+    /// Takes amortized *O*(1) time. If the vector's length would exceed its
+    /// capacity after the push, *O*(*capacity*) time is taken to copy the
+    /// vector's elements to a larger allocation. This expensive operation is
+    /// offset by the *capacity* *O*(1) insertions it allows.
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    #[unstable(feature = "push_mut", issue = "135974")]
+    #[track_caller]
+    #[must_use = "if you don't need a reference to the value, use `Vec::push` instead"]
+    pub fn push_mut(&mut self, value: T) -> &mut T {
+        // Inform codegen that the length does not change across grow_one().
+        let len = self.len;
+        // This will panic or abort if we would allocate > isize::MAX bytes
+        // or if the length increment would overflow for zero-sized types.
+        if len == self.buf.capacity() {
+            self.buf.grow_one();
+        }
+        unsafe {
+            let end = self.as_mut_ptr().add(len);
+            ptr::write(end, value);
+            self.len = len + 1;
+            // SAFETY: We just wrote a value to the pointer that will live the lifetime of the reference.
+            &mut *end
+        }
+    }
+
+    /// Appends an element and returns a reference to it if there is sufficient spare capacity,
+    /// otherwise an error is returned with the element.
+    ///
+    /// Unlike [`push_mut`], this method will not reallocate when there's insufficient capacity.
+    /// The caller should use [`reserve`] or [`try_reserve`] to ensure that there is enough capacity.
+    ///
+    /// [`push_mut`]: Vec::push_mut
+    /// [`reserve`]: Vec::reserve
+    /// [`try_reserve`]: Vec::try_reserve
+    ///
+    /// # Time complexity
+    ///
+    /// Takes *O*(1) time.
+    #[unstable(feature = "push_mut", issue = "135974")]
+    // #[unstable(feature = "vec_push_within_capacity", issue = "100486")]
+    #[inline]
+    #[must_use = "if you don't need a reference to the value, use `Vec::push_within_capacity` instead"]
+    pub fn push_mut_within_capacity(&mut self, value: T) -> Result<&mut T, T> {
         if self.len == self.buf.capacity() {
             return Err(value);
         }
@@ -2545,8 +2638,9 @@ impl<T, A: Allocator> Vec<T, A> {
             let end = self.as_mut_ptr().add(self.len);
             ptr::write(end, value);
             self.len += 1;
+            // SAFETY: We just wrote a valid value to `end`, and it stays valid for the lifetime of the returned reference.
+            Ok(&mut *end)
         }
-        Ok(())
     }
 
     /// Removes the last element from a vector and returns it, or [`None`] if it
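// Usage sketch (not part of this patch): how the two new methods compose,
// assuming a nightly toolchain with the `push_mut` feature gate shown above.
#![feature(push_mut)]

fn main() {
    let mut v: Vec<u32> = Vec::new();

    // `push_mut` behaves like `push`, but hands back `&mut T` pointing at the
    // element it just appended.
    let slot = v.push_mut(1);
    *slot += 9;
    assert_eq!(v, [10]);

    // `push_mut_within_capacity` never reallocates: it returns `Ok(&mut T)`
    // while spare capacity remains, and hands the value back in `Err` once
    // the vector is full.
    v.shrink_to_fit();
    while v.len() < v.capacity() {
        v.push_mut_within_capacity(0).unwrap();
    }
    assert_eq!(v.push_mut_within_capacity(99), Err(99));
}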
diff --git a/tests/codegen-llvm/become-musttail.rs b/tests/codegen-llvm/become-musttail.rs
new file mode 100644
index 00000000000..07f33571910
--- /dev/null
+++ b/tests/codegen-llvm/become-musttail.rs
@@ -0,0 +1,18 @@
+//@ compile-flags: -C opt-level=0 -Cpanic=abort -C no-prepopulate-passes
+//@ needs-unwind
+
+#![crate_type = "lib"]
+#![feature(explicit_tail_calls)]
+
+// CHECK-LABEL: define {{.*}}@fibonacci(
+#[no_mangle]
+#[inline(never)]
+pub fn fibonacci(n: u64, a: u64, b: u64) -> u64 {
+    // CHECK: musttail call {{.*}}@fibonacci(
+    // CHECK-NEXT: ret i64
+    match n {
+        0 => a,
+        1 => b,
+        _ => become fibonacci(n - 1, b, a + b),
+    }
+}
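// Context sketch (not part of this patch): `become` guarantees the call is
// lowered as a tail call (hence the `musttail` CHECK above), so the recursion
// reuses the caller's stack frame. Behaviourally it matches this iterative
// form, which needs no feature gate:
pub fn fibonacci_iterative(mut n: u64, mut a: u64, mut b: u64) -> u64 {
    loop {
        match n {
            0 => return a,
            1 => return b,
            _ => {
                // Mirrors `become fibonacci(n - 1, b, a + b)`.
                let next = a + b;
                a = b;
                b = next;
                n -= 1;
            }
        }
    }
}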
diff --git a/tests/coverage/async_closure.cov-map b/tests/coverage/async_closure.cov-map
index 9f8dc8d6cbb..53128dd7a48 100644
--- a/tests/coverage/async_closure.cov-map
+++ b/tests/coverage/async_closure.cov-map
@@ -37,32 +37,29 @@ Number of file 0 mappings: 8
 Highest counter ID seen: c0
 
 Function name: async_closure::main::{closure#0}
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 0b, 22, 00, 23, 01, 00, 23, 00, 24]
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 0b, 22, 00, 24]
 Number of files: 1
 - file 0 => $DIR/async_closure.rs
 Number of expressions: 0
-Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 11, 34) to (start + 0, 35)
-- Code(Counter(0)) at (prev + 0, 35) to (start + 0, 36)
+Number of file 0 mappings: 1
+- Code(Counter(0)) at (prev + 11, 34) to (start + 0, 36)
 Highest counter ID seen: c0
 
 Function name: async_closure::main::{closure#0}
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 0b, 22, 00, 23, 01, 00, 23, 00, 24]
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 0b, 22, 00, 24]
 Number of files: 1
 - file 0 => $DIR/async_closure.rs
 Number of expressions: 0
-Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 11, 34) to (start + 0, 35)
-- Code(Counter(0)) at (prev + 0, 35) to (start + 0, 36)
+Number of file 0 mappings: 1
+- Code(Counter(0)) at (prev + 11, 34) to (start + 0, 36)
 Highest counter ID seen: c0
 
 Function name: async_closure::main::{closure#0}::{closure#0}::<i16>
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 0b, 22, 00, 23, 01, 00, 23, 00, 24]
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 0b, 22, 00, 24]
 Number of files: 1
 - file 0 => $DIR/async_closure.rs
 Number of expressions: 0
-Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 11, 34) to (start + 0, 35)
-- Code(Counter(0)) at (prev + 0, 35) to (start + 0, 36)
+Number of file 0 mappings: 1
+- Code(Counter(0)) at (prev + 11, 34) to (start + 0, 36)
 Highest counter ID seen: c0
 
diff --git a/tests/coverage/try-in-macro.attr.cov-map b/tests/coverage/try-in-macro.attr.cov-map
index 7111e89637c..1b6c97cf145 100644
--- a/tests/coverage/try-in-macro.attr.cov-map
+++ b/tests/coverage/try-in-macro.attr.cov-map
@@ -1,12 +1,3 @@
-Function name: <try_in_macro::MyEnum as try_in_macro::Arbitrary>::try_size_hint
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 1e, 2a, 00, 2b]
-Number of files: 1
-- file 0 => $DIR/try-in-macro.rs
-Number of expressions: 0
-Number of file 0 mappings: 1
-- Code(Zero) at (prev + 30, 42) to (start + 0, 43)
-Highest counter ID seen: (none)
-
 Function name: try_in_macro::main
 Raw bytes (19): 0x[01, 01, 00, 03, 01, 29, 01, 00, 0a, 01, 01, 05, 00, 1a, 01, 01, 01, 00, 02]
 Number of files: 1
diff --git a/tests/coverage/try-in-macro.bang.cov-map b/tests/coverage/try-in-macro.bang.cov-map
index 80bd91a993c..1b6c97cf145 100644
--- a/tests/coverage/try-in-macro.bang.cov-map
+++ b/tests/coverage/try-in-macro.bang.cov-map
@@ -1,12 +1,3 @@
-Function name: <try_in_macro::MyEnum as try_in_macro::Arbitrary>::try_size_hint
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 23, 1c, 00, 1d]
-Number of files: 1
-- file 0 => $DIR/try-in-macro.rs
-Number of expressions: 0
-Number of file 0 mappings: 1
-- Code(Zero) at (prev + 35, 28) to (start + 0, 29)
-Highest counter ID seen: (none)
-
 Function name: try_in_macro::main
 Raw bytes (19): 0x[01, 01, 00, 03, 01, 29, 01, 00, 0a, 01, 01, 05, 00, 1a, 01, 01, 01, 00, 02]
 Number of files: 1
diff --git a/tests/coverage/try-in-macro.derive.cov-map b/tests/coverage/try-in-macro.derive.cov-map
index 6646b6693ba..1b6c97cf145 100644
--- a/tests/coverage/try-in-macro.derive.cov-map
+++ b/tests/coverage/try-in-macro.derive.cov-map
@@ -1,12 +1,3 @@
-Function name: <try_in_macro::MyEnum as try_in_macro::Arbitrary>::try_size_hint
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 26, 38, 00, 39]
-Number of files: 1
-- file 0 => $DIR/try-in-macro.rs
-Number of expressions: 0
-Number of file 0 mappings: 1
-- Code(Zero) at (prev + 38, 56) to (start + 0, 57)
-Highest counter ID seen: (none)
-
 Function name: try_in_macro::main
 Raw bytes (19): 0x[01, 01, 00, 03, 01, 29, 01, 00, 0a, 01, 01, 05, 00, 1a, 01, 01, 01, 00, 02]
 Number of files: 1
diff --git a/tests/mir-opt/box_conditional_drop_allocator.main.ElaborateDrops.diff b/tests/mir-opt/box_conditional_drop_allocator.main.ElaborateDrops.diff
new file mode 100644
index 00000000000..6f6c239d7c8
--- /dev/null
+++ b/tests/mir-opt/box_conditional_drop_allocator.main.ElaborateDrops.diff
@@ -0,0 +1,186 @@
+- // MIR for `main` before ElaborateDrops
++ // MIR for `main` after ElaborateDrops
+  
+  fn main() -> () {
+      let mut _0: ();
+      let _1: std::boxed::Box<HasDrop, DropAllocator>;
+      let mut _2: HasDrop;
+      let mut _3: DropAllocator;
+      let mut _4: bool;
+      let _5: ();
+      let mut _6: HasDrop;
+      let _7: ();
+      let mut _8: std::boxed::Box<HasDrop, DropAllocator>;
++     let mut _9: bool;
++     let mut _10: &mut std::boxed::Box<HasDrop, DropAllocator>;
++     let mut _11: ();
++     let mut _12: &mut std::boxed::Box<HasDrop, DropAllocator>;
++     let mut _13: ();
++     let mut _14: *const HasDrop;
++     let mut _15: &mut std::boxed::Box<HasDrop, DropAllocator>;
++     let mut _16: ();
++     let mut _17: *const HasDrop;
+      scope 1 {
+          debug b => _1;
+      }
+  
+      bb0: {
++         _9 = const false;
+          StorageLive(_1);
+          StorageLive(_2);
+          _2 = HasDrop;
+          StorageLive(_3);
+          _3 = DropAllocator;
+          _1 = Box::<HasDrop, DropAllocator>::new_in(move _2, move _3) -> [return: bb1, unwind: bb11];
+      }
+  
+      bb1: {
++         _9 = const true;
+          StorageDead(_3);
+          StorageDead(_2);
+          StorageLive(_4);
+          _4 = const true;
+          switchInt(move _4) -> [0: bb4, otherwise: bb2];
+      }
+  
+      bb2: {
+          StorageLive(_5);
+          StorageLive(_6);
+          _6 = move (*_1);
+          _5 = std::mem::drop::<HasDrop>(move _6) -> [return: bb3, unwind: bb9];
+      }
+  
+      bb3: {
+          StorageDead(_6);
+          StorageDead(_5);
+          _0 = const ();
+          goto -> bb6;
+      }
+  
+      bb4: {
+          StorageLive(_7);
+          StorageLive(_8);
++         _9 = const false;
+          _8 = move _1;
+          _7 = std::mem::drop::<Box<HasDrop, DropAllocator>>(move _8) -> [return: bb5, unwind: bb8];
+      }
+  
+      bb5: {
+          StorageDead(_8);
+          StorageDead(_7);
+          _0 = const ();
+          goto -> bb6;
+      }
+  
+      bb6: {
+          StorageDead(_4);
+-         drop(_1) -> [return: bb7, unwind continue];
++         goto -> bb23;
+      }
+  
+      bb7: {
++         _9 = const false;
+          StorageDead(_1);
+          return;
+      }
+  
+      bb8 (cleanup): {
+-         drop(_8) -> [return: bb10, unwind terminate(cleanup)];
++         goto -> bb10;
+      }
+  
+      bb9 (cleanup): {
+-         drop(_6) -> [return: bb10, unwind terminate(cleanup)];
++         goto -> bb10;
+      }
+  
+      bb10 (cleanup): {
+-         drop(_1) -> [return: bb13, unwind terminate(cleanup)];
++         goto -> bb29;
+      }
+  
+      bb11 (cleanup): {
+-         drop(_3) -> [return: bb12, unwind terminate(cleanup)];
++         goto -> bb12;
+      }
+  
+      bb12 (cleanup): {
+-         drop(_2) -> [return: bb13, unwind terminate(cleanup)];
++         goto -> bb13;
+      }
+  
+      bb13 (cleanup): {
+          resume;
++     }
++ 
++     bb14: {
++         _9 = const false;
++         goto -> bb7;
++     }
++ 
++     bb15 (cleanup): {
++         drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
++     }
++ 
++     bb16 (cleanup): {
++         switchInt(copy _9) -> [0: bb13, otherwise: bb15];
++     }
++ 
++     bb17: {
++         drop((_1.1: DropAllocator)) -> [return: bb14, unwind: bb13];
++     }
++ 
++     bb18: {
++         switchInt(copy _9) -> [0: bb14, otherwise: bb17];
++     }
++ 
++     bb19: {
++         _10 = &mut _1;
++         _11 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _10) -> [return: bb18, unwind: bb16];
++     }
++ 
++     bb20 (cleanup): {
++         _12 = &mut _1;
++         _13 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _12) -> [return: bb16, unwind terminate(cleanup)];
++     }
++ 
++     bb21: {
++         goto -> bb19;
++     }
++ 
++     bb22: {
++         _14 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
++         goto -> bb21;
++     }
++ 
++     bb23: {
++         switchInt(copy _9) -> [0: bb18, otherwise: bb22];
++     }
++ 
++     bb24 (cleanup): {
++         drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
++     }
++ 
++     bb25 (cleanup): {
++         switchInt(copy _9) -> [0: bb13, otherwise: bb24];
++     }
++ 
++     bb26 (cleanup): {
++         _15 = &mut _1;
++         _16 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _15) -> [return: bb25, unwind terminate(cleanup)];
++     }
++ 
++     bb27 (cleanup): {
++         goto -> bb26;
++     }
++ 
++     bb28 (cleanup): {
++         _17 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
++         goto -> bb27;
++     }
++ 
++     bb29 (cleanup): {
++         switchInt(copy _9) -> [0: bb25, otherwise: bb28];
+      }
+  }
+  
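// Reading aid (not part of this patch), assuming it mirrors the MIR above:
// `_9` is the drop flag ElaborateDrops introduces for `_1` (the Box `b`).
// The merge point (bb23 on the normal path, bb29 on unwind) branches on it
// to decide whether the Box's own teardown still has to run:
pub fn sketch(take_contents: bool) {
    let b = Box::new(String::from("payload")); // stands in for Box::new_in(HasDrop, DropAllocator)
    let mut drop_flag = true;                  // `_9 = const true` in bb1

    if take_contents {
        drop(*b);          // only the contents move out; the flag stays set
    } else {
        drop(b);           // the whole Box moves away
        drop_flag = false; // `_9 = const false` in bb4
    }

    if drop_flag {
        // bb22 -> bb19 -> bb17: call `<Box<_, _> as Drop>::drop` to free the
        // allocation, then drop the allocator stored in field `_1.1`. The
        // compiler emits this; there is nothing to write in source here.
    }
}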
diff --git a/tests/mir-opt/box_conditional_drop_allocator.rs b/tests/mir-opt/box_conditional_drop_allocator.rs
new file mode 100644
index 00000000000..9471be14c87
--- /dev/null
+++ b/tests/mir-opt/box_conditional_drop_allocator.rs
@@ -0,0 +1,39 @@
+// skip-filecheck
+//@ test-mir-pass: ElaborateDrops
+//@ needs-unwind
+#![feature(allocator_api)]
+
+// Regression test for #131082.
+// Tests that the allocator of a `Box` is dropped even when the drop is conditional.
+
+use std::alloc::{AllocError, Allocator, Global, Layout};
+use std::ptr::NonNull;
+
+struct DropAllocator;
+
+unsafe impl Allocator for DropAllocator {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        Global.allocate(layout)
+    }
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        Global.deallocate(ptr, layout);
+    }
+}
+impl Drop for DropAllocator {
+    fn drop(&mut self) {}
+}
+
+struct HasDrop;
+impl Drop for HasDrop {
+    fn drop(&mut self) {}
+}
+
+// EMIT_MIR box_conditional_drop_allocator.main.ElaborateDrops.diff
+fn main() {
+    let b = Box::new_in(HasDrop, DropAllocator);
+    if true {
+        drop(*b);
+    } else {
+        drop(b);
+    }
+}
diff --git a/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff
index d465b8bded2..fa88211383a 100644
--- a/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff
@@ -40,7 +40,7 @@
 +     coverage Code { bcb: bcb5 } => $DIR/branch_match_arms.rs:19:17: 19:18 (#0);
 +     coverage Code { bcb: bcb5 } => $DIR/branch_match_arms.rs:19:23: 19:30 (#0);
 +     coverage Code { bcb: bcb5 } => $DIR/branch_match_arms.rs:19:31: 19:32 (#0);
-+     coverage Code { bcb: bcb2 } => $DIR/branch_match_arms.rs:21:2: 21:2 (#0);
++     coverage Code { bcb: bcb2 } => $DIR/branch_match_arms.rs:21:1: 21:2 (#0);
 + 
       bb0: {
 +         Coverage::VirtualCounter(bcb0);
diff --git a/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff b/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff
index cf6d85abd80..9b6d2b22087 100644
--- a/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff
@@ -6,7 +6,7 @@
   
 +     coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:27:1: 27:17 (#0);
 +     coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:28:5: 28:9 (#0);
-+     coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:29:2: 29:2 (#0);
++     coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:29:1: 29:2 (#0);
 + 
       bb0: {
 +         Coverage::VirtualCounter(bcb0);
diff --git a/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff
index 980c5e202ff..b2bb2375aee 100644
--- a/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff
@@ -10,8 +10,8 @@
 +     coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:13:1: 13:10 (#0);
 +     coverage Code { bcb: bcb1 } => $DIR/instrument_coverage.rs:15:12: 15:15 (#0);
 +     coverage Code { bcb: bcb2 } => $DIR/instrument_coverage.rs:16:13: 16:18 (#0);
-+     coverage Code { bcb: bcb3 } => $DIR/instrument_coverage.rs:17:10: 17:10 (#0);
-+     coverage Code { bcb: bcb2 } => $DIR/instrument_coverage.rs:19:2: 19:2 (#0);
++     coverage Code { bcb: bcb3 } => $DIR/instrument_coverage.rs:17:9: 17:10 (#0);
++     coverage Code { bcb: bcb2 } => $DIR/instrument_coverage.rs:19:1: 19:2 (#0);
 + 
       bb0: {
 +         Coverage::VirtualCounter(bcb0);
diff --git a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff
index b707cd41788..2eb78c08ee8 100644
--- a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff
+++ b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff
@@ -10,8 +10,8 @@
       coverage Code { bcb: bcb0 } => $DIR/instrument_coverage_cleanup.rs:13:1: 13:10 (#0);
       coverage Code { bcb: bcb0 } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0);
       coverage Code { bcb: bcb3 } => $DIR/instrument_coverage_cleanup.rs:14:37: 14:39 (#0);
-      coverage Code { bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:39: 14:39 (#0);
-      coverage Code { bcb: bcb2 } => $DIR/instrument_coverage_cleanup.rs:15:2: 15:2 (#0);
+      coverage Code { bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:38: 14:39 (#0);
+      coverage Code { bcb: bcb2 } => $DIR/instrument_coverage_cleanup.rs:15:1: 15:2 (#0);
       coverage Branch { true_bcb: bcb3, false_bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0);
   
       bb0: {
diff --git a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff
index 239b845c231..0c1bc24b6dc 100644
--- a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff
@@ -10,8 +10,8 @@
 +     coverage Code { bcb: bcb0 } => $DIR/instrument_coverage_cleanup.rs:13:1: 13:10 (#0);
 +     coverage Code { bcb: bcb0 } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0);
 +     coverage Code { bcb: bcb3 } => $DIR/instrument_coverage_cleanup.rs:14:37: 14:39 (#0);
-+     coverage Code { bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:39: 14:39 (#0);
-+     coverage Code { bcb: bcb2 } => $DIR/instrument_coverage_cleanup.rs:15:2: 15:2 (#0);
++     coverage Code { bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:38: 14:39 (#0);
++     coverage Code { bcb: bcb2 } => $DIR/instrument_coverage_cleanup.rs:15:1: 15:2 (#0);
 +     coverage Branch { true_bcb: bcb3, false_bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0);
 + 
       bb0: {
diff --git a/tests/ui/async-await/async-drop/async-drop-box-allocator.rs b/tests/ui/async-await/async-drop/async-drop-box-allocator.rs
new file mode 100644
index 00000000000..86ebf8a0ffd
--- /dev/null
+++ b/tests/ui/async-await/async-drop/async-drop-box-allocator.rs
@@ -0,0 +1,134 @@
+//@ run-pass
+//@ check-run-results
+// Struct `Foo` has both a sync and an async drop.
+// It's used as the allocator of a `Box` that is conditionally moved out of.
+// The sync version is called in the sync context; the async version is called in the async function.
+
+#![feature(async_drop, allocator_api)]
+#![allow(incomplete_features)]
+
+use std::mem::ManuallyDrop;
+
+//@ edition: 2021
+
+#[inline(never)]
+fn myprintln(msg: &str, my_resource_handle: usize) {
+    println!("{} : {}", msg, my_resource_handle);
+}
+
+use std::{
+    future::{Future, async_drop_in_place, AsyncDrop},
+    pin::{pin, Pin},
+    sync::{mpsc, Arc},
+    task::{Context, Poll, Wake, Waker},
+    alloc::{AllocError, Allocator, Global, Layout},
+    ptr::NonNull,
+};
+
+struct Foo {
+    my_resource_handle: usize,
+}
+
+impl Foo {
+    fn new(my_resource_handle: usize) -> Self {
+        let out = Foo {
+            my_resource_handle,
+        };
+        myprintln("Foo::new()", my_resource_handle);
+        out
+    }
+}
+
+impl Drop for Foo {
+    fn drop(&mut self) {
+        myprintln("Foo::drop()", self.my_resource_handle);
+    }
+}
+
+impl AsyncDrop for Foo {
+    async fn drop(self: Pin<&mut Self>) {
+        myprintln("Foo::async drop()", self.my_resource_handle);
+    }
+}
+
+unsafe impl Allocator for Foo {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        Global.allocate(layout)
+    }
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        Global.deallocate(ptr, layout);
+    }
+}
+
+struct HasDrop;
+impl Drop for HasDrop {
+    fn drop(&mut self) {}
+}
+
+fn main() {
+    {
+        let b = Box::new_in(HasDrop, Foo::new(7));
+
+        if true {
+            let _x = *b;
+        } else {
+            let _y = b;
+        }
+    }
+    println!("Middle");
+    block_on(bar(10));
+    println!("Done")
+}
+
+async fn bar(ident_base: usize) {
+    let b = Box::new_in(HasDrop, Foo::new(ident_base));
+
+    if true {
+        let _x = *b;
+    } else {
+        let _y = b;
+    }
+}
+
+fn block_on<F>(fut_unpin: F) -> F::Output
+where
+    F: Future,
+{
+    let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin));
+    let mut fut: Pin<&mut F> = unsafe {
+        Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x)
+    };
+    let (waker, rx) = simple_waker();
+    let mut context = Context::from_waker(&waker);
+    let rv = loop {
+        match fut.as_mut().poll(&mut context) {
+            Poll::Ready(out) => break out,
+            // expect wake in polls
+            Poll::Pending => rx.try_recv().unwrap(),
+        }
+    };
+    let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) };
+    let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin);
+    loop {
+        match drop_fut.as_mut().poll(&mut context) {
+            Poll::Ready(()) => break,
+            Poll::Pending => rx.try_recv().unwrap(),
+        }
+    }
+    rv
+}
+
+fn simple_waker() -> (Waker, mpsc::Receiver<()>) {
+    struct SimpleWaker {
+        tx: std::sync::mpsc::Sender<()>,
+    }
+
+    impl Wake for SimpleWaker {
+        fn wake(self: Arc<Self>) {
+            self.tx.send(()).unwrap();
+        }
+    }
+
+    let (tx, rx) = mpsc::channel();
+    (Waker::from(Arc::new(SimpleWaker { tx })), rx)
+}
diff --git a/tests/ui/async-await/async-drop/async-drop-box-allocator.run.stdout b/tests/ui/async-await/async-drop/async-drop-box-allocator.run.stdout
new file mode 100644
index 00000000000..cb7d0b0fea5
--- /dev/null
+++ b/tests/ui/async-await/async-drop/async-drop-box-allocator.run.stdout
@@ -0,0 +1,6 @@
+Foo::new() : 7
+Foo::drop() : 7
+Middle
+Foo::new() : 10
+Foo::async drop() : 10
+Done
diff --git a/tests/ui/async-await/async-drop/async-drop-box.rs b/tests/ui/async-await/async-drop/async-drop-box.rs
new file mode 100644
index 00000000000..0a6ed412863
--- /dev/null
+++ b/tests/ui/async-await/async-drop/async-drop-box.rs
@@ -0,0 +1,109 @@
+//@ run-pass
+//@ check-run-results
+// Struct `Foo` has both a sync and an async drop.
+// `Foo` is always kept inside a `Box`.
+// The sync version is called in the sync context; the async version is called in the async function.
+
+//@ known-bug: #143658
+// The async version is never actually called.
+
+#![feature(async_drop)]
+#![allow(incomplete_features)]
+
+use std::mem::ManuallyDrop;
+
+//@ edition: 2021
+
+#[inline(never)]
+fn myprintln(msg: &str, my_resource_handle: usize) {
+    println!("{} : {}", msg, my_resource_handle);
+}
+
+use std::{
+    future::{Future, async_drop_in_place, AsyncDrop},
+    pin::{pin, Pin},
+    sync::{mpsc, Arc},
+    task::{Context, Poll, Wake, Waker},
+};
+
+struct Foo {
+    my_resource_handle: usize,
+}
+
+impl Foo {
+    fn new(my_resource_handle: usize) -> Self {
+        let out = Foo {
+            my_resource_handle,
+        };
+        myprintln("Foo::new()", my_resource_handle);
+        out
+    }
+}
+
+impl Drop for Foo {
+    fn drop(&mut self) {
+        myprintln("Foo::drop()", self.my_resource_handle);
+    }
+}
+
+impl AsyncDrop for Foo {
+    async fn drop(self: Pin<&mut Self>) {
+        myprintln("Foo::async drop()", self.my_resource_handle);
+    }
+}
+
+fn main() {
+    {
+        let _ = Box::new(Foo::new(7));
+    }
+    println!("Middle");
+    block_on(bar(10));
+    println!("Done")
+}
+
+async fn bar(ident_base: usize) {
+    let _first = Box::new(Foo::new(ident_base));
+}
+
+fn block_on<F>(fut_unpin: F) -> F::Output
+where
+    F: Future,
+{
+    let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin));
+    let mut fut: Pin<&mut F> = unsafe {
+        Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x)
+    };
+    let (waker, rx) = simple_waker();
+    let mut context = Context::from_waker(&waker);
+    let rv = loop {
+        match fut.as_mut().poll(&mut context) {
+            Poll::Ready(out) => break out,
+            // expect wake in polls
+            Poll::Pending => rx.try_recv().unwrap(),
+        }
+    };
+    let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) };
+    let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin);
+    loop {
+        match drop_fut.as_mut().poll(&mut context) {
+            Poll::Ready(()) => break,
+            Poll::Pending => rx.try_recv().unwrap(),
+        }
+    }
+    rv
+}
+
+fn simple_waker() -> (Waker, mpsc::Receiver<()>) {
+    struct SimpleWaker {
+        tx: std::sync::mpsc::Sender<()>,
+    }
+
+    impl Wake for SimpleWaker {
+        fn wake(self: Arc<Self>) {
+            self.tx.send(()).unwrap();
+        }
+    }
+
+    let (tx, rx) = mpsc::channel();
+    (Waker::from(Arc::new(SimpleWaker { tx })), rx)
+}
diff --git a/tests/ui/async-await/async-drop/async-drop-box.run.stdout b/tests/ui/async-await/async-drop/async-drop-box.run.stdout
new file mode 100644
index 00000000000..a2dab2ba992
--- /dev/null
+++ b/tests/ui/async-await/async-drop/async-drop-box.run.stdout
@@ -0,0 +1,6 @@
+Foo::new() : 7
+Foo::drop() : 7
+Middle
+Foo::new() : 10
+Foo::drop() : 10
+Done
diff --git a/tests/ui/drop/box-conditional-drop-allocator.rs b/tests/ui/drop/box-conditional-drop-allocator.rs
new file mode 100644
index 00000000000..8f78da16473
--- /dev/null
+++ b/tests/ui/drop/box-conditional-drop-allocator.rs
@@ -0,0 +1,43 @@
+//@ run-pass
+#![feature(allocator_api)]
+
+// Regression test for #131082.
+// Tests that the allocator of a `Box` is dropped even when the drop is conditional.
+
+use std::alloc::{AllocError, Allocator, Global, Layout};
+use std::cell::Cell;
+use std::ptr::NonNull;
+
+struct DropCheckingAllocator<'a>(&'a Cell<bool>);
+
+unsafe impl Allocator for DropCheckingAllocator<'_> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        Global.allocate(layout)
+    }
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        Global.deallocate(ptr, layout);
+    }
+}
+impl Drop for DropCheckingAllocator<'_> {
+    fn drop(&mut self) {
+        self.0.set(true);
+    }
+}
+
+struct HasDrop;
+impl Drop for HasDrop {
+    fn drop(&mut self) {}
+}
+
+fn main() {
+    let dropped = Cell::new(false);
+    {
+        let b = Box::new_in(HasDrop, DropCheckingAllocator(&dropped));
+        if true {
+            drop(*b);
+        } else {
+            drop(b);
+        }
+    }
+    assert!(dropped.get());
+}
diff --git a/tests/ui/explicit-tail-calls/recursion-etc.rs b/tests/ui/explicit-tail-calls/recursion-etc.rs
new file mode 100644
index 00000000000..8c89ceb7869
--- /dev/null
+++ b/tests/ui/explicit-tail-calls/recursion-etc.rs
@@ -0,0 +1,17 @@
+//@ run-pass
+#![expect(incomplete_features)]
+#![feature(explicit_tail_calls)]
+
+use std::hint::black_box;
+
+pub fn count(curr: u64, top: u64) -> u64 {
+    if black_box(curr) >= top {
+        curr
+    } else {
+        become count(curr + 1, top)
+    }
+}
+
+fn main() {
+    println!("{}", count(0, black_box(1000000)));
+}
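// Companion sketch (not part of this patch), assuming the current gate also
// accepts `become` between distinct functions with identical signatures:
// guaranteed tail calls make mutual recursion safe at depths that would
// otherwise risk overflowing the stack.
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

pub fn is_even(n: u64) -> bool {
    if n == 0 { true } else { become is_odd(n - 1) }
}

pub fn is_odd(n: u64) -> bool {
    if n == 0 { false } else { become is_even(n - 1) }
}

fn main() {
    assert!(is_even(1_000_000));
}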