author    bors <bors@rust-lang.org>  2025-04-28 14:14:26 +0000
committer bors <bors@rust-lang.org>  2025-04-28 14:14:26 +0000
commit    7d65abfe80f9eee93296d1ce08f845c9bf7039f8 (patch)
tree      e232a65f88a8539991406f4d48d7de0f4dcd863c /compiler/rustc_mir_transform/src
parent    a932eb36f8adf6c8cdfc450f063943da3112d621 (diff)
parent    c366756a8537ef998d32c1ef57098d5aae7ca92f (diff)
Auto merge of #123948 - azhogin:azhogin/async-drop, r=oli-obk
Async drop codegen

Async drop implementation, using a templated coroutine to generate the async drop glue.

Scope handling is changed to generate `async_drop_in_place()` awaits when async-droppable objects go out of scope in an async context.

Implementation details:
https://github.com/azhogin/posts/blob/main/async-drop-impl.md

New fields in the `Drop` terminator (`drop` & `async_fut`). Codegen and Miri must validate that those fields are empty (in the full version, an async `Drop` terminator is either expanded in the `StateTransform` pass or reverted to the sync version). Terminator visiting is changed to account for the possible new successor (the `drop` field).
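
For orientation, a minimal standalone sketch of the extended `Drop` variant is shown below; the placeholder index types and the field comments are inferred from the pattern-match updates in the diff further down, not copied from `rustc_middle`:

```rust
// Placeholder index types standing in for rustc_middle's MIR indices.
type BasicBlock = usize;
type Local = usize;

enum TerminatorKind {
    Drop {
        target: BasicBlock,
        // New: alternate successor taken on the coroutine "dropline", i.e. when
        // the surrounding coroutine is itself being async-dropped.
        drop: Option<BasicBlock>,
        // New: local holding the prepared async-drop future, if this drop may
        // later be expanded into an async drop in StateTransform.
        async_fut: Option<Local>,
        // ... `place`, `unwind` and `replace` are unchanged ...
    },
    // Other variants elided.
}
```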

`ResumedAfterDrop` panic messages for when a coroutine is resumed after its async drop has started.

Lang item for the coroutine generated from the async function `async_drop_in_place`: `async fn async_drop_in_place<T>()::{{closure0}}`.

Scope processing to generate async drop preparations. An async drop is a hidden `Yield`, so potentially-async drops require the same dropline preparation as `Yield` terminators.

Processing in `StateTransform`: async drops are expanded into yield points, and generation of the async drop of the coroutine itself is added.
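
As a rough illustration of what this expansion does per drop (a hedged sketch mirroring the `poll-code` block comments added in `coroutine/drop.rs` below; `poll_drop_future` is an illustrative name, not a compiler function):

```rust
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};

// Illustrative only: what one expanded async drop amounts to at each poll of
// the surrounding coroutine. `fut` stands for the drop future kept in the
// coroutine layout.
fn poll_drop_future<F: Future<Output = ()>>(fut: &mut F, cx: &mut Context<'_>) -> Poll<()> {
    // #bb_pin: fut_pin = Pin::new_unchecked(&mut fut)
    let fut_pin = unsafe { Pin::new_unchecked(fut) };
    // #bb_call: rv = fut_pin.poll(cx)
    match fut_pin.poll(cx) {
        // #bb_check, Ready: control continues to the drop's original target.
        Poll::Ready(()) => Poll::Ready(()),
        // Pending: the whole coroutine yields and re-enters this state on resume.
        Poll::Pending => Poll::Pending,
    }
}
```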

Shims for `AsyncDropGlueCtorShim`, `AsyncDropGlue` and `FutureDropPoll`.

```rust
use std::pin::Pin;

#[lang = "async_drop"]
pub trait AsyncDrop {
    #[allow(async_fn_in_trait)]
    async fn drop(self: Pin<&mut Self>);
}

// `Foo` is assumed (not shown in the original snippet) to own some resource
// identified by a handle.
struct Foo {
    my_resource_handle: usize,
}

impl Drop for Foo {
    fn drop(&mut self) {
        println!("Foo::drop({})", self.my_resource_handle);
    }
}

impl AsyncDrop for Foo {
    async fn drop(self: Pin<&mut Self>) {
        println!("Foo::async drop({})", self.my_resource_handle);
    }
}
```
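
To make the intended surface behavior concrete, here is a hedged usage sketch (it relies on the experimental async-drop support described above): when a value whose type implements `AsyncDrop` goes out of scope in an async context, its async drop is awaited instead of only running the sync `Drop`.

```rust
async fn consume(foo: Foo) {
    println!("using {}", foo.my_resource_handle);
    // `foo` goes out of scope here. In an async context the lowering inserts an
    // await of `async_drop_in_place::<Foo>(..)`, so `AsyncDrop::drop` runs at
    // this suspension point instead of only the sync `Drop::drop`.
}
```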

The first async drop glue implementation has been reworked to use the same drop elaboration code as sync drop.
`async_drop_in_place` is changed to be an `async fn`, so both the `async_drop_in_place` ctor and the coroutine it produces have their own lang items (`AsyncDropInPlace`/`AsyncDropInPlacePoll`) and shim instances (`AsyncDropGlueCtorShim`/`AsyncDropGlue`).
```rust
pub async unsafe fn async_drop_in_place<T: ?Sized>(_to_drop: *mut T) {
}
```
`AsyncDropGlue` shim generation uses `elaborate_drops::elaborate_drop` to produce the drop ladder (in a similar way as for sync drop glue) and then `coroutine::StateTransform` to convert the function into a coroutine poll.
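
As a rough mental model of that drop ladder (hedged: the real glue is MIR produced by drop elaboration plus `StateTransform`, not literal source, and `async_drop_glue_for_pair` is an illustrative name), the glue async-drops each component in turn, and every step is a potential suspension point:

```rust
// Conceptual source-level equivalent of the glue for a pair of async-droppable
// values, written in terms of the `async_drop_in_place` shown above; not actual
// compiler output.
async unsafe fn async_drop_glue_for_pair(this: *mut (Foo, Foo)) {
    unsafe {
        async_drop_in_place(&raw mut (*this).0).await;
        async_drop_in_place(&raw mut (*this).1).await;
    }
}
```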

The `AsyncDropGlue` coroutine's layout can't be calculated for a generic `T`; it requires the final dropee type to be known at generation time (in `StateTransform`). So a `templated coroutine` is introduced here (`templated_coroutine_layout(...)`, etc.).

This approach supersedes the first implementation in https://github.com/rust-lang/rust/pull/121801, which mixed in language-level futures.
Diffstat (limited to 'compiler/rustc_mir_transform/src')
-rw-r--r--  compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs     6
-rw-r--r--  compiler/rustc_mir_transform/src/coroutine.rs                     315
-rw-r--r--  compiler/rustc_mir_transform/src/coroutine/drop.rs                725
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_drop.rs                480
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_drops.rs                23
-rw-r--r--  compiler/rustc_mir_transform/src/inline.rs                         31
-rw-r--r--  compiler/rustc_mir_transform/src/inline/cycle.rs                    5
-rw-r--r--  compiler/rustc_mir_transform/src/known_panics_lint.rs               9
-rw-r--r--  compiler/rustc_mir_transform/src/lib.rs                             2
-rw-r--r--  compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs       2
-rw-r--r--  compiler/rustc_mir_transform/src/patch.rs                           8
-rw-r--r--  compiler/rustc_mir_transform/src/remove_zsts.rs                     5
-rw-r--r--  compiler/rustc_mir_transform/src/shim.rs                          175
-rw-r--r--  compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs    991
-rw-r--r--  compiler/rustc_mir_transform/src/validate.rs                       11
15 files changed, 1898 insertions, 890 deletions
diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
index b33326cb873..a414d120e68 100644
--- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
+++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
@@ -83,7 +83,9 @@ fn add_move_for_packed_drop<'tcx>(
     is_cleanup: bool,
 ) {
     debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc);
-    let TerminatorKind::Drop { ref place, target, unwind, replace } = terminator.kind else {
+    let TerminatorKind::Drop { ref place, target, unwind, replace, drop, async_fut } =
+        terminator.kind
+    else {
         unreachable!();
     };
 
@@ -106,6 +108,8 @@ fn add_move_for_packed_drop<'tcx>(
             target: storage_dead_block,
             unwind,
             replace,
+            drop,
+            async_fut,
         },
     );
 }
diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs
index 0eed46c72f9..263f0c40f5a 100644
--- a/compiler/rustc_mir_transform/src/coroutine.rs
+++ b/compiler/rustc_mir_transform/src/coroutine.rs
@@ -51,9 +51,15 @@
 //! Otherwise it drops all the values in scope at the last suspension point.
 
 mod by_move_body;
+mod drop;
 use std::{iter, ops};
 
 pub(super) use by_move_body::coroutine_by_move_body_def_id;
+use drop::{
+    cleanup_async_drops, create_coroutine_drop_shim, create_coroutine_drop_shim_async,
+    create_coroutine_drop_shim_proxy_async, elaborate_coroutine_drops, expand_async_drops,
+    has_expandable_async_drops, insert_clean_drop,
+};
 use rustc_abi::{FieldIdx, VariantIdx};
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::pluralize;
@@ -64,6 +70,7 @@ use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet};
 use rustc_index::{Idx, IndexVec};
 use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
 use rustc_middle::mir::*;
+use rustc_middle::ty::util::Discr;
 use rustc_middle::ty::{
     self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode,
 };
@@ -74,7 +81,9 @@ use rustc_mir_dataflow::impls::{
 };
 use rustc_mir_dataflow::{Analysis, Results, ResultsVisitor};
 use rustc_span::def_id::{DefId, LocalDefId};
-use rustc_span::{Span, sym};
+use rustc_span::source_map::dummy_spanned;
+use rustc_span::symbol::sym;
+use rustc_span::{DUMMY_SP, Span};
 use rustc_target::spec::PanicStrategy;
 use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
 use rustc_trait_selection::infer::TyCtxtInferExt as _;
@@ -159,6 +168,7 @@ fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtx
 }
 
 const SELF_ARG: Local = Local::from_u32(1);
+const CTX_ARG: Local = Local::from_u32(2);
 
 /// A `yield` point in the coroutine.
 struct SuspensionPoint<'tcx> {
@@ -539,11 +549,11 @@ fn replace_local<'tcx>(
 /// The async lowering step and the type / lifetime inference / checking are
 /// still using the `ResumeTy` indirection for the time being, and that indirection
 /// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`.
-fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> Ty<'tcx> {
     let context_mut_ref = Ty::new_task_context(tcx);
 
     // replace the type of the `resume` argument
-    replace_resume_ty_local(tcx, body, Local::new(2), context_mut_ref);
+    replace_resume_ty_local(tcx, body, CTX_ARG, context_mut_ref);
 
     let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None);
 
@@ -569,6 +579,7 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
             _ => {}
         }
     }
+    context_mut_ref
 }
 
 fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local {
@@ -1036,9 +1047,8 @@ fn insert_switch<'tcx>(
     body: &mut Body<'tcx>,
     cases: Vec<(usize, BasicBlock)>,
     transform: &TransformVisitor<'tcx>,
-    default: TerminatorKind<'tcx>,
+    default_block: BasicBlock,
 ) {
-    let default_block = insert_term_block(body, default);
     let (assign, discr) = transform.get_discr(body);
     let switch_targets =
         SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block);
@@ -1061,129 +1071,39 @@ fn insert_switch<'tcx>(
     }
 }
 
-fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
-    use crate::elaborate_drop::{Unwind, elaborate_drop};
-    use crate::patch::MirPatch;
-    use crate::shim::DropShimElaborator;
-
-    // Note that `elaborate_drops` only drops the upvars of a coroutine, and
-    // this is ok because `open_drop` can only be reached within that own
-    // coroutine's resume function.
-    let typing_env = body.typing_env(tcx);
-
-    let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, typing_env };
-
-    for (block, block_data) in body.basic_blocks.iter_enumerated() {
-        let (target, unwind, source_info) = match block_data.terminator() {
-            Terminator {
-                source_info,
-                kind: TerminatorKind::Drop { place, target, unwind, replace: _ },
-            } => {
-                if let Some(local) = place.as_local()
-                    && local == SELF_ARG
-                {
-                    (target, unwind, source_info)
-                } else {
-                    continue;
-                }
-            }
-            _ => continue,
-        };
-        let unwind = if block_data.is_cleanup {
-            Unwind::InCleanup
-        } else {
-            Unwind::To(match *unwind {
-                UnwindAction::Cleanup(tgt) => tgt,
-                UnwindAction::Continue => elaborator.patch.resume_block(),
-                UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(),
-                UnwindAction::Terminate(reason) => elaborator.patch.terminate_block(reason),
-            })
-        };
-        elaborate_drop(
-            &mut elaborator,
-            *source_info,
-            Place::from(SELF_ARG),
-            (),
-            *target,
-            unwind,
-            block,
-        );
-    }
-    elaborator.patch.apply(body);
-}
-
-fn create_coroutine_drop_shim<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    transform: &TransformVisitor<'tcx>,
-    coroutine_ty: Ty<'tcx>,
-    body: &Body<'tcx>,
-    drop_clean: BasicBlock,
-) -> Body<'tcx> {
-    let mut body = body.clone();
-    // Take the coroutine info out of the body, since the drop shim is
-    // not a coroutine body itself; it just has its drop built out of it.
-    let _ = body.coroutine.take();
-    // Make sure the resume argument is not included here, since we're
-    // building a body for `drop_in_place`.
-    body.arg_count = 1;
-
+fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
     let source_info = SourceInfo::outermost(body.span);
+    body.basic_blocks_mut().push(BasicBlockData {
+        statements: Vec::new(),
+        terminator: Some(Terminator { source_info, kind }),
+        is_cleanup: false,
+    })
+}
 
-    let mut cases = create_cases(&mut body, transform, Operation::Drop);
-
-    cases.insert(0, (CoroutineArgs::UNRESUMED, drop_clean));
-
-    // The returned state and the poisoned state fall through to the default
-    // case which is just to return
-
-    insert_switch(&mut body, cases, transform, TerminatorKind::Return);
-
-    for block in body.basic_blocks_mut() {
-        let kind = &mut block.terminator_mut().kind;
-        if let TerminatorKind::CoroutineDrop = *kind {
-            *kind = TerminatorKind::Return;
-        }
+fn return_poll_ready_assign<'tcx>(tcx: TyCtxt<'tcx>, source_info: SourceInfo) -> Statement<'tcx> {
+    // Poll::Ready(())
+    let poll_def_id = tcx.require_lang_item(LangItem::Poll, None);
+    let args = tcx.mk_args(&[tcx.types.unit.into()]);
+    let val = Operand::Constant(Box::new(ConstOperand {
+        span: source_info.span,
+        user_ty: None,
+        const_: Const::zero_sized(tcx.types.unit),
+    }));
+    let ready_val = Rvalue::Aggregate(
+        Box::new(AggregateKind::Adt(poll_def_id, VariantIdx::from_usize(0), args, None, None)),
+        IndexVec::from_raw(vec![val]),
+    );
+    Statement {
+        kind: StatementKind::Assign(Box::new((Place::return_place(), ready_val))),
+        source_info,
     }
-
-    // Replace the return variable
-    body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(tcx.types.unit, source_info);
-
-    make_coroutine_state_argument_indirect(tcx, &mut body);
-
-    // Change the coroutine argument from &mut to *mut
-    body.local_decls[SELF_ARG] =
-        LocalDecl::with_source_info(Ty::new_mut_ptr(tcx, coroutine_ty), source_info);
-
-    // Make sure we remove dead blocks to remove
-    // unrelated code from the resume part of the function
-    simplify::remove_dead_blocks(&mut body);
-
-    // Update the body's def to become the drop glue.
-    let coroutine_instance = body.source.instance;
-    let drop_in_place = tcx.require_lang_item(LangItem::DropInPlace, None);
-    let drop_instance = InstanceKind::DropGlue(drop_in_place, Some(coroutine_ty));
-
-    // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible
-    // filename.
-    body.source.instance = coroutine_instance;
-    dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(()));
-    body.source.instance = drop_instance;
-
-    // Creating a coroutine drop shim happens on `Analysis(PostCleanup) -> Runtime(Initial)`
-    // but the pass manager doesn't update the phase of the coroutine drop shim. Update the
-    // phase of the drop shim so that later on when we run the pass manager on the shim, in
-    // the `mir_shims` query, we don't ICE on the intra-pass validation before we've updated
-    // the phase of the body from analysis.
-    body.phase = MirPhase::Runtime(RuntimePhase::Initial);
-
-    body
 }
 
-fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
+fn insert_poll_ready_block<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> BasicBlock {
     let source_info = SourceInfo::outermost(body.span);
     body.basic_blocks_mut().push(BasicBlockData {
-        statements: Vec::new(),
-        terminator: Some(Terminator { source_info, kind }),
+        statements: [return_poll_ready_assign(tcx, source_info)].to_vec(),
+        terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }),
         is_cleanup: false,
     })
 }
@@ -1263,45 +1183,50 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
     false
 }
 
+// Poison the coroutine when it unwinds
+fn generate_poison_block_and_redirect_unwinds_there<'tcx>(
+    transform: &TransformVisitor<'tcx>,
+    body: &mut Body<'tcx>,
+) {
+    let source_info = SourceInfo::outermost(body.span);
+    let poison_block = body.basic_blocks_mut().push(BasicBlockData {
+        statements: vec![
+            transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info),
+        ],
+        terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }),
+        is_cleanup: true,
+    });
+
+    for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
+        let source_info = block.terminator().source_info;
+
+        if let TerminatorKind::UnwindResume = block.terminator().kind {
+            // An existing `Resume` terminator is redirected to jump to our dedicated
+            // "poisoning block" above.
+            if idx != poison_block {
+                *block.terminator_mut() =
+                    Terminator { source_info, kind: TerminatorKind::Goto { target: poison_block } };
+            }
+        } else if !block.is_cleanup
+            // Any terminators that *can* unwind but don't have an unwind target set are also
+            // pointed at our poisoning block (unless they're part of the cleanup path).
+            && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut()
+        {
+            *unwind = UnwindAction::Cleanup(poison_block);
+        }
+    }
+}
+
 fn create_coroutine_resume_function<'tcx>(
     tcx: TyCtxt<'tcx>,
     transform: TransformVisitor<'tcx>,
     body: &mut Body<'tcx>,
     can_return: bool,
+    can_unwind: bool,
 ) {
-    let can_unwind = can_unwind(tcx, body);
-
     // Poison the coroutine when it unwinds
     if can_unwind {
-        let source_info = SourceInfo::outermost(body.span);
-        let poison_block = body.basic_blocks_mut().push(BasicBlockData {
-            statements: vec![
-                transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info),
-            ],
-            terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }),
-            is_cleanup: true,
-        });
-
-        for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
-            let source_info = block.terminator().source_info;
-
-            if let TerminatorKind::UnwindResume = block.terminator().kind {
-                // An existing `Resume` terminator is redirected to jump to our dedicated
-                // "poisoning block" above.
-                if idx != poison_block {
-                    *block.terminator_mut() = Terminator {
-                        source_info,
-                        kind: TerminatorKind::Goto { target: poison_block },
-                    };
-                }
-            } else if !block.is_cleanup
-                // Any terminators that *can* unwind but don't have an unwind target set are also
-                // pointed at our poisoning block (unless they're part of the cleanup path).
-                && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut()
-            {
-                *unwind = UnwindAction::Cleanup(poison_block);
-            }
-        }
+        generate_poison_block_and_redirect_unwinds_there(&transform, body);
     }
 
     let mut cases = create_cases(body, &transform, Operation::Resume);
@@ -1326,7 +1251,13 @@ fn create_coroutine_resume_function<'tcx>(
         let block = match transform.coroutine_kind {
             CoroutineKind::Desugared(CoroutineDesugaring::Async, _)
             | CoroutineKind::Coroutine(_) => {
-                insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind))
+                // For `async_drop_in_place<T>::{closure}` we just keep return Poll::Ready,
+                // because async drop of such coroutine keeps polling original coroutine
+                if tcx.is_async_drop_in_place_coroutine(body.source.def_id()) {
+                    insert_poll_ready_block(tcx, body)
+                } else {
+                    insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind))
+                }
             }
             CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)
             | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
@@ -1336,7 +1267,8 @@ fn create_coroutine_resume_function<'tcx>(
         cases.insert(1, (CoroutineArgs::RETURNED, block));
     }
 
-    insert_switch(body, cases, &transform, TerminatorKind::Unreachable);
+    let default_block = insert_term_block(body, TerminatorKind::Unreachable);
+    insert_switch(body, cases, &transform, default_block);
 
     make_coroutine_state_argument_indirect(tcx, body);
 
@@ -1360,25 +1292,6 @@ fn create_coroutine_resume_function<'tcx>(
     dump_mir(tcx, false, "coroutine_resume", &0, body, |_, _| Ok(()));
 }
 
-fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock {
-    let return_block = insert_term_block(body, TerminatorKind::Return);
-
-    let term = TerminatorKind::Drop {
-        place: Place::from(SELF_ARG),
-        target: return_block,
-        unwind: UnwindAction::Continue,
-        replace: false,
-    };
-    let source_info = SourceInfo::outermost(body.span);
-
-    // Create a block to destroy an unresumed coroutines. This can only destroy upvars.
-    body.basic_blocks_mut().push(BasicBlockData {
-        statements: Vec::new(),
-        terminator: Some(Terminator { source_info, kind: term }),
-        is_cleanup: false,
-    })
-}
-
 /// An operation that can be performed on a coroutine.
 #[derive(PartialEq, Copy, Clone)]
 enum Operation {
@@ -1423,7 +1336,7 @@ fn create_cases<'tcx>(
 
                 if operation == Operation::Resume {
                     // Move the resume argument to the destination place of the `Yield` terminator
-                    let resume_arg = Local::new(2); // 0 = return, 1 = self
+                    let resume_arg = CTX_ARG;
                     statements.push(Statement {
                         source_info,
                         kind: StatementKind::Assign(Box::new((
@@ -1530,7 +1443,9 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
         };
         let old_ret_ty = body.return_ty();
 
-        assert!(body.coroutine_drop().is_none());
+        assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none());
+
+        dump_mir(tcx, false, "coroutine_before", &0, body, |_, _| Ok(()));
 
         // The first argument is the coroutine type passed by value
         let coroutine_ty = body.local_decls.raw[1].ty;
@@ -1574,19 +1489,32 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
         // RETURN_PLACE then is a fresh unused local with type ret_ty.
         let old_ret_local = replace_local(RETURN_PLACE, new_ret_ty, body, tcx);
 
+        // We need to insert clean drop for unresumed state and perform drop elaboration
+        // (finally in open_drop_for_tuple) before async drop expansion.
+        // Async drops, produced by this drop elaboration, will be expanded,
+        // and corresponding futures kept in layout.
+        let has_async_drops = matches!(
+            coroutine_kind,
+            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
+        ) && has_expandable_async_drops(tcx, body, coroutine_ty);
+
         // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies.
         if matches!(
             coroutine_kind,
             CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
         ) {
-            transform_async_context(tcx, body);
+            let context_mut_ref = transform_async_context(tcx, body);
+            expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty);
+            dump_mir(tcx, false, "coroutine_async_drop_expand", &0, body, |_, _| Ok(()));
+        } else {
+            cleanup_async_drops(body);
         }
 
         // We also replace the resume argument and insert an `Assign`.
         // This is needed because the resume argument `_2` might be live across a `yield`, in which
         // case there is no `Assign` to it that the transform can turn into a store to the coroutine
         // state. After the yield the slot in the coroutine state would then be uninitialized.
-        let resume_local = Local::new(2);
+        let resume_local = CTX_ARG;
         let resume_ty = body.local_decls[resume_local].ty;
         let old_resume_local = replace_local(resume_local, resume_ty, body, tcx);
 
@@ -1667,10 +1595,14 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
         body.coroutine.as_mut().unwrap().resume_ty = None;
         body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout);
 
+        // FIXME: Drops, produced by insert_clean_drop + elaborate_coroutine_drops,
+        // are currently sync only. To allow async for them, we need to move those calls
+        // before expand_async_drops, and fix the related problems.
+        //
         // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in
         // the unresumed state.
         // This is expanded to a drop ladder in `elaborate_coroutine_drops`.
-        let drop_clean = insert_clean_drop(body);
+        let drop_clean = insert_clean_drop(tcx, body, has_async_drops);
 
         dump_mir(tcx, false, "coroutine_pre-elab", &0, body, |_, _| Ok(()));
 
@@ -1681,13 +1613,32 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
 
         dump_mir(tcx, false, "coroutine_post-transform", &0, body, |_, _| Ok(()));
 
-        // Create a copy of our MIR and use it to create the drop shim for the coroutine
-        let drop_shim = create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean);
+        let can_unwind = can_unwind(tcx, body);
 
-        body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim);
+        // Create a copy of our MIR and use it to create the drop shim for the coroutine
+        if has_async_drops {
+            // If coroutine has async drops, generating async drop shim
+            let mut drop_shim =
+                create_coroutine_drop_shim_async(tcx, &transform, body, drop_clean, can_unwind);
+            // Run derefer to fix Derefs that are not in the first place
+            deref_finder(tcx, &mut drop_shim);
+            body.coroutine.as_mut().unwrap().coroutine_drop_async = Some(drop_shim);
+        } else {
+            // If coroutine has no async drops, generating sync drop shim
+            let mut drop_shim =
+                create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean);
+            // Run derefer to fix Derefs that are not in the first place
+            deref_finder(tcx, &mut drop_shim);
+            body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim);
+
+            // For coroutine with sync drop, generating async proxy for `future_drop_poll` call
+            let mut proxy_shim = create_coroutine_drop_shim_proxy_async(tcx, body);
+            deref_finder(tcx, &mut proxy_shim);
+            body.coroutine.as_mut().unwrap().coroutine_drop_proxy_async = Some(proxy_shim);
+        }
 
         // Create the Coroutine::resume / Future::poll function
-        create_coroutine_resume_function(tcx, transform, body, can_return);
+        create_coroutine_resume_function(tcx, transform, body, can_return, can_unwind);
 
         // Run derefer to fix Derefs that are not in the first place
         deref_finder(tcx, body);
diff --git a/compiler/rustc_mir_transform/src/coroutine/drop.rs b/compiler/rustc_mir_transform/src/coroutine/drop.rs
new file mode 100644
index 00000000000..6b266da5a69
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/coroutine/drop.rs
@@ -0,0 +1,725 @@
+//! Drops and async drops related logic for coroutine transformation pass
+
+use super::*;
+
+// Fix return Poll<Rv>::Pending statement into Poll<()>::Pending for async drop function
+struct FixReturnPendingVisitor<'tcx> {
+    tcx: TyCtxt<'tcx>,
+}
+
+impl<'tcx> MutVisitor<'tcx> for FixReturnPendingVisitor<'tcx> {
+    fn tcx(&self) -> TyCtxt<'tcx> {
+        self.tcx
+    }
+
+    fn visit_assign(
+        &mut self,
+        place: &mut Place<'tcx>,
+        rvalue: &mut Rvalue<'tcx>,
+        _location: Location,
+    ) {
+        if place.local != RETURN_PLACE {
+            return;
+        }
+
+        // Converting `_0 = Poll::<Rv>::Pending` to `_0 = Poll::<()>::Pending`
+        if let Rvalue::Aggregate(kind, _) = rvalue {
+            if let AggregateKind::Adt(_, _, ref mut args, _, _) = **kind {
+                *args = self.tcx.mk_args(&[self.tcx.types.unit.into()]);
+            }
+        }
+    }
+}
+
+// rv = call fut.poll()
+fn build_poll_call<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    poll_unit_place: &Place<'tcx>,
+    switch_block: BasicBlock,
+    fut_pin_place: &Place<'tcx>,
+    fut_ty: Ty<'tcx>,
+    context_ref_place: &Place<'tcx>,
+    unwind: UnwindAction,
+) -> BasicBlock {
+    let poll_fn = tcx.require_lang_item(LangItem::FuturePoll, None);
+    let poll_fn = Ty::new_fn_def(tcx, poll_fn, [fut_ty]);
+    let poll_fn = Operand::Constant(Box::new(ConstOperand {
+        span: DUMMY_SP,
+        user_ty: None,
+        const_: Const::zero_sized(poll_fn),
+    }));
+    let call = TerminatorKind::Call {
+        func: poll_fn.clone(),
+        args: [
+            dummy_spanned(Operand::Move(*fut_pin_place)),
+            dummy_spanned(Operand::Move(*context_ref_place)),
+        ]
+        .into(),
+        destination: *poll_unit_place,
+        target: Some(switch_block),
+        unwind,
+        call_source: CallSource::Misc,
+        fn_span: DUMMY_SP,
+    };
+    insert_term_block(body, call)
+}
+
+// pin_fut = Pin::new_unchecked(&mut fut)
+fn build_pin_fut<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    fut_place: Place<'tcx>,
+    unwind: UnwindAction,
+) -> (BasicBlock, Place<'tcx>) {
+    let span = body.span;
+    let source_info = SourceInfo::outermost(span);
+    let fut_ty = fut_place.ty(&body.local_decls, tcx).ty;
+    let fut_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, fut_ty);
+    let fut_ref_place = Place::from(body.local_decls.push(LocalDecl::new(fut_ref_ty, span)));
+    let pin_fut_new_unchecked_fn = Ty::new_fn_def(
+        tcx,
+        tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)),
+        [fut_ref_ty],
+    );
+    let fut_pin_ty = pin_fut_new_unchecked_fn.fn_sig(tcx).output().skip_binder();
+    let fut_pin_place = Place::from(body.local_decls.push(LocalDecl::new(fut_pin_ty, span)));
+    let pin_fut_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand {
+        span,
+        user_ty: None,
+        const_: Const::zero_sized(pin_fut_new_unchecked_fn),
+    }));
+
+    let storage_live =
+        Statement { source_info, kind: StatementKind::StorageLive(fut_pin_place.local) };
+
+    let fut_ref_assign = Statement {
+        source_info,
+        kind: StatementKind::Assign(Box::new((
+            fut_ref_place,
+            Rvalue::Ref(
+                tcx.lifetimes.re_erased,
+                BorrowKind::Mut { kind: MutBorrowKind::Default },
+                fut_place,
+            ),
+        ))),
+    };
+
+    // call Pin<FutTy>::new_unchecked(&mut fut)
+    let pin_fut_bb = body.basic_blocks_mut().push(BasicBlockData {
+        statements: [storage_live, fut_ref_assign].to_vec(),
+        terminator: Some(Terminator {
+            source_info,
+            kind: TerminatorKind::Call {
+                func: pin_fut_new_unchecked_fn,
+                args: [dummy_spanned(Operand::Move(fut_ref_place))].into(),
+                destination: fut_pin_place,
+                target: None, // will be fixed later
+                unwind,
+                call_source: CallSource::Misc,
+                fn_span: span,
+            },
+        }),
+        is_cleanup: false,
+    });
+    (pin_fut_bb, fut_pin_place)
+}
+
+// Build Poll switch for async drop
+// match rv {
+//     Ready() => ready_block
+//     Pending => yield_block
+//}
+fn build_poll_switch<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    poll_enum: Ty<'tcx>,
+    poll_unit_place: &Place<'tcx>,
+    ready_block: BasicBlock,
+    yield_block: BasicBlock,
+) -> BasicBlock {
+    let poll_enum_adt = poll_enum.ty_adt_def().unwrap();
+
+    let Discr { val: poll_ready_discr, ty: poll_discr_ty } = poll_enum
+        .discriminant_for_variant(
+            tcx,
+            poll_enum_adt.variant_index_with_id(tcx.require_lang_item(LangItem::PollReady, None)),
+        )
+        .unwrap();
+    let poll_pending_discr = poll_enum
+        .discriminant_for_variant(
+            tcx,
+            poll_enum_adt.variant_index_with_id(tcx.require_lang_item(LangItem::PollPending, None)),
+        )
+        .unwrap()
+        .val;
+    let source_info = SourceInfo::outermost(body.span);
+    let poll_discr_place =
+        Place::from(body.local_decls.push(LocalDecl::new(poll_discr_ty, source_info.span)));
+    let discr_assign = Statement {
+        source_info,
+        kind: StatementKind::Assign(Box::new((
+            poll_discr_place,
+            Rvalue::Discriminant(*poll_unit_place),
+        ))),
+    };
+    let unreachable_block = insert_term_block(body, TerminatorKind::Unreachable);
+    body.basic_blocks_mut().push(BasicBlockData {
+        statements: [discr_assign].to_vec(),
+        terminator: Some(Terminator {
+            source_info,
+            kind: TerminatorKind::SwitchInt {
+                discr: Operand::Move(poll_discr_place),
+                targets: SwitchTargets::new(
+                    [(poll_ready_discr, ready_block), (poll_pending_discr, yield_block)]
+                        .into_iter(),
+                    unreachable_block,
+                ),
+            },
+        }),
+        is_cleanup: false,
+    })
+}
+
+// Gather blocks, reachable through 'drop' targets of Yield and Drop terminators (chained)
+fn gather_dropline_blocks<'tcx>(body: &mut Body<'tcx>) -> DenseBitSet<BasicBlock> {
+    let mut dropline: DenseBitSet<BasicBlock> = DenseBitSet::new_empty(body.basic_blocks.len());
+    for (bb, data) in traversal::reverse_postorder(body) {
+        if dropline.contains(bb) {
+            data.terminator().successors().for_each(|v| {
+                dropline.insert(v);
+            });
+        } else {
+            match data.terminator().kind {
+                TerminatorKind::Yield { drop: Some(v), .. } => {
+                    dropline.insert(v);
+                }
+                TerminatorKind::Drop { drop: Some(v), .. } => {
+                    dropline.insert(v);
+                }
+                _ => (),
+            }
+        }
+    }
+    dropline
+}
+
+/// Cleanup all async drops (reset to sync)
+pub(super) fn cleanup_async_drops<'tcx>(body: &mut Body<'tcx>) {
+    for block in body.basic_blocks_mut() {
+        if let TerminatorKind::Drop {
+            place: _,
+            target: _,
+            unwind: _,
+            replace: _,
+            ref mut drop,
+            ref mut async_fut,
+        } = block.terminator_mut().kind
+        {
+            if drop.is_some() || async_fut.is_some() {
+                *drop = None;
+                *async_fut = None;
+            }
+        }
+    }
+}
+
+pub(super) fn has_expandable_async_drops<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    coroutine_ty: Ty<'tcx>,
+) -> bool {
+    for bb in START_BLOCK..body.basic_blocks.next_index() {
+        // Drops in unwind path (cleanup blocks) are not expanded to async drops, only sync drops in unwind path
+        if body[bb].is_cleanup {
+            continue;
+        }
+        let TerminatorKind::Drop { place, target: _, unwind: _, replace: _, drop: _, async_fut } =
+            body[bb].terminator().kind
+        else {
+            continue;
+        };
+        let place_ty = place.ty(&body.local_decls, tcx).ty;
+        if place_ty == coroutine_ty {
+            continue;
+        }
+        if async_fut.is_none() {
+            continue;
+        }
+        return true;
+    }
+    return false;
+}
+
+/// Expand Drop terminator for async drops into mainline poll-switch and dropline poll-switch
+pub(super) fn expand_async_drops<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    context_mut_ref: Ty<'tcx>,
+    coroutine_kind: hir::CoroutineKind,
+    coroutine_ty: Ty<'tcx>,
+) {
+    let dropline = gather_dropline_blocks(body);
+    // Clean drop and async_fut fields if potentially async drop is not expanded (stays sync)
+    let remove_asyncness = |block: &mut BasicBlockData<'tcx>| {
+        if let TerminatorKind::Drop {
+            place: _,
+            target: _,
+            unwind: _,
+            replace: _,
+            ref mut drop,
+            ref mut async_fut,
+        } = block.terminator_mut().kind
+        {
+            *drop = None;
+            *async_fut = None;
+        }
+    };
+    for bb in START_BLOCK..body.basic_blocks.next_index() {
+        // Drops in unwind path (cleanup blocks) are not expanded to async drops, only sync drops in unwind path
+        if body[bb].is_cleanup {
+            remove_asyncness(&mut body[bb]);
+            continue;
+        }
+        let TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut } =
+            body[bb].terminator().kind
+        else {
+            continue;
+        };
+
+        let place_ty = place.ty(&body.local_decls, tcx).ty;
+        if place_ty == coroutine_ty {
+            remove_asyncness(&mut body[bb]);
+            continue;
+        }
+
+        let Some(fut_local) = async_fut else {
+            remove_asyncness(&mut body[bb]);
+            continue;
+        };
+
+        let is_dropline_bb = dropline.contains(bb);
+
+        if !is_dropline_bb && drop.is_none() {
+            remove_asyncness(&mut body[bb]);
+            continue;
+        }
+
+        let fut_place = Place::from(fut_local);
+        let fut_ty = fut_place.ty(&body.local_decls, tcx).ty;
+
+        // poll-code:
+        // state_call_drop:
+        // #bb_pin: fut_pin = Pin<FutT>::new_unchecked(&mut fut)
+        // #bb_call: rv = call fut.poll() (or future_drop_poll(fut) for internal future drops)
+        // #bb_check: match (rv)
+        //  pending => return rv (yield)
+        //  ready => *continue_bb|drop_bb*
+
+        // Compute Poll<> (aka Poll with void return)
+        let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None));
+        let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()]));
+        let poll_decl = LocalDecl::new(poll_enum, body.span);
+        let poll_unit_place = Place::from(body.local_decls.push(poll_decl));
+
+        // First state-loop yield for mainline
+        let context_ref_place =
+            Place::from(body.local_decls.push(LocalDecl::new(context_mut_ref, body.span)));
+        let source_info = body[bb].terminator.as_ref().unwrap().source_info;
+        let arg = Rvalue::Use(Operand::Move(Place::from(CTX_ARG)));
+        body[bb].statements.push(Statement {
+            source_info,
+            kind: StatementKind::Assign(Box::new((context_ref_place, arg))),
+        });
+        let yield_block = insert_term_block(body, TerminatorKind::Unreachable); // `kind` replaced later to yield
+        let switch_block =
+            build_poll_switch(tcx, body, poll_enum, &poll_unit_place, target, yield_block);
+        let (pin_bb, fut_pin_place) =
+            build_pin_fut(tcx, body, fut_place.clone(), UnwindAction::Continue);
+        let call_bb = build_poll_call(
+            tcx,
+            body,
+            &poll_unit_place,
+            switch_block,
+            &fut_pin_place,
+            fut_ty,
+            &context_ref_place,
+            unwind,
+        );
+
+        // Second state-loop yield for transition to dropline (when coroutine async drop started)
+        let mut dropline_transition_bb: Option<BasicBlock> = None;
+        let mut dropline_yield_bb: Option<BasicBlock> = None;
+        let mut dropline_context_ref: Option<Place<'_>> = None;
+        let mut dropline_call_bb: Option<BasicBlock> = None;
+        if !is_dropline_bb {
+            let context_ref_place2: Place<'_> =
+                Place::from(body.local_decls.push(LocalDecl::new(context_mut_ref, body.span)));
+            let drop_yield_block = insert_term_block(body, TerminatorKind::Unreachable); // `kind` replaced later to yield
+            let drop_switch_block = build_poll_switch(
+                tcx,
+                body,
+                poll_enum,
+                &poll_unit_place,
+                drop.unwrap(),
+                drop_yield_block,
+            );
+            let (pin_bb2, fut_pin_place2) =
+                build_pin_fut(tcx, body, fut_place, UnwindAction::Continue);
+            let drop_call_bb = build_poll_call(
+                tcx,
+                body,
+                &poll_unit_place,
+                drop_switch_block,
+                &fut_pin_place2,
+                fut_ty,
+                &context_ref_place2,
+                unwind,
+            );
+            dropline_transition_bb = Some(pin_bb2);
+            dropline_yield_bb = Some(drop_yield_block);
+            dropline_context_ref = Some(context_ref_place2);
+            dropline_call_bb = Some(drop_call_bb);
+        }
+
+        // value needed only for return-yields or gen-coroutines, so just const here
+        let value = Operand::Constant(Box::new(ConstOperand {
+            span: body.span,
+            user_ty: None,
+            const_: Const::from_bool(tcx, false),
+        }));
+        use rustc_middle::mir::AssertKind::ResumedAfterDrop;
+        let panic_bb = insert_panic_block(tcx, body, ResumedAfterDrop(coroutine_kind));
+
+        if is_dropline_bb {
+            body[yield_block].terminator_mut().kind = TerminatorKind::Yield {
+                value: value.clone(),
+                resume: panic_bb,
+                resume_arg: context_ref_place,
+                drop: Some(pin_bb),
+            };
+        } else {
+            body[yield_block].terminator_mut().kind = TerminatorKind::Yield {
+                value: value.clone(),
+                resume: pin_bb,
+                resume_arg: context_ref_place,
+                drop: dropline_transition_bb,
+            };
+            body[dropline_yield_bb.unwrap()].terminator_mut().kind = TerminatorKind::Yield {
+                value,
+                resume: panic_bb,
+                resume_arg: dropline_context_ref.unwrap(),
+                drop: dropline_transition_bb,
+            };
+        }
+
+        if let TerminatorKind::Call { ref mut target, .. } = body[pin_bb].terminator_mut().kind {
+            *target = Some(call_bb);
+        } else {
+            bug!()
+        }
+        if !is_dropline_bb {
+            if let TerminatorKind::Call { ref mut target, .. } =
+                body[dropline_transition_bb.unwrap()].terminator_mut().kind
+            {
+                *target = dropline_call_bb;
+            } else {
+                bug!()
+            }
+        }
+
+        body[bb].terminator_mut().kind = TerminatorKind::Goto { target: pin_bb };
+    }
+}
+
+pub(super) fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+    use crate::elaborate_drop::{Unwind, elaborate_drop};
+    use crate::patch::MirPatch;
+    use crate::shim::DropShimElaborator;
+
+    // Note that `elaborate_drops` only drops the upvars of a coroutine, and
+    // this is ok because `open_drop` can only be reached within that own
+    // coroutine's resume function.
+    let typing_env = body.typing_env(tcx);
+
+    let mut elaborator = DropShimElaborator {
+        body,
+        patch: MirPatch::new(body),
+        tcx,
+        typing_env,
+        produce_async_drops: false,
+    };
+
+    for (block, block_data) in body.basic_blocks.iter_enumerated() {
+        let (target, unwind, source_info, dropline) = match block_data.terminator() {
+            Terminator {
+                source_info,
+                kind: TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut: _ },
+            } => {
+                if let Some(local) = place.as_local()
+                    && local == SELF_ARG
+                {
+                    (target, unwind, source_info, *drop)
+                } else {
+                    continue;
+                }
+            }
+            _ => continue,
+        };
+        let unwind = if block_data.is_cleanup {
+            Unwind::InCleanup
+        } else {
+            Unwind::To(match *unwind {
+                UnwindAction::Cleanup(tgt) => tgt,
+                UnwindAction::Continue => elaborator.patch.resume_block(),
+                UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(),
+                UnwindAction::Terminate(reason) => elaborator.patch.terminate_block(reason),
+            })
+        };
+        elaborate_drop(
+            &mut elaborator,
+            *source_info,
+            Place::from(SELF_ARG),
+            (),
+            *target,
+            unwind,
+            block,
+            dropline,
+        );
+    }
+    elaborator.patch.apply(body);
+}
+
+pub(super) fn insert_clean_drop<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    has_async_drops: bool,
+) -> BasicBlock {
+    let source_info = SourceInfo::outermost(body.span);
+    let return_block = if has_async_drops {
+        insert_poll_ready_block(tcx, body)
+    } else {
+        insert_term_block(body, TerminatorKind::Return)
+    };
+
+    // FIXME: When move insert_clean_drop + elaborate_coroutine_drops before async drops expand,
+    // also set dropline here:
+    // let dropline = if has_async_drops { Some(return_block) } else { None };
+    let dropline = None;
+
+    let term = TerminatorKind::Drop {
+        place: Place::from(SELF_ARG),
+        target: return_block,
+        unwind: UnwindAction::Continue,
+        replace: false,
+        drop: dropline,
+        async_fut: None,
+    };
+
+    // Create a block to destroy an unresumed coroutines. This can only destroy upvars.
+    body.basic_blocks_mut().push(BasicBlockData {
+        statements: Vec::new(),
+        terminator: Some(Terminator { source_info, kind: term }),
+        is_cleanup: false,
+    })
+}
+
+pub(super) fn create_coroutine_drop_shim<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    transform: &TransformVisitor<'tcx>,
+    coroutine_ty: Ty<'tcx>,
+    body: &Body<'tcx>,
+    drop_clean: BasicBlock,
+) -> Body<'tcx> {
+    let mut body = body.clone();
+    // Take the coroutine info out of the body, since the drop shim is
+    // not a coroutine body itself; it just has its drop built out of it.
+    let _ = body.coroutine.take();
+    // Make sure the resume argument is not included here, since we're
+    // building a body for `drop_in_place`.
+    body.arg_count = 1;
+
+    let source_info = SourceInfo::outermost(body.span);
+
+    let mut cases = create_cases(&mut body, transform, Operation::Drop);
+
+    cases.insert(0, (CoroutineArgs::UNRESUMED, drop_clean));
+
+    // The returned state and the poisoned state fall through to the default
+    // case which is just to return
+
+    let default_block = insert_term_block(&mut body, TerminatorKind::Return);
+    insert_switch(&mut body, cases, transform, default_block);
+
+    for block in body.basic_blocks_mut() {
+        let kind = &mut block.terminator_mut().kind;
+        if let TerminatorKind::CoroutineDrop = *kind {
+            *kind = TerminatorKind::Return;
+        }
+    }
+
+    // Replace the return variable
+    body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(tcx.types.unit, source_info);
+
+    make_coroutine_state_argument_indirect(tcx, &mut body);
+
+    // Change the coroutine argument from &mut to *mut
+    body.local_decls[SELF_ARG] =
+        LocalDecl::with_source_info(Ty::new_mut_ptr(tcx, coroutine_ty), source_info);
+
+    // Make sure we remove dead blocks to remove
+    // unrelated code from the resume part of the function
+    simplify::remove_dead_blocks(&mut body);
+
+    // Update the body's def to become the drop glue.
+    let coroutine_instance = body.source.instance;
+    let drop_in_place = tcx.require_lang_item(LangItem::DropInPlace, None);
+    let drop_instance = InstanceKind::DropGlue(drop_in_place, Some(coroutine_ty));
+
+    // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible
+    // filename.
+    body.source.instance = coroutine_instance;
+    dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(()));
+    body.source.instance = drop_instance;
+
+    // Creating a coroutine drop shim happens on `Analysis(PostCleanup) -> Runtime(Initial)`
+    // but the pass manager doesn't update the phase of the coroutine drop shim. Update the
+    // phase of the drop shim so that later on when we run the pass manager on the shim, in
+    // the `mir_shims` query, we don't ICE on the intra-pass validation before we've updated
+    // the phase of the body from analysis.
+    body.phase = MirPhase::Runtime(RuntimePhase::Initial);
+
+    body
+}
+
+// Create async drop shim function to drop coroutine itself
+pub(super) fn create_coroutine_drop_shim_async<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    transform: &TransformVisitor<'tcx>,
+    body: &Body<'tcx>,
+    drop_clean: BasicBlock,
+    can_unwind: bool,
+) -> Body<'tcx> {
+    let mut body = body.clone();
+    // Take the coroutine info out of the body, since the drop shim is
+    // not a coroutine body itself; it just has its drop built out of it.
+    let _ = body.coroutine.take();
+
+    FixReturnPendingVisitor { tcx }.visit_body(&mut body);
+
+    // Poison the coroutine when it unwinds
+    if can_unwind {
+        generate_poison_block_and_redirect_unwinds_there(transform, &mut body);
+    }
+
+    let source_info = SourceInfo::outermost(body.span);
+
+    let mut cases = create_cases(&mut body, transform, Operation::Drop);
+
+    cases.insert(0, (CoroutineArgs::UNRESUMED, drop_clean));
+
+    use rustc_middle::mir::AssertKind::ResumedAfterPanic;
+    // Panic when resumed on the returned or poisoned state
+    if can_unwind {
+        cases.insert(
+            1,
+            (
+                CoroutineArgs::POISONED,
+                insert_panic_block(tcx, &mut body, ResumedAfterPanic(transform.coroutine_kind)),
+            ),
+        );
+    }
+
+    // RETURNED state also goes to default_block with `return Ready<()>`.
+    // For fully-polled coroutine, async drop has nothing to do.
+    let default_block = insert_poll_ready_block(tcx, &mut body);
+    insert_switch(&mut body, cases, transform, default_block);
+
+    for block in body.basic_blocks_mut() {
+        let kind = &mut block.terminator_mut().kind;
+        if let TerminatorKind::CoroutineDrop = *kind {
+            *kind = TerminatorKind::Return;
+            block.statements.push(return_poll_ready_assign(tcx, source_info));
+        }
+    }
+
+    // Replace the return variable: Poll<RetT> to Poll<()>
+    let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None));
+    let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()]));
+    body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(poll_enum, source_info);
+
+    make_coroutine_state_argument_indirect(tcx, &mut body);
+
+    match transform.coroutine_kind {
+        // Iterator::next doesn't accept a pinned argument,
+        // unlike for all other coroutine kinds.
+        CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {}
+        _ => {
+            make_coroutine_state_argument_pinned(tcx, &mut body);
+        }
+    }
+
+    // Make sure we remove dead blocks to remove
+    // unrelated code from the resume part of the function
+    simplify::remove_dead_blocks(&mut body);
+
+    pm::run_passes_no_validate(
+        tcx,
+        &mut body,
+        &[&abort_unwinding_calls::AbortUnwindingCalls],
+        None,
+    );
+
+    dump_mir(tcx, false, "coroutine_drop_async", &0, &body, |_, _| Ok(()));
+
+    body
+}
+
+// Create async drop shim proxy function for future_drop_poll
+// It is just { call coroutine_drop(); return Poll::Ready(); }
+pub(super) fn create_coroutine_drop_shim_proxy_async<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &Body<'tcx>,
+) -> Body<'tcx> {
+    let mut body = body.clone();
+    // Take the coroutine info out of the body, since the drop shim is
+    // not a coroutine body itself; it just has its drop built out of it.
+    let _ = body.coroutine.take();
+    let basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>> = IndexVec::new();
+    body.basic_blocks = BasicBlocks::new(basic_blocks);
+    body.var_debug_info.clear();
+
+    // Keeping return value and args
+    body.local_decls.truncate(1 + body.arg_count);
+
+    let source_info = SourceInfo::outermost(body.span);
+
+    // Replace the return variable: Poll<RetT> to Poll<()>
+    let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None));
+    let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()]));
+    body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(poll_enum, source_info);
+
+    // call coroutine_drop()
+    let call_bb = body.basic_blocks_mut().push(BasicBlockData {
+        statements: Vec::new(),
+        terminator: None,
+        is_cleanup: false,
+    });
+
+    // return Poll::Ready()
+    let ret_bb = insert_poll_ready_block(tcx, &mut body);
+
+    let kind = TerminatorKind::Drop {
+        place: Place::from(SELF_ARG),
+        target: ret_bb,
+        unwind: UnwindAction::Continue,
+        replace: false,
+        drop: None,
+        async_fut: None,
+    };
+    body.basic_blocks_mut()[call_bb].terminator = Some(Terminator { source_info, kind });
+
+    dump_mir(tcx, false, "coroutine_drop_proxy_async", &0, &body, |_, _| Ok(()));
+
+    body
+}
diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs
index e3057a2f648..6f867f8105d 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drop.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs
@@ -4,12 +4,12 @@ use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
 use rustc_hir::lang_items::LangItem;
 use rustc_index::Idx;
 use rustc_middle::mir::*;
-use rustc_middle::span_bug;
 use rustc_middle::ty::adjustment::PointerCoercion;
 use rustc_middle::ty::util::IntTypeExt;
-use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt};
+use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt};
+use rustc_middle::{bug, span_bug, traits};
 use rustc_span::DUMMY_SP;
-use rustc_span::source_map::Spanned;
+use rustc_span::source_map::{Spanned, dummy_spanned};
 use tracing::{debug, instrument};
 
 use crate::patch::MirPatch;
@@ -94,6 +94,9 @@ pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
     fn body(&self) -> &'a Body<'tcx>;
     fn tcx(&self) -> TyCtxt<'tcx>;
     fn typing_env(&self) -> ty::TypingEnv<'tcx>;
+    fn allow_async_drops(&self) -> bool;
+
+    fn terminator_loc(&self, bb: BasicBlock) -> Location;
 
     // Drop logic
 
@@ -149,6 +152,7 @@ where
     path: D::Path,
     succ: BasicBlock,
     unwind: Unwind,
+    dropline: Option<BasicBlock>,
 }
 
 /// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
@@ -167,11 +171,12 @@ pub(crate) fn elaborate_drop<'b, 'tcx, D>(
     succ: BasicBlock,
     unwind: Unwind,
     bb: BasicBlock,
+    dropline: Option<BasicBlock>,
 ) where
     D: DropElaborator<'b, 'tcx>,
     'tcx: 'b,
 {
-    DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
+    DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb)
 }
 
 impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
@@ -195,6 +200,209 @@ where
         self.elaborator.tcx()
     }
 
+    // Generates three blocks:
+    // * #1:pin_obj_bb:   call Pin<ObjTy>::new_unchecked(&mut obj)
+    // * #2:call_drop_bb: fut = call obj.<AsyncDrop::drop>() OR call async_drop_in_place<T>(obj)
+    // * #3:drop_term_bb: drop (obj, fut, ...)
+    // We keep async drop unexpanded to poll-loop here, to expand it later, at StateTransform -
+    //   into states expand.
+    // call_destructor_only - to call only AsyncDrop::drop, not full async_drop_in_place glue
+    fn build_async_drop(
+        &mut self,
+        place: Place<'tcx>,
+        drop_ty: Ty<'tcx>,
+        bb: Option<BasicBlock>,
+        succ: BasicBlock,
+        unwind: Unwind,
+        dropline: Option<BasicBlock>,
+        call_destructor_only: bool,
+    ) -> BasicBlock {
+        let tcx = self.tcx();
+        let span = self.source_info.span;
+
+        let pin_obj_bb = bb.unwrap_or_else(|| {
+            self.elaborator.patch().new_block(BasicBlockData {
+                statements: vec![],
+                terminator: Some(Terminator {
+                    // Temporary terminator, will be replaced by patch
+                    source_info: self.source_info,
+                    kind: TerminatorKind::Return,
+                }),
+                is_cleanup: false,
+            })
+        });
+
+        let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only {
+            // Resolving obj.<AsyncDrop::drop>()
+            let trait_ref = ty::TraitRef::new(
+                tcx,
+                tcx.require_lang_item(LangItem::AsyncDrop, Some(span)),
+                [drop_ty],
+            );
+            let (drop_trait, trait_args) = match tcx.codegen_select_candidate(
+                ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref),
+            ) {
+                Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
+                    impl_def_id,
+                    args,
+                    ..
+                })) => (*impl_def_id, *args),
+                impl_source => {
+                    span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source);
+                }
+            };
+            let drop_fn_def_id = tcx.associated_item_def_ids(drop_trait)[0];
+            let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args);
+            let sig = drop_fn.fn_sig(tcx);
+            let sig = tcx.instantiate_bound_regions_with_erased(sig);
+            (sig.output(), drop_fn_def_id, trait_args)
+        } else {
+            // Resolving async_drop_in_place<T> function for drop_ty
+            let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, Some(span));
+            let trait_args = tcx.mk_args(&[drop_ty.into()]);
+            let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args);
+            let sig = tcx.instantiate_bound_regions_with_erased(sig);
+            (sig.output(), drop_fn_def_id, trait_args)
+        };
+
+        let fut = Place::from(self.new_temp(fut_ty));
+
+        // #1:pin_obj_bb >>> obj_ref = &mut obj
+        let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty);
+        let obj_ref_place = Place::from(self.new_temp(obj_ref_ty));
+
+        let term_loc = self.elaborator.terminator_loc(pin_obj_bb);
+        self.elaborator.patch().add_assign(
+            term_loc,
+            obj_ref_place,
+            Rvalue::Ref(
+                tcx.lifetimes.re_erased,
+                BorrowKind::Mut { kind: MutBorrowKind::Default },
+                place,
+            ),
+        );
+
+        // pin_obj_place preparation
+        let pin_obj_new_unchecked_fn = Ty::new_fn_def(
+            tcx,
+            tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)),
+            [GenericArg::from(obj_ref_ty)],
+        );
+        let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap();
+        let pin_obj_place = Place::from(self.new_temp(pin_obj_ty));
+        let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand {
+            span,
+            user_ty: None,
+            const_: Const::zero_sized(pin_obj_new_unchecked_fn),
+        }));
+
+        // #3:drop_term_bb
+        let drop_term_bb = self.new_block(
+            unwind,
+            TerminatorKind::Drop {
+                place,
+                target: succ,
+                unwind: unwind.into_action(),
+                replace: false,
+                drop: dropline,
+                async_fut: Some(fut.local),
+            },
+        );
+
+        // #2:call_drop_bb
+        let mut call_statements = Vec::new();
+        let drop_arg = if call_destructor_only {
+            pin_obj_place
+        } else {
+            let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else {
+                bug!();
+            };
+            let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
+            let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));
+            let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args);
+            let addr = Rvalue::RawPtr(
+                RawPtrKind::Mut,
+                pin_obj_place.project_deeper(
+                    &[ProjectionElem::Field(FieldIdx::ZERO, unwrap_ty), ProjectionElem::Deref],
+                    tcx,
+                ),
+            );
+            call_statements.push(self.assign(obj_ptr_place, addr));
+            obj_ptr_place
+        };
+        call_statements.push(Statement {
+            source_info: self.source_info,
+            kind: StatementKind::StorageLive(fut.local),
+        });
+
+        let call_drop_bb = self.new_block_with_statements(
+            unwind,
+            call_statements,
+            TerminatorKind::Call {
+                func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span),
+                args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(),
+                destination: fut,
+                target: Some(drop_term_bb),
+                unwind: unwind.into_action(),
+                call_source: CallSource::Misc,
+                fn_span: self.source_info.span,
+            },
+        );
+
+        // StorageDead(fut) in self.succ block (at the beginning)
+        self.elaborator.patch().add_statement(
+            Location { block: self.succ, statement_index: 0 },
+            StatementKind::StorageDead(fut.local),
+        );
+
+        // #1:pin_obj_bb >>> call Pin<ObjTy>::new_unchecked(&mut obj)
+        self.elaborator.patch().patch_terminator(
+            pin_obj_bb,
+            TerminatorKind::Call {
+                func: pin_obj_new_unchecked_fn,
+                args: [dummy_spanned(Operand::Move(obj_ref_place))].into(),
+                destination: pin_obj_place,
+                target: Some(call_drop_bb),
+                unwind: unwind.into_action(),
+                call_source: CallSource::Misc,
+                fn_span: span,
+            },
+        );
+        pin_obj_bb
+    }
+
+    fn build_drop(&mut self, bb: BasicBlock) {
+        let drop_ty = self.place_ty(self.place);
+        if self.tcx().features().async_drop()
+            && self.elaborator.body().coroutine.is_some()
+            && self.elaborator.allow_async_drops()
+            && !self.elaborator.body()[bb].is_cleanup
+            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
+        {
+            self.build_async_drop(
+                self.place,
+                drop_ty,
+                Some(bb),
+                self.succ,
+                self.unwind,
+                self.dropline,
+                false,
+            );
+        } else {
+            self.elaborator.patch().patch_terminator(
+                bb,
+                TerminatorKind::Drop {
+                    place: self.place,
+                    target: self.succ,
+                    unwind: self.unwind.into_action(),
+                    replace: false,
+                    drop: None,
+                    async_fut: None,
+                },
+            );
+        }
+    }
+
     /// This elaborates a single drop instruction, located at `bb`, and
     /// patches over it.
     ///
@@ -222,15 +430,7 @@ where
                     .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
             }
             DropStyle::Static => {
-                self.elaborator.patch().patch_terminator(
-                    bb,
-                    TerminatorKind::Drop {
-                        place: self.place,
-                        target: self.succ,
-                        unwind: self.unwind.into_action(),
-                        replace: false,
-                    },
-                );
+                self.build_drop(bb);
             }
             DropStyle::Conditional => {
                 let drop_bb = self.complete_drop(self.succ, self.unwind);
@@ -289,6 +489,7 @@ where
         path: Option<D::Path>,
         succ: BasicBlock,
         unwind: Unwind,
+        dropline: Option<BasicBlock>,
     ) -> BasicBlock {
         if let Some(path) = path {
             debug!("drop_subpath: for std field {:?}", place);
@@ -300,6 +501,7 @@ where
                 place,
                 succ,
                 unwind,
+                dropline,
             }
             .elaborated_drop_block()
         } else {
@@ -311,6 +513,7 @@ where
                 place,
                 succ,
                 unwind,
+                dropline,
                 // Using `self.path` here to condition the drop on
                 // our own drop flag.
                 path: self.path,
@@ -325,25 +528,36 @@ where
     ///
     /// `unwind_ladder` is such a list of steps in reverse order,
     /// which is called if the matching step of the drop glue panics.
+    ///
+    /// `dropline_ladder` is a similar list of steps in reverse order,
+    /// which is called if the matching step of the drop glue contains an async drop
+    /// (expanded later to a Yield) and the containing coroutine is dropped at that point.
     fn drop_halfladder(
         &mut self,
         unwind_ladder: &[Unwind],
+        dropline_ladder: &[Option<BasicBlock>],
         mut succ: BasicBlock,
         fields: &[(Place<'tcx>, Option<D::Path>)],
     ) -> Vec<BasicBlock> {
         iter::once(succ)
-            .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| {
-                succ = self.drop_subpath(place, path, succ, unwind_succ);
-                succ
-            }))
+            .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map(
+                |(&(place, path), &unwind_succ, &dropline_to)| {
+                    succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to);
+                    succ
+                },
+            ))
             .collect()
     }
 
-    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
+    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option<BasicBlock>) {
         // Clear the "master" drop flag at the end. This is needed
         // because the "master" drop protects the ADT's discriminant,
         // which is invalidated after the ADT is dropped.
-        (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind)
+        (
+            self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind),
+            self.unwind,
+            self.dropline,
+        )
     }
 
     /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
@@ -361,6 +575,22 @@ where
     /// .c2:
     ///     ELAB(drop location.2 [target=`self.unwind`])
     ///
+    /// For possibly-async drops in coroutines we also need a dropline ladder:
+    /// .d0 (mainline):
+    ///     ELAB(drop location.0 [target=.d1, unwind=.c1, drop=.e1])
+    /// .d1 (mainline):
+    ///     ELAB(drop location.1 [target=.d2, unwind=.c2, drop=.e2])
+    /// .d2 (mainline):
+    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`, drop=`self.drop`])
+    /// .c1 (unwind):
+    ///     ELAB(drop location.1 [target=.c2])
+    /// .c2 (unwind):
+    ///     ELAB(drop location.2 [target=`self.unwind`])
+    /// .e1 (dropline):
+    ///     ELAB(drop location.1 [target=.e2, unwind=.c2])
+    /// .e2 (dropline):
+    ///     ELAB(drop location.2 [target=`self.drop`, unwind=`self.unwind`])
+    ///
     /// NOTE: this does not clear the master drop flag, so you need
     /// to point succ/unwind on a `drop_ladder_bottom`.
     fn drop_ladder(
@@ -368,8 +598,13 @@ where
         fields: Vec<(Place<'tcx>, Option<D::Path>)>,
         succ: BasicBlock,
         unwind: Unwind,
-    ) -> (BasicBlock, Unwind) {
+        dropline: Option<BasicBlock>,
+    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
         debug!("drop_ladder({:?}, {:?})", self, fields);
+        assert!(
+            if unwind.is_cleanup() { dropline.is_none() } else { true },
+            "Dropline is set for cleanup drop ladder"
+        );
 
         let mut fields = fields;
         fields.retain(|&(place, _)| {
@@ -378,17 +613,28 @@ where
 
         debug!("drop_ladder - fields needing drop: {:?}", fields);
 
+        let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
         let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
-        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
-            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
+        let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind {
+            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
             halfladder.into_iter().map(Unwind::To).collect()
         } else {
             unwind_ladder
         };
+        let dropline_ladder: Vec<_> = if let Some(succ) = dropline {
+            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
+            halfladder.into_iter().map(Some).collect()
+        } else {
+            dropline_ladder
+        };
 
-        let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields);
+        let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
 
-        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
+        (
+            *normal_ladder.last().unwrap(),
+            *unwind_ladder.last().unwrap(),
+            *dropline_ladder.last().unwrap(),
+        )
     }
 
     fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
@@ -405,8 +651,8 @@ where
             })
             .collect();
 
-        let (succ, unwind) = self.drop_ladder_bottom();
-        self.drop_ladder(fields, succ, unwind).0
+        let (succ, unwind, dropline) = self.drop_ladder_bottom();
+        self.drop_ladder(fields, succ, unwind, dropline).0
     }
 
     /// Drops the T contained in a `Box<T>` if it has not been moved out of
@@ -417,6 +663,7 @@ where
         args: GenericArgsRef<'tcx>,
         succ: BasicBlock,
         unwind: Unwind,
+        dropline: Option<BasicBlock>,
     ) -> BasicBlock {
         // drop glue is sent straight to codegen
         // box cannot be directly dereferenced
@@ -433,7 +680,7 @@ where
         let interior = self.tcx().mk_place_deref(Place::from(ptr_local));
         let interior_path = self.elaborator.deref_subpath(self.path);
 
-        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind);
+        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind, dropline);
 
         let setup_bbd = BasicBlockData {
             statements: vec![self.assign(
@@ -468,19 +715,22 @@ where
 
         let skip_contents = adt.is_union() || adt.is_manually_drop();
         let contents_drop = if skip_contents {
-            (self.succ, self.unwind)
+            (self.succ, self.unwind, self.dropline)
         } else {
             self.open_drop_for_adt_contents(adt, args)
         };
 
         if adt.is_box() {
             // we need to drop the inside of the box before running the destructor
-            let succ = self.destructor_call_block(contents_drop);
+            let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
             let unwind = contents_drop
                 .1
-                .map(|unwind| self.destructor_call_block((unwind, Unwind::InCleanup)));
+                .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
+            let dropline = contents_drop
+                .2
+                .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
 
-            self.open_drop_for_box_contents(adt, args, succ, unwind)
+            self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
         } else if adt.has_dtor(self.tcx()) {
             self.destructor_call_block(contents_drop)
         } else {
@@ -492,14 +742,14 @@ where
         &mut self,
         adt: ty::AdtDef<'tcx>,
         args: GenericArgsRef<'tcx>,
-    ) -> (BasicBlock, Unwind) {
-        let (succ, unwind) = self.drop_ladder_bottom();
+    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
+        let (succ, unwind, dropline) = self.drop_ladder_bottom();
         if !adt.is_enum() {
             let fields =
                 self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
-            self.drop_ladder(fields, succ, unwind)
+            self.drop_ladder(fields, succ, unwind, dropline)
         } else {
-            self.open_drop_for_multivariant(adt, args, succ, unwind)
+            self.open_drop_for_multivariant(adt, args, succ, unwind, dropline)
         }
     }
 
@@ -509,11 +759,14 @@ where
         args: GenericArgsRef<'tcx>,
         succ: BasicBlock,
         unwind: Unwind,
-    ) -> (BasicBlock, Unwind) {
+        dropline: Option<BasicBlock>,
+    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
         let mut values = Vec::with_capacity(adt.variants().len());
         let mut normal_blocks = Vec::with_capacity(adt.variants().len());
         let mut unwind_blocks =
             if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
+        let mut dropline_blocks =
+            if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
 
         let mut have_otherwise_with_drop_glue = false;
         let mut have_otherwise = false;
@@ -551,11 +804,16 @@ where
 
                     let unwind_blocks = unwind_blocks.as_mut().unwrap();
                     let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
-                    let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields);
+                    let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
+                    let halfladder =
+                        self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields);
                     unwind_blocks.push(halfladder.last().cloned().unwrap());
                 }
-                let (normal, _) = self.drop_ladder(fields, succ, unwind);
+                let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline);
                 normal_blocks.push(normal);
+                if dropline.is_some() {
+                    dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap());
+                }
             } else {
                 have_otherwise = true;
 
@@ -595,6 +853,9 @@ where
                     Unwind::InCleanup,
                 )
             }),
+            dropline.map(|dropline| {
+                self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind)
+            }),
         )
     }
 
@@ -634,8 +895,8 @@ where
         self.drop_flag_test_block(switch_block, succ, unwind)
     }
 
-    fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
-        debug!("destructor_call_block({:?}, {:?})", self, succ);
+    fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
+        debug!("destructor_call_block_sync({:?}, {:?})", self, succ);
         let tcx = self.tcx();
         let drop_trait = tcx.require_lang_item(LangItem::Drop, None);
         let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
@@ -683,6 +944,30 @@ where
         self.drop_flag_test_block(destructor_block, succ, unwind)
     }
 
+    fn destructor_call_block(
+        &mut self,
+        (succ, unwind, dropline): (BasicBlock, Unwind, Option<BasicBlock>),
+    ) -> BasicBlock {
+        debug!("destructor_call_block({:?}, {:?})", self, succ);
+        let ty = self.place_ty(self.place);
+        if self.tcx().features().async_drop()
+            && self.elaborator.body().coroutine.is_some()
+            && self.elaborator.allow_async_drops()
+            && !unwind.is_cleanup()
+            && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
+        {
+            let destructor_block =
+                self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);
+
+            let block_start = Location { block: destructor_block, statement_index: 0 };
+            self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
+
+            self.drop_flag_test_block(destructor_block, succ, unwind)
+        } else {
+            self.destructor_call_block_sync((succ, unwind))
+        }
+    }
+
     /// Create a loop that drops an array:
     ///
     /// ```text
@@ -701,6 +986,7 @@ where
         len: Local,
         ety: Ty<'tcx>,
         unwind: Unwind,
+        dropline: Option<BasicBlock>,
     ) -> BasicBlock {
         let copy = |place: Place<'tcx>| Operand::Copy(place);
         let move_ = |place: Place<'tcx>| Operand::Move(place);
@@ -744,16 +1030,35 @@ where
         };
         let loop_block = self.elaborator.patch().new_block(loop_block);
 
-        self.elaborator.patch().patch_terminator(
-            drop_block,
-            TerminatorKind::Drop {
-                place: tcx.mk_place_deref(ptr),
-                target: loop_block,
-                unwind: unwind.into_action(),
-                replace: false,
-            },
-        );
-
+        let place = tcx.mk_place_deref(ptr);
+        if self.tcx().features().async_drop()
+            && self.elaborator.body().coroutine.is_some()
+            && self.elaborator.allow_async_drops()
+            && !unwind.is_cleanup()
+            && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env())
+        {
+            self.build_async_drop(
+                place,
+                ety,
+                Some(drop_block),
+                loop_block,
+                unwind,
+                dropline,
+                false,
+            );
+        } else {
+            self.elaborator.patch().patch_terminator(
+                drop_block,
+                TerminatorKind::Drop {
+                    place,
+                    target: loop_block,
+                    unwind: unwind.into_action(),
+                    replace: false,
+                    drop: None,
+                    async_fut: None,
+                },
+            );
+        }
         loop_block
     }
 
@@ -820,8 +1125,8 @@ where
                         (tcx.mk_place_elem(self.place, project), path)
                     })
                     .collect::<Vec<_>>();
-                let (succ, unwind) = self.drop_ladder_bottom();
-                return self.drop_ladder(fields, succ, unwind).0;
+                let (succ, unwind, dropline) = self.drop_ladder_bottom();
+                return self.drop_ladder(fields, succ, unwind, dropline).0;
             }
         }
 
@@ -855,7 +1160,7 @@ where
             &mut self.place,
             Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
         );
-        let slice_block = self.drop_loop_pair_for_slice(ety);
+        let slice_block = self.drop_loop_trio_for_slice(ety);
         self.place = array_place;
 
         delegate_block.terminator = Some(Terminator {
@@ -865,18 +1170,22 @@ where
         self.elaborator.patch().new_block(delegate_block)
     }
 
-    /// Creates a pair of drop-loops of `place`, which drops its contents, even
-    /// in the case of 1 panic.
-    fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
-        debug!("drop_loop_pair_for_slice({:?})", ety);
+    /// Creates a trio of drop-loops of `place`, which drops its contents, even
+    /// in the case of a panic or a coroutine drop.
+    fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
+        debug!("drop_loop_trio_for_slice({:?})", ety);
         let tcx = self.tcx();
         let len = self.new_temp(tcx.types.usize);
         let cur = self.new_temp(tcx.types.usize);
 
-        let unwind =
-            self.unwind.map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup));
+        let unwind = self
+            .unwind
+            .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None));
 
-        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind);
+        let dropline =
+            self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None));
+
+        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline);
 
         let [PlaceElem::Deref] = self.place.projection.as_slice() else {
             span_bug!(
@@ -940,7 +1249,7 @@ where
                 let size = size.try_to_target_usize(self.tcx());
                 self.open_drop_for_array(ty, *ety, size)
             }
-            ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety),
+            ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety),
 
             _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
         }
@@ -977,21 +1286,53 @@ where
 
     fn elaborated_drop_block(&mut self) -> BasicBlock {
         debug!("elaborated_drop_block({:?})", self);
-        let blk = self.drop_block(self.succ, self.unwind);
+        let blk = self.drop_block_simple(self.succ, self.unwind);
         self.elaborate_drop(blk);
         blk
     }
 
-    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
+    fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
         let block = TerminatorKind::Drop {
             place: self.place,
             target,
             unwind: unwind.into_action(),
             replace: false,
+            drop: self.dropline,
+            async_fut: None,
         };
         self.new_block(unwind, block)
     }
 
+    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
+        let drop_ty = self.place_ty(self.place);
+        if self.tcx().features().async_drop()
+            && self.elaborator.body().coroutine.is_some()
+            && self.elaborator.allow_async_drops()
+            && !unwind.is_cleanup()
+            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
+        {
+            self.build_async_drop(
+                self.place,
+                drop_ty,
+                None,
+                self.succ,
+                unwind,
+                self.dropline,
+                false,
+            )
+        } else {
+            let block = TerminatorKind::Drop {
+                place: self.place,
+                target,
+                unwind: unwind.into_action(),
+                replace: false,
+                drop: None,
+                async_fut: None,
+            };
+            self.new_block(unwind, block)
+        }
+    }
+
     fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
         let block = TerminatorKind::Goto { target };
         self.new_block(unwind, block)
@@ -1033,6 +1374,19 @@ where
         })
     }
 
+    fn new_block_with_statements(
+        &mut self,
+        unwind: Unwind,
+        statements: Vec<Statement<'tcx>>,
+        k: TerminatorKind<'tcx>,
+    ) -> BasicBlock {
+        self.elaborator.patch().new_block(BasicBlockData {
+            statements,
+            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
+            is_cleanup: unwind.is_cleanup(),
+        })
+    }
+
     fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
         self.elaborator.patch().new_temp(ty, self.source_info.span)
     }
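
For readers less familiar with MIR, here is a rough source-level picture of the control flow that `build_async_drop` above emits. This is only a sketch: `AsyncDropper` and `drive_async_drop` are hypothetical names standing in for the unstable async-drop machinery, and the real lowering operates on MIR basic blocks rather than surface Rust.

```rust
use std::future::Future;
use std::pin::Pin;

// Hypothetical stand-in for the unstable async-drop machinery (sketch only).
trait AsyncDropper {
    fn drop_async(self: Pin<&mut Self>) -> impl Future<Output = ()>;
}

// Mirrors the three blocks built by `build_async_drop`:
// #1 pin the object, #2 obtain the drop future, #3 drive it.
// In the real MIR, step #3 stays an unexpanded `Drop` terminator carrying
// `async_fut`, which StateTransform later expands into a yield point.
async fn drive_async_drop<T: AsyncDropper>(obj: &mut T) {
    // #1: pin_obj_bb
    let pinned = unsafe { Pin::new_unchecked(obj) };
    // #2: call_drop_bb
    let fut = pinned.drop_async();
    // #3: drop_term_bb
    fut.await;
}
```
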
diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs
index 530c72ca549..42c8cb0b906 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -158,6 +158,14 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for ElaborateDropsCtxt<'a, 'tcx> {
         self.env.typing_env
     }
 
+    fn allow_async_drops(&self) -> bool {
+        true
+    }
+
+    fn terminator_loc(&self, bb: BasicBlock) -> Location {
+        self.patch.terminator_loc(self.body, bb)
+    }
+
     #[instrument(level = "debug", skip(self), ret)]
     fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
         let ((maybe_init, maybe_uninit), multipart) = match mode {
@@ -328,7 +336,9 @@ impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> {
         // This function should mirror what `collect_drop_flags` does.
         for (bb, data) in self.body.basic_blocks.iter_enumerated() {
             let terminator = data.terminator();
-            let TerminatorKind::Drop { place, target, unwind, replace } = terminator.kind else {
+            let TerminatorKind::Drop { place, target, unwind, replace, drop, async_fut: _ } =
+                terminator.kind
+            else {
                 continue;
             };
 
@@ -364,7 +374,16 @@ impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> {
                         }
                     };
                     self.init_data.seek_before(self.body.terminator_loc(bb));
-                    elaborate_drop(self, terminator.source_info, place, path, target, unwind, bb)
+                    elaborate_drop(
+                        self,
+                        terminator.source_info,
+                        place,
+                        path,
+                        target,
+                        unwind,
+                        bb,
+                        drop,
+                    )
                 }
                 LookupResult::Parent(None) => {}
                 LookupResult::Parent(Some(_)) => {
diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs
index c5732194424..9785c039d53 100644
--- a/compiler/rustc_mir_transform/src/inline.rs
+++ b/compiler/rustc_mir_transform/src/inline.rs
@@ -413,7 +413,15 @@ impl<'tcx> Inliner<'tcx> for NormalInliner<'tcx> {
 
             let term = blk.terminator();
             let caller_attrs = tcx.codegen_fn_attrs(self.caller_def_id());
-            if let TerminatorKind::Drop { ref place, target, unwind, replace: _ } = term.kind {
+            if let TerminatorKind::Drop {
+                ref place,
+                target,
+                unwind,
+                replace: _,
+                drop: _,
+                async_fut: _,
+            } = term.kind
+            {
                 work_list.push(target);
 
                 // If the place doesn't actually need dropping, treat it like a regular goto.
@@ -726,6 +734,20 @@ fn check_mir_is_available<'tcx, I: Inliner<'tcx>>(
             debug!("still needs substitution");
             return Err("implementation limitation -- HACK for dropping polymorphic type");
         }
+        InstanceKind::AsyncDropGlue(_, ty) | InstanceKind::AsyncDropGlueCtorShim(_, ty) => {
+            return if ty.still_further_specializable() {
+                Err("still needs substitution")
+            } else {
+                Ok(())
+            };
+        }
+        InstanceKind::FutureDropPollShim(_, ty, ty2) => {
+            return if ty.still_further_specializable() || ty2.still_further_specializable() {
+                Err("still needs substitution")
+            } else {
+                Ok(())
+            };
+        }
 
         // This cannot result in an immediate cycle since the callee MIR is a shim, which does
         // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we
@@ -739,8 +761,7 @@ fn check_mir_is_available<'tcx, I: Inliner<'tcx>>(
         | InstanceKind::DropGlue(..)
         | InstanceKind::CloneShim(..)
         | InstanceKind::ThreadLocalShim(..)
-        | InstanceKind::FnPtrAddrShim(..)
-        | InstanceKind::AsyncDropGlueCtorShim(..) => return Ok(()),
+        | InstanceKind::FnPtrAddrShim(..) => return Ok(()),
     }
 
     if inliner.tcx().is_constructor(callee_def_id) {
@@ -1345,8 +1366,8 @@ fn try_instance_mir<'tcx>(
     tcx: TyCtxt<'tcx>,
     instance: InstanceKind<'tcx>,
 ) -> Result<&'tcx Body<'tcx>, &'static str> {
-    if let ty::InstanceKind::DropGlue(_, Some(ty))
-    | ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)) = instance
+    if let ty::InstanceKind::DropGlue(_, Some(ty)) | ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) =
+        instance
         && let ty::Adt(def, args) = ty.kind()
     {
         let fields = def.all_fields();
diff --git a/compiler/rustc_mir_transform/src/inline/cycle.rs b/compiler/rustc_mir_transform/src/inline/cycle.rs
index a40768300f5..292278800f8 100644
--- a/compiler/rustc_mir_transform/src/inline/cycle.rs
+++ b/compiler/rustc_mir_transform/src/inline/cycle.rs
@@ -95,7 +95,10 @@ pub(crate) fn mir_callgraph_reachable<'tcx>(
                 InstanceKind::FnPtrAddrShim(..) => {
                     continue;
                 }
-                InstanceKind::DropGlue(..) | InstanceKind::AsyncDropGlueCtorShim(..) => {
+                InstanceKind::DropGlue(..)
+                | InstanceKind::FutureDropPollShim(..)
+                | InstanceKind::AsyncDropGlue(..)
+                | InstanceKind::AsyncDropGlueCtorShim(..) => {
                     // FIXME: A not fully instantiated drop shim can cause ICEs if one attempts to
                     // have its MIR built. Likely oli-obk just screwed up the `ParamEnv`s, so this
                     // needs some more analysis.
diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs
index f8db8de4e82..481c7941909 100644
--- a/compiler/rustc_mir_transform/src/known_panics_lint.rs
+++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs
@@ -888,7 +888,14 @@ impl CanConstProp {
         };
         for (local, val) in cpv.can_const_prop.iter_enumerated_mut() {
             let ty = body.local_decls[local].ty;
-            if ty.is_union() {
+            if ty.is_async_drop_in_place_coroutine(tcx) {
+                // No const propagation for async drop coroutine (AsyncDropGlue).
+                // Otherwise, tcx.layout_of(typing_env.as_query_input(ty)) would be called
+                // (an early layout request for the async drop coroutine) to compute the layout size.
+                // The layout of `async_drop_in_place<T>::{closure}` is only known for a known T.
+                *val = ConstPropMode::NoPropagation;
+                continue;
+            } else if ty.is_union() {
                 // Unions are incompatible with the current implementation of
                 // const prop because Rust has no concept of an active
                 // variant of a union
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index 5db62b7e902..24f4c11a66d 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -652,7 +652,7 @@ fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
     }
 }
 
-fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+pub(crate) fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
     fn o1<T>(x: T) -> WithMinOptLevel<T> {
         WithMinOptLevel(1, x)
     }
diff --git a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
index 537f152938e..b8cb101f93c 100644
--- a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
+++ b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
@@ -134,6 +134,8 @@ impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> {
                     target: _,
                     unwind: _,
                     replace: _,
+                    drop: _,
+                    async_fut: _,
                 } = &terminator.kind
                 && place_has_common_prefix(dropped_place, self.place)
             {
diff --git a/compiler/rustc_mir_transform/src/patch.rs b/compiler/rustc_mir_transform/src/patch.rs
index 12ace04c5e2..c7eb2a921c7 100644
--- a/compiler/rustc_mir_transform/src/patch.rs
+++ b/compiler/rustc_mir_transform/src/patch.rs
@@ -148,6 +148,14 @@ impl<'tcx> MirPatch<'tcx> {
         self.term_patch_map[bb].is_some()
     }
 
+    pub(crate) fn terminator_loc(&self, body: &Body<'tcx>, bb: BasicBlock) -> Location {
+        let offset = match bb.index().checked_sub(body.basic_blocks.len()) {
+            Some(index) => self.new_blocks[index].statements.len(),
+            None => body[bb].statements.len(),
+        };
+        Location { block: bb, statement_index: offset }
+    }
+
     /// Queues the addition of a new temporary with additional local info.
     pub(crate) fn new_local_with_info(
         &mut self,
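
The new `MirPatch::terminator_loc` has to account for blocks the patch has queued but not yet applied: such blocks are numbered after the body's existing ones. Below is a minimal self-contained sketch of that indexing scheme, using toy integer indices instead of the real MIR types.

```rust
// Sketch of the indexing used by MirPatch::terminator_loc, assuming blocks
// queued by the patch are numbered right after the body's existing blocks.
fn terminator_statement_index(
    existing_block_lens: &[usize], // statement counts of blocks already in the body
    new_block_lens: &[usize],      // statement counts of blocks queued by the patch
    bb: usize,
) -> usize {
    match bb.checked_sub(existing_block_lens.len()) {
        Some(new_idx) => new_block_lens[new_idx], // a block created by the patch
        None => existing_block_lens[bb],          // a pre-existing block
    }
}

fn main() {
    let existing = [2, 0, 3];
    let queued = [1];
    // The terminator of an existing block sits after that block's own statements.
    assert_eq!(terminator_statement_index(&existing, &queued, 1), 0);
    // Block 3 is the first block queued by the patch.
    assert_eq!(terminator_statement_index(&existing, &queued, 3), 1);
}
```
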
diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs
index 78d94a03867..c4dc8638b26 100644
--- a/compiler/rustc_mir_transform/src/remove_zsts.rs
+++ b/compiler/rustc_mir_transform/src/remove_zsts.rs
@@ -59,6 +59,11 @@ fn trivially_zst<'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Option<bool> {
         | ty::RawPtr(..)
         | ty::Ref(..)
         | ty::FnPtr(..) => Some(false),
+        ty::Coroutine(def_id, _) => {
+            // For async_drop_in_place::{closure} this is load-bearing, not just a perf fix,
+            // because we don't want to compute the layout before MIR analysis is done.
+            if tcx.is_async_drop_in_place_coroutine(*def_id) { Some(false) } else { None }
+        }
         // check `layout_of` to see (including unreachable things we won't actually see)
         _ => None,
     }
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index c13ffae3649..0d9a04b760a 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -6,13 +6,14 @@ use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_hir::lang_items::LangItem;
 use rustc_index::{Idx, IndexVec};
+use rustc_middle::mir::visit::{MutVisitor, PlaceContext};
 use rustc_middle::mir::*;
 use rustc_middle::query::Providers;
 use rustc_middle::ty::{
     self, CoroutineArgs, CoroutineArgsExt, EarlyBinder, GenericArgs, Ty, TyCtxt,
 };
 use rustc_middle::{bug, span_bug};
-use rustc_span::source_map::Spanned;
+use rustc_span::source_map::{Spanned, dummy_spanned};
 use rustc_span::{DUMMY_SP, Span};
 use tracing::{debug, instrument};
 
@@ -20,7 +21,8 @@ use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, ela
 use crate::patch::MirPatch;
 use crate::{
     abort_unwinding_calls, add_call_guards, add_moves_for_packed_drops, deref_separator, inline,
-    instsimplify, mentioned_items, pass_manager as pm, remove_noop_landing_pads, simplify,
+    instsimplify, mentioned_items, pass_manager as pm, remove_noop_landing_pads,
+    run_optimization_passes, simplify,
 };
 
 mod async_destructor_ctor;
@@ -29,6 +31,40 @@ pub(super) fn provide(providers: &mut Providers) {
     providers.mir_shims = make_shim;
 }
 
+// Replace Pin<&mut ImplCoroutine> accesses (_1.0) with Pin<&mut ProxyCoroutine> accesses
+struct FixProxyFutureDropVisitor<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    replace_to: Local,
+}
+
+impl<'tcx> MutVisitor<'tcx> for FixProxyFutureDropVisitor<'tcx> {
+    fn tcx(&self) -> TyCtxt<'tcx> {
+        self.tcx
+    }
+
+    fn visit_place(
+        &mut self,
+        place: &mut Place<'tcx>,
+        _context: PlaceContext,
+        _location: Location,
+    ) {
+        if place.local == Local::from_u32(1) {
+            if place.projection.len() == 1 {
+                assert!(matches!(
+                    place.projection.first(),
+                    Some(ProjectionElem::Field(FieldIdx::ZERO, _))
+                ));
+                *place = Place::from(self.replace_to);
+            } else if place.projection.len() == 2 {
+                assert!(matches!(place.projection[0], ProjectionElem::Field(FieldIdx::ZERO, _)));
+                assert!(matches!(place.projection[1], ProjectionElem::Deref));
+                *place =
+                    Place::from(self.replace_to).project_deeper(&[ProjectionElem::Deref], self.tcx);
+            }
+        }
+    }
+}
+
 fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body<'tcx> {
     debug!("make_shim({:?})", instance);
 
@@ -129,8 +165,53 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body<
         ty::InstanceKind::ThreadLocalShim(..) => build_thread_local_shim(tcx, instance),
         ty::InstanceKind::CloneShim(def_id, ty) => build_clone_shim(tcx, def_id, ty),
         ty::InstanceKind::FnPtrAddrShim(def_id, ty) => build_fn_ptr_addr_shim(tcx, def_id, ty),
+        ty::InstanceKind::FutureDropPollShim(def_id, proxy_ty, impl_ty) => {
+            let mut body =
+                async_destructor_ctor::build_future_drop_poll_shim(tcx, def_id, proxy_ty, impl_ty);
+
+            pm::run_passes(
+                tcx,
+                &mut body,
+                &[
+                    &mentioned_items::MentionedItems,
+                    &abort_unwinding_calls::AbortUnwindingCalls,
+                    &add_call_guards::CriticalCallEdges,
+                ],
+                Some(MirPhase::Runtime(RuntimePhase::PostCleanup)),
+                pm::Optimizations::Allowed,
+            );
+            run_optimization_passes(tcx, &mut body);
+            debug!("make_shim({:?}) = {:?}", instance, body);
+            return body;
+        }
+        ty::InstanceKind::AsyncDropGlue(def_id, ty) => {
+            let mut body = async_destructor_ctor::build_async_drop_shim(tcx, def_id, ty);
+
+            // The main pass required here is StateTransform, which converts the sync
+            // drop ladder into a coroutine.
+            // The others are the same minimal passes as for the sync drop glue shim.
+            pm::run_passes(
+                tcx,
+                &mut body,
+                &[
+                    &mentioned_items::MentionedItems,
+                    &abort_unwinding_calls::AbortUnwindingCalls,
+                    &add_call_guards::CriticalCallEdges,
+                    &simplify::SimplifyCfg::MakeShim,
+                    &crate::coroutine::StateTransform,
+                ],
+                Some(MirPhase::Runtime(RuntimePhase::PostCleanup)),
+                pm::Optimizations::Allowed,
+            );
+            run_optimization_passes(tcx, &mut body);
+            debug!("make_shim({:?}) = {:?}", instance, body);
+            return body;
+        }
+
         ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty) => {
-            async_destructor_ctor::build_async_destructor_ctor_shim(tcx, def_id, ty)
+            let body = async_destructor_ctor::build_async_destructor_ctor_shim(tcx, def_id, ty);
+            debug!("make_shim({:?}) = {:?}", instance, body);
+            return body;
         }
         ty::InstanceKind::Virtual(..) => {
             bug!("InstanceKind::Virtual ({:?}) is for direct calls only", instance)
@@ -214,6 +295,43 @@ fn local_decls_for_sig<'tcx>(
         .collect()
 }
 
+fn dropee_emit_retag<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    dropee_ptr: Place<'tcx>,
+    span: Span,
+) -> Place<'tcx> {
+    let mut dropee_ptr = dropee_ptr;
+    if tcx.sess.opts.unstable_opts.mir_emit_retag {
+        let source_info = SourceInfo::outermost(span);
+        // We want to treat the function argument as if it was passed by `&mut`. As such, we
+        // generate
+        // ```
+        // temp = &mut *arg;
+        // Retag(temp, FnEntry)
+        // ```
+        // It's important that we do this first, before anything that depends on `dropee_ptr`
+        // has been put into the body.
+        let reborrow = Rvalue::Ref(
+            tcx.lifetimes.re_erased,
+            BorrowKind::Mut { kind: MutBorrowKind::Default },
+            tcx.mk_place_deref(dropee_ptr),
+        );
+        let ref_ty = reborrow.ty(body.local_decls(), tcx);
+        dropee_ptr = body.local_decls.push(LocalDecl::new(ref_ty, span)).into();
+        let new_statements = [
+            StatementKind::Assign(Box::new((dropee_ptr, reborrow))),
+            StatementKind::Retag(RetagKind::FnEntry, Box::new(dropee_ptr)),
+        ];
+        for s in new_statements {
+            body.basic_blocks_mut()[START_BLOCK]
+                .statements
+                .push(Statement { source_info, kind: s });
+        }
+    }
+    dropee_ptr
+}
+
 fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>) -> Body<'tcx> {
     debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty);
 
@@ -248,38 +366,18 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>)
 
     // The first argument (index 0), but add 1 for the return value.
     let mut dropee_ptr = Place::from(Local::new(1 + 0));
-    if tcx.sess.opts.unstable_opts.mir_emit_retag {
-        // We want to treat the function argument as if it was passed by `&mut`. As such, we
-        // generate
-        // ```
-        // temp = &mut *arg;
-        // Retag(temp, FnEntry)
-        // ```
-        // It's important that we do this first, before anything that depends on `dropee_ptr`
-        // has been put into the body.
-        let reborrow = Rvalue::Ref(
-            tcx.lifetimes.re_erased,
-            BorrowKind::Mut { kind: MutBorrowKind::Default },
-            tcx.mk_place_deref(dropee_ptr),
-        );
-        let ref_ty = reborrow.ty(body.local_decls(), tcx);
-        dropee_ptr = body.local_decls.push(LocalDecl::new(ref_ty, span)).into();
-        let new_statements = [
-            StatementKind::Assign(Box::new((dropee_ptr, reborrow))),
-            StatementKind::Retag(RetagKind::FnEntry, Box::new(dropee_ptr)),
-        ];
-        for s in new_statements {
-            body.basic_blocks_mut()[START_BLOCK]
-                .statements
-                .push(Statement { source_info, kind: s });
-        }
-    }
+    dropee_ptr = dropee_emit_retag(tcx, &mut body, dropee_ptr, span);
 
     if ty.is_some() {
         let patch = {
             let typing_env = ty::TypingEnv::post_analysis(tcx, def_id);
-            let mut elaborator =
-                DropShimElaborator { body: &body, patch: MirPatch::new(&body), tcx, typing_env };
+            let mut elaborator = DropShimElaborator {
+                body: &body,
+                patch: MirPatch::new(&body),
+                tcx,
+                typing_env,
+                produce_async_drops: false,
+            };
             let dropee = tcx.mk_place_deref(dropee_ptr);
             let resume_block = elaborator.patch.resume_block();
             elaborate_drop(
@@ -290,6 +388,7 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>)
                 return_block,
                 Unwind::To(resume_block),
                 START_BLOCK,
+                None,
             );
             elaborator.patch
         };
@@ -338,6 +437,7 @@ pub(super) struct DropShimElaborator<'a, 'tcx> {
     pub patch: MirPatch<'tcx>,
     pub tcx: TyCtxt<'tcx>,
     pub typing_env: ty::TypingEnv<'tcx>,
+    pub produce_async_drops: bool,
 }
 
 impl fmt::Debug for DropShimElaborator<'_, '_> {
@@ -365,6 +465,13 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> {
         self.typing_env
     }
 
+    fn terminator_loc(&self, bb: BasicBlock) -> Location {
+        self.patch.terminator_loc(self.body, bb)
+    }
+    fn allow_async_drops(&self) -> bool {
+        self.produce_async_drops
+    }
+
     fn drop_style(&self, _path: Self::Path, mode: DropFlagMode) -> DropStyle {
         match mode {
             DropFlagMode::Shallow => {
@@ -621,6 +728,8 @@ impl<'tcx> CloneShimBuilder<'tcx> {
                     target: unwind,
                     unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
                     replace: false,
+                    drop: None,
+                    async_fut: None,
                 },
                 /* is_cleanup */ true,
             );
@@ -886,6 +995,8 @@ fn build_call_shim<'tcx>(
                 target: BasicBlock::new(2),
                 unwind: UnwindAction::Continue,
                 replace: false,
+                drop: None,
+                async_fut: None,
             },
             false,
         );
@@ -903,6 +1014,8 @@ fn build_call_shim<'tcx>(
                 target: BasicBlock::new(4),
                 unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
                 replace: false,
+                drop: None,
+                async_fut: None,
             },
             /* is_cleanup */ true,
         );
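
The `FixProxyFutureDropVisitor` introduced earlier in this file only rewrites two shapes of places rooted in `_1`: the field access `(_1.0)` and the dereference `(*(_1.0))`. The following toy model illustrates that rewrite; it uses plain structs rather than the real `Place`/`ProjectionElem` types and is a sketch, not compiler code.

```rust
// Toy model of the place rewrite done by FixProxyFutureDropVisitor:
// `_1.0`     becomes `_replace_to`
// `(*_1.0)`  becomes `(*_replace_to)`
#[derive(Clone, Debug, PartialEq)]
enum Elem {
    Field(u32),
    Deref,
}

#[derive(Clone, Debug, PartialEq)]
struct Place {
    local: u32,
    projection: Vec<Elem>,
}

fn fix_proxy_place(place: &mut Place, replace_to: u32) {
    if place.local != 1 {
        return;
    }
    match place.projection.as_slice() {
        [Elem::Field(0)] => *place = Place { local: replace_to, projection: vec![] },
        [Elem::Field(0), Elem::Deref] => {
            *place = Place { local: replace_to, projection: vec![Elem::Deref] }
        }
        _ => {}
    }
}

fn main() {
    let mut p = Place { local: 1, projection: vec![Elem::Field(0), Elem::Deref] };
    fix_proxy_place(&mut p, 5);
    assert_eq!(p, Place { local: 5, projection: vec![Elem::Deref] });
}
```
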
diff --git a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
index 94b1b4b1855..7976b65aae7 100644
--- a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
+++ b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
@@ -1,639 +1,430 @@
-use std::iter;
-
-use itertools::Itertools;
-use rustc_abi::{FieldIdx, VariantIdx};
-use rustc_const_eval::interpret;
 use rustc_hir::def_id::DefId;
 use rustc_hir::lang_items::LangItem;
+use rustc_hir::{CoroutineDesugaring, CoroutineKind, CoroutineSource, Safety};
 use rustc_index::{Idx, IndexVec};
-use rustc_middle::mir::*;
-use rustc_middle::ty::adjustment::PointerCoercion;
-use rustc_middle::ty::util::{AsyncDropGlueMorphology, Discr};
-use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_middle::{bug, span_bug};
-use rustc_span::source_map::respan;
-use rustc_span::{Span, Symbol};
-use rustc_target::spec::PanicStrategy;
-use tracing::debug;
-
-use super::{local_decls_for_sig, new_body};
+use rustc_middle::mir::{
+    BasicBlock, BasicBlockData, Body, Local, LocalDecl, MirSource, Operand, Place, Rvalue,
+    SourceInfo, Statement, StatementKind, Terminator, TerminatorKind,
+};
+use rustc_middle::ty::{self, EarlyBinder, Ty, TyCtxt};
+
+use super::*;
+use crate::patch::MirPatch;
 
 pub(super) fn build_async_destructor_ctor_shim<'tcx>(
     tcx: TyCtxt<'tcx>,
     def_id: DefId,
-    ty: Option<Ty<'tcx>>,
+    ty: Ty<'tcx>,
 ) -> Body<'tcx> {
-    debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty);
-
-    AsyncDestructorCtorShimBuilder::new(tcx, def_id, ty).build()
+    debug!("build_async_destructor_ctor_shim(def_id={:?}, ty={:?})", def_id, ty);
+    debug_assert_eq!(Some(def_id), tcx.lang_items().async_drop_in_place_fn());
+    let generic_body = tcx.optimized_mir(def_id);
+    let args = tcx.mk_args(&[ty.into()]);
+    let mut body = EarlyBinder::bind(generic_body.clone()).instantiate(tcx, args);
+
+    // Minimal shim passes, except MentionedItems, which causes the error
+    // "mentioned_items for DefId(...async_drop_in_place...) have already been set".
+    pm::run_passes(
+        tcx,
+        &mut body,
+        &[
+            &simplify::SimplifyCfg::MakeShim,
+            &abort_unwinding_calls::AbortUnwindingCalls,
+            &add_call_guards::CriticalCallEdges,
+        ],
+        None,
+        pm::Optimizations::Allowed,
+    );
+    body
 }
 
-/// Builder for async_drop_in_place shim. Functions as a stack machine
-/// to build up an expression using combinators. Stack contains pairs
-/// of locals and types. Combinator is a not yet instantiated pair of a
-/// function and a type, is considered to be an operator which consumes
-/// operands from the stack by instantiating its function and its type
-/// with operand types and moving locals into the function call. Top
-/// pair is considered to be the last operand.
-// FIXME: add mir-opt tests
-struct AsyncDestructorCtorShimBuilder<'tcx> {
+// build_drop_shim analog for async drop glue (for generated coroutine poll function)
+pub(super) fn build_async_drop_shim<'tcx>(
     tcx: TyCtxt<'tcx>,
     def_id: DefId,
-    self_ty: Option<Ty<'tcx>>,
-    span: Span,
-    source_info: SourceInfo,
-    typing_env: ty::TypingEnv<'tcx>,
-
-    stack: Vec<Operand<'tcx>>,
-    last_bb: BasicBlock,
-    top_cleanup_bb: Option<BasicBlock>,
-
-    locals: IndexVec<Local, LocalDecl<'tcx>>,
-    bbs: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
-}
-
-#[derive(Clone, Copy)]
-enum SurfaceDropKind {
-    Async,
-    Sync,
-}
-
-impl<'tcx> AsyncDestructorCtorShimBuilder<'tcx> {
-    const SELF_PTR: Local = Local::from_u32(1);
-    const INPUT_COUNT: usize = 1;
-    const MAX_STACK_LEN: usize = 2;
-
-    fn new(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Option<Ty<'tcx>>) -> Self {
-        let args = if let Some(ty) = self_ty {
-            tcx.mk_args(&[ty.into()])
+    ty: Ty<'tcx>,
+) -> Body<'tcx> {
+    debug!("build_async_drop_shim(def_id={:?}, ty={:?})", def_id, ty);
+    let ty::Coroutine(_, parent_args) = ty.kind() else {
+        bug!();
+    };
+    let typing_env = ty::TypingEnv::fully_monomorphized();
+
+    let drop_ty = parent_args.first().unwrap().expect_ty();
+    let drop_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
+
+    assert!(tcx.is_coroutine(def_id));
+    let coroutine_kind = tcx.coroutine_kind(def_id).unwrap();
+
+    assert!(matches!(
+        coroutine_kind,
+        CoroutineKind::Desugared(CoroutineDesugaring::Async, CoroutineSource::Fn)
+    ));
+
+    let needs_async_drop = drop_ty.needs_async_drop(tcx, typing_env);
+    let needs_sync_drop = !needs_async_drop && drop_ty.needs_drop(tcx, typing_env);
+
+    let resume_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
+    let resume_ty = Ty::new_adt(tcx, resume_adt, ty::List::empty());
+
+    let fn_sig = ty::Binder::dummy(tcx.mk_fn_sig(
+        [ty, resume_ty],
+        tcx.types.unit,
+        false,
+        Safety::Safe,
+        ExternAbi::Rust,
+    ));
+    let sig = tcx.instantiate_bound_regions_with_erased(fn_sig);
+
+    assert!(!drop_ty.is_coroutine());
+    let span = tcx.def_span(def_id);
+    let source_info = SourceInfo::outermost(span);
+
+    // The first argument (index 0), but add 1 for the return value.
+    let coroutine_layout = Place::from(Local::new(1 + 0));
+    let coroutine_layout_dropee =
+        tcx.mk_place_field(coroutine_layout, FieldIdx::new(0), drop_ptr_ty);
+
+    let return_block = BasicBlock::new(1);
+    let mut blocks = IndexVec::with_capacity(2);
+    let block = |blocks: &mut IndexVec<_, _>, kind| {
+        blocks.push(BasicBlockData {
+            statements: vec![],
+            terminator: Some(Terminator { source_info, kind }),
+            is_cleanup: false,
+        })
+    };
+    block(
+        &mut blocks,
+        if needs_sync_drop {
+            TerminatorKind::Drop {
+                place: tcx.mk_place_deref(coroutine_layout_dropee),
+                target: return_block,
+                unwind: UnwindAction::Continue,
+                replace: false,
+                drop: None,
+                async_fut: None,
+            }
         } else {
-            ty::GenericArgs::identity_for_item(tcx, def_id)
-        };
-        let sig = tcx.fn_sig(def_id).instantiate(tcx, args);
-        let sig = tcx.instantiate_bound_regions_with_erased(sig);
-        let span = tcx.def_span(def_id);
+            TerminatorKind::Goto { target: return_block }
+        },
+    );
+    block(&mut blocks, TerminatorKind::Return);
+
+    let source = MirSource::from_instance(ty::InstanceKind::AsyncDropGlue(def_id, ty));
+    let mut body =
+        new_body(source, blocks, local_decls_for_sig(&sig, span), sig.inputs().len(), span);
+
+    body.coroutine = Some(Box::new(CoroutineInfo::initial(
+        coroutine_kind,
+        parent_args.as_coroutine().yield_ty(),
+        parent_args.as_coroutine().resume_ty(),
+    )));
+    body.phase = MirPhase::Runtime(RuntimePhase::Initial);
+    if !needs_async_drop {
+        // Return a noop body for types that don't need async drop
+        // (or a sync-Drop body in the `!needs_async_drop && needs_drop` case)
+        return body;
+    }
 
-        let source_info = SourceInfo::outermost(span);
+    let mut dropee_ptr = Place::from(body.local_decls.push(LocalDecl::new(drop_ptr_ty, span)));
+    let st_kind = StatementKind::Assign(Box::new((
+        dropee_ptr,
+        Rvalue::Use(Operand::Move(coroutine_layout_dropee)),
+    )));
+    body.basic_blocks_mut()[START_BLOCK].statements.push(Statement { source_info, kind: st_kind });
+    dropee_ptr = dropee_emit_retag(tcx, &mut body, dropee_ptr, span);
 
-        debug_assert_eq!(sig.inputs().len(), Self::INPUT_COUNT);
-        let locals = local_decls_for_sig(&sig, span);
+    let dropline = body.basic_blocks.last_index();
 
-        // Usual case: noop() + unwind resume + return
-        let mut bbs = IndexVec::with_capacity(3);
-        let typing_env = ty::TypingEnv::post_analysis(tcx, def_id);
-        AsyncDestructorCtorShimBuilder {
+    let patch = {
+        let mut elaborator = DropShimElaborator {
+            body: &body,
+            patch: MirPatch::new(&body),
             tcx,
-            def_id,
-            self_ty,
-            span,
-            source_info,
             typing_env,
-
-            stack: Vec::with_capacity(Self::MAX_STACK_LEN),
-            last_bb: bbs.push(BasicBlockData::new(None, false)),
-            top_cleanup_bb: match tcx.sess.panic_strategy() {
-                PanicStrategy::Unwind => {
-                    // Don't drop input arg because it's just a pointer
-                    Some(bbs.push(BasicBlockData {
-                        statements: Vec::new(),
-                        terminator: Some(Terminator {
-                            source_info,
-                            kind: TerminatorKind::UnwindResume,
-                        }),
-                        is_cleanup: true,
-                    }))
-                }
-                PanicStrategy::Abort => None,
-            },
-
-            locals,
-            bbs,
-        }
-    }
-
-    fn build(self) -> Body<'tcx> {
-        let (tcx, Some(self_ty)) = (self.tcx, self.self_ty) else {
-            return self.build_zst_output();
-        };
-        match self_ty.async_drop_glue_morphology(tcx) {
-            AsyncDropGlueMorphology::Noop => span_bug!(
-                self.span,
-                "async drop glue shim generator encountered type with noop async drop glue morphology"
-            ),
-            AsyncDropGlueMorphology::DeferredDropInPlace => {
-                return self.build_deferred_drop_in_place();
-            }
-            AsyncDropGlueMorphology::Custom => (),
-        }
-
-        let surface_drop_kind = || {
-            let adt_def = self_ty.ty_adt_def()?;
-            if adt_def.async_destructor(tcx).is_some() {
-                Some(SurfaceDropKind::Async)
-            } else if adt_def.destructor(tcx).is_some() {
-                Some(SurfaceDropKind::Sync)
-            } else {
-                None
-            }
+            produce_async_drops: true,
         };
+        let dropee = tcx.mk_place_deref(dropee_ptr);
+        let resume_block = elaborator.patch.resume_block();
+        elaborate_drop(
+            &mut elaborator,
+            source_info,
+            dropee,
+            (),
+            return_block,
+            Unwind::To(resume_block),
+            START_BLOCK,
+            dropline,
+        );
+        elaborator.patch
+    };
+    patch.apply(&mut body);
 
-        match self_ty.kind() {
-            ty::Array(elem_ty, _) => self.build_slice(true, *elem_ty),
-            ty::Slice(elem_ty) => self.build_slice(false, *elem_ty),
-
-            ty::Tuple(elem_tys) => self.build_chain(None, elem_tys.iter()),
-            ty::Adt(adt_def, args) if adt_def.is_struct() => {
-                let field_tys = adt_def.non_enum_variant().fields.iter().map(|f| f.ty(tcx, args));
-                self.build_chain(surface_drop_kind(), field_tys)
-            }
-            ty::Closure(_, args) => self.build_chain(None, args.as_closure().upvar_tys().iter()),
-            ty::CoroutineClosure(_, args) => {
-                self.build_chain(None, args.as_coroutine_closure().upvar_tys().iter())
-            }
+    body
+}
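
For illustration only (not part of the patch): for the types that take the early return above, the produced coroutine body boils down to "do at most one ordinary sync drop, then finish". A user-level sketch of that behaviour, with an invented function name, could look like:

```rust
use std::ptr;

// Hypothetical stand-in for the generated `async_drop_in_place::{closure}` body
// when the dropee has no async drop glue of its own: at most one sync in-place
// drop, after which the future is immediately ready.
pub async unsafe fn async_drop_in_place_sketch<T>(to_drop: *mut T) {
    if std::mem::needs_drop::<T>() {
        // The "needs only sync Drop" case: a single ordinary drop, no awaits.
        unsafe { ptr::drop_in_place(to_drop) };
    }
    // Otherwise: nothing to do; the coroutine completes on its first poll.
}
```
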
 
-            ty::Adt(adt_def, args) if adt_def.is_enum() => {
-                self.build_enum(*adt_def, *args, surface_drop_kind())
-            }
+// * For the async drop of a "normal" coroutine:
+// `async_drop_in_place<T>::{closure}.poll()` is converted into `T.future_drop_poll()`.
+// Every coroutine has both its `poll` (advance its computation a little further)
+// and its `future_drop_poll` (advance its drop a little further).
+//
+// * For the async drop of an "async drop coroutine" (`async_drop_in_place<T>::{closure}`):
+// Correctly dropping such a coroutine means executing the nested async drop normally:
+// async_drop(async_drop(T))::future_drop_poll() => async_drop(T)::poll().
+pub(super) fn build_future_drop_poll_shim<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    def_id: DefId,
+    proxy_ty: Ty<'tcx>,
+    impl_ty: Ty<'tcx>,
+) -> Body<'tcx> {
+    let instance = ty::InstanceKind::FutureDropPollShim(def_id, proxy_ty, impl_ty);
+    let ty::Coroutine(coroutine_def_id, _) = impl_ty.kind() else {
+        bug!("build_future_drop_poll_shim not for coroutine impl type: ({:?})", instance);
+    };
 
-            ty::Adt(adt_def, _) => {
-                assert!(adt_def.is_union());
-                match surface_drop_kind().unwrap() {
-                    SurfaceDropKind::Async => self.build_fused_async_surface(),
-                    SurfaceDropKind::Sync => self.build_fused_sync_surface(),
-                }
-            }
+    let span = tcx.def_span(def_id);
 
-            ty::Bound(..)
-            | ty::Foreign(_)
-            | ty::Placeholder(_)
-            | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) | ty::TyVar(_))
-            | ty::Param(_)
-            | ty::Alias(..) => {
-                bug!("Building async destructor for unexpected type: {self_ty:?}")
-            }
-
-            _ => {
-                bug!(
-                    "Building async destructor constructor shim is not yet implemented for type: {self_ty:?}"
-                )
-            }
-        }
+    if tcx.is_async_drop_in_place_coroutine(*coroutine_def_id) {
+        build_adrop_for_adrop_shim(tcx, proxy_ty, impl_ty, span, instance)
+    } else {
+        build_adrop_for_coroutine_shim(tcx, proxy_ty, impl_ty, span, instance)
     }
+}
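
Purely as an illustration of the comment above (none of these names exist in the compiler), a coroutine can be pictured as one state machine with two entry points: `poll`, which advances its computation, and `future_drop_poll`, which advances its drop:

```rust
use std::pin::Pin;
use std::task::{Context, Poll};

// Invented, user-level model of a coroutine with both entry points.
enum State {
    Running(u32),
    Dropping(u32),
    Done,
}

struct Machine {
    state: State,
}

impl Machine {
    // `poll`: advance the computation a little further.
    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<()> {
        let this = self.get_mut();
        match this.state {
            State::Running(0) => {
                this.state = State::Done;
                Poll::Ready(())
            }
            State::Running(n) => {
                this.state = State::Running(n - 1);
                Poll::Pending
            }
            _ => Poll::Ready(()),
        }
    }

    // `future_drop_poll`: advance the drop a little further.
    fn future_drop_poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<()> {
        let this = self.get_mut();
        match this.state {
            State::Running(n) => {
                this.state = State::Dropping(n);
                Poll::Pending
            }
            State::Dropping(0) | State::Done => {
                this.state = State::Done;
                Poll::Ready(())
            }
            State::Dropping(n) => {
                this.state = State::Dropping(n - 1);
                Poll::Pending
            }
        }
    }
}
```
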
 
-    fn build_enum(
-        mut self,
-        adt_def: ty::AdtDef<'tcx>,
-        args: ty::GenericArgsRef<'tcx>,
-        surface_drop: Option<SurfaceDropKind>,
-    ) -> Body<'tcx> {
-        let tcx = self.tcx;
-
-        let surface = match surface_drop {
-            None => None,
-            Some(kind) => {
-                self.put_self();
-                Some(match kind {
-                    SurfaceDropKind::Async => self.combine_async_surface(),
-                    SurfaceDropKind::Sync => self.combine_sync_surface(),
-                })
-            }
-        };
-
-        let mut other = None;
-        for (variant_idx, discr) in adt_def.discriminants(tcx) {
-            let variant = adt_def.variant(variant_idx);
-
-            let mut chain = None;
-            for (field_idx, field) in variant.fields.iter_enumerated() {
-                let field_ty = field.ty(tcx, args);
-                self.put_variant_field(variant.name, variant_idx, field_idx, field_ty);
-                let defer = self.combine_defer(field_ty);
-                chain = Some(match chain {
-                    None => defer,
-                    Some(chain) => self.combine_chain(chain, defer),
-                })
-            }
-            let variant_dtor = chain.unwrap_or_else(|| self.put_noop());
-
-            other = Some(match other {
-                None => variant_dtor,
-                Some(other) => {
-                    self.put_self();
-                    self.put_discr(discr);
-                    self.combine_either(other, variant_dtor)
-                }
-            });
-        }
-        let variants_dtor = other.unwrap_or_else(|| self.put_noop());
-
-        let dtor = match surface {
-            None => variants_dtor,
-            Some(surface) => self.combine_chain(surface, variants_dtor),
-        };
-        self.combine_fuse(dtor);
-        self.return_()
-    }
+// For the async drop of a "normal" coroutine:
+// `async_drop_in_place<T>::{closure}.poll()` is converted into `T.future_drop_poll()`.
+// Every coroutine has both its `poll` (advance its computation a little further)
+// and its `future_drop_poll` (advance its drop a little further).
+fn build_adrop_for_coroutine_shim<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    proxy_ty: Ty<'tcx>,
+    impl_ty: Ty<'tcx>,
+    span: Span,
+    instance: ty::InstanceKind<'tcx>,
+) -> Body<'tcx> {
+    let ty::Coroutine(coroutine_def_id, impl_args) = impl_ty.kind() else {
+        bug!("build_adrop_for_coroutine_shim not for coroutine impl type: ({:?})", instance);
+    };
+    let proxy_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, proxy_ty);
+    // taking _1.0 (the layout reference out of the Pin)
+    let pin_proxy_layout_local = Local::new(1);
+    let source_info = SourceInfo::outermost(span);
+    // converting `(_1: Pin<&mut CorLayout>, _2: &mut Context<'_>) -> Poll<()>`
+    // into `(_1: Pin<&mut ProxyLayout>, _2: &mut Context<'_>) -> Poll<()>`
+    // by emitting `let mut _x: &mut CorLayout = &*_1.0.0;`
+    // and replacing the old `_1.0` accesses with `_x` accesses.
+    let body = tcx.optimized_mir(*coroutine_def_id).future_drop_poll().unwrap();
+    let mut body: Body<'tcx> = EarlyBinder::bind(body.clone()).instantiate(tcx, impl_args);
+    body.source.instance = instance;
+    body.phase = MirPhase::Runtime(RuntimePhase::Initial);
+    body.var_debug_info.clear();
+    let pin_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Pin, Some(span)));
+    let args = tcx.mk_args(&[proxy_ref.into()]);
+    let pin_proxy_ref = Ty::new_adt(tcx, pin_adt_ref, args);
+
+    let cor_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, impl_ty);
+
+    let proxy_ref_local = body.local_decls.push(LocalDecl::new(proxy_ref, span));
+    let cor_ref_local = body.local_decls.push(LocalDecl::new(cor_ref, span));
+
+    FixProxyFutureDropVisitor { tcx, replace_to: cor_ref_local }.visit_body(&mut body);
+    // Now change the first arg from Pin<&mut ImplCoroutine> to Pin<&mut ProxyCoroutine>
+    body.local_decls[pin_proxy_layout_local] = LocalDecl::new(pin_proxy_ref, span);
 
-    fn build_chain<I>(mut self, surface_drop: Option<SurfaceDropKind>, elem_tys: I) -> Body<'tcx>
-    where
-        I: Iterator<Item = Ty<'tcx>> + ExactSizeIterator,
     {
-        let surface = match surface_drop {
-            None => None,
-            Some(kind) => {
-                self.put_self();
-                Some(match kind {
-                    SurfaceDropKind::Async => self.combine_async_surface(),
-                    SurfaceDropKind::Sync => self.combine_sync_surface(),
-                })
-            }
-        };
-
-        let mut chain = None;
-        for (field_idx, field_ty) in elem_tys.enumerate().map(|(i, ty)| (FieldIdx::new(i), ty)) {
-            self.put_field(field_idx, field_ty);
-            let defer = self.combine_defer(field_ty);
-            chain = Some(match chain {
-                None => defer,
-                Some(chain) => self.combine_chain(chain, defer),
-            })
-        }
-        let chain = chain.unwrap_or_else(|| self.put_noop());
-
-        let dtor = match surface {
-            None => chain,
-            Some(surface) => self.combine_chain(surface, chain),
-        };
-        self.combine_fuse(dtor);
-        self.return_()
-    }
-
-    fn build_zst_output(mut self) -> Body<'tcx> {
-        self.put_zst_output();
-        self.return_()
-    }
-
-    fn build_deferred_drop_in_place(mut self) -> Body<'tcx> {
-        self.put_self();
-        let deferred = self.combine_deferred_drop_in_place();
-        self.combine_fuse(deferred);
-        self.return_()
-    }
-
-    fn build_fused_async_surface(mut self) -> Body<'tcx> {
-        self.put_self();
-        let surface = self.combine_async_surface();
-        self.combine_fuse(surface);
-        self.return_()
-    }
-
-    fn build_fused_sync_surface(mut self) -> Body<'tcx> {
-        self.put_self();
-        let surface = self.combine_sync_surface();
-        self.combine_fuse(surface);
-        self.return_()
-    }
-
-    fn build_slice(mut self, is_array: bool, elem_ty: Ty<'tcx>) -> Body<'tcx> {
-        if is_array {
-            self.put_array_as_slice(elem_ty)
-        } else {
-            self.put_self()
-        }
-        let dtor = self.combine_slice(elem_ty);
-        self.combine_fuse(dtor);
-        self.return_()
-    }
-
-    fn put_zst_output(&mut self) {
-        let return_ty = self.locals[RETURN_PLACE].ty;
-        self.put_operand(Operand::Constant(Box::new(ConstOperand {
-            span: self.span,
-            user_ty: None,
-            const_: Const::zero_sized(return_ty),
-        })));
-    }
-
-    /// Puts `to_drop: *mut Self` on top of the stack.
-    fn put_self(&mut self) {
-        self.put_operand(Operand::Copy(Self::SELF_PTR.into()))
-    }
-
-    /// Given that `Self is [ElemTy; N]` puts `to_drop: *mut [ElemTy]`
-    /// on top of the stack.
-    fn put_array_as_slice(&mut self, elem_ty: Ty<'tcx>) {
-        let slice_ptr_ty = Ty::new_mut_ptr(self.tcx, Ty::new_slice(self.tcx, elem_ty));
-        self.put_temp_rvalue(Rvalue::Cast(
-            CastKind::PointerCoercion(PointerCoercion::Unsize, CoercionSource::Implicit),
-            Operand::Copy(Self::SELF_PTR.into()),
-            slice_ptr_ty,
-        ))
-    }
-
-    /// If given Self is a struct puts `to_drop: *mut FieldTy` on top
-    /// of the stack.
-    fn put_field(&mut self, field: FieldIdx, field_ty: Ty<'tcx>) {
-        let place = Place {
-            local: Self::SELF_PTR,
-            projection: self
-                .tcx
-                .mk_place_elems(&[PlaceElem::Deref, PlaceElem::Field(field, field_ty)]),
-        };
-        self.put_temp_rvalue(Rvalue::RawPtr(RawPtrKind::Mut, place))
-    }
-
-    /// If given Self is an enum puts `to_drop: *mut FieldTy` on top of
-    /// the stack.
-    fn put_variant_field(
-        &mut self,
-        variant_sym: Symbol,
-        variant: VariantIdx,
-        field: FieldIdx,
-        field_ty: Ty<'tcx>,
-    ) {
-        let place = Place {
-            local: Self::SELF_PTR,
-            projection: self.tcx.mk_place_elems(&[
-                PlaceElem::Deref,
-                PlaceElem::Downcast(Some(variant_sym), variant),
-                PlaceElem::Field(field, field_ty),
-            ]),
-        };
-        self.put_temp_rvalue(Rvalue::RawPtr(RawPtrKind::Mut, place))
-    }
-
-    /// If given Self is an enum puts `to_drop: *mut FieldTy` on top of
-    /// the stack.
-    fn put_discr(&mut self, discr: Discr<'tcx>) {
-        let (size, _) = discr.ty.int_size_and_signed(self.tcx);
-        self.put_operand(Operand::const_from_scalar(
-            self.tcx,
-            discr.ty,
-            interpret::Scalar::from_uint(discr.val, size),
-            self.span,
-        ));
-    }
-
-    /// Puts `x: RvalueType` on top of the stack.
-    fn put_temp_rvalue(&mut self, rvalue: Rvalue<'tcx>) {
-        let last_bb = &mut self.bbs[self.last_bb];
-        debug_assert!(last_bb.terminator.is_none());
-        let source_info = self.source_info;
-
-        let local_ty = rvalue.ty(&self.locals, self.tcx);
-        // We need to create a new local to be able to "consume" it with
-        // a combinator
-        let local = self.locals.push(LocalDecl::with_source_info(local_ty, source_info));
-        last_bb.statements.extend_from_slice(&[
-            Statement { source_info, kind: StatementKind::StorageLive(local) },
+        let mut idx: usize = 0;
+        // _proxy = _1.0 : Pin<&mut ProxyLayout> ==> &mut ProxyLayout
+        let proxy_ref_place = Place::from(pin_proxy_layout_local)
+            .project_deeper(&[PlaceElem::Field(FieldIdx::ZERO, proxy_ref)], tcx);
+        body.basic_blocks_mut()[START_BLOCK].statements.insert(
+            idx,
             Statement {
                 source_info,
-                kind: StatementKind::Assign(Box::new((local.into(), rvalue))),
+                kind: StatementKind::Assign(Box::new((
+                    Place::from(proxy_ref_local),
+                    Rvalue::CopyForDeref(proxy_ref_place),
+                ))),
             },
-        ]);
-
-        self.put_operand(Operand::Move(local.into()));
-    }
-
-    /// Puts operand on top of the stack.
-    fn put_operand(&mut self, operand: Operand<'tcx>) {
-        if let Some(top_cleanup_bb) = &mut self.top_cleanup_bb {
-            let source_info = self.source_info;
-            match &operand {
-                Operand::Copy(_) | Operand::Constant(_) => {
-                    *top_cleanup_bb = self.bbs.push(BasicBlockData {
-                        statements: Vec::new(),
-                        terminator: Some(Terminator {
-                            source_info,
-                            kind: TerminatorKind::Goto { target: *top_cleanup_bb },
-                        }),
-                        is_cleanup: true,
-                    });
-                }
-                Operand::Move(place) => {
-                    let local = place.as_local().unwrap();
-                    *top_cleanup_bb = self.bbs.push(BasicBlockData {
-                        statements: Vec::new(),
-                        terminator: Some(Terminator {
-                            source_info,
-                            kind: if self.locals[local].ty.needs_drop(self.tcx, self.typing_env) {
-                                TerminatorKind::Drop {
-                                    place: local.into(),
-                                    target: *top_cleanup_bb,
-                                    unwind: UnwindAction::Terminate(
-                                        UnwindTerminateReason::InCleanup,
-                                    ),
-                                    replace: false,
-                                }
-                            } else {
-                                TerminatorKind::Goto { target: *top_cleanup_bb }
-                            },
-                        }),
-                        is_cleanup: true,
-                    });
-                }
-            };
-        }
-        self.stack.push(operand);
-    }
-
-    /// Puts `noop: async_drop::Noop` on top of the stack
-    fn put_noop(&mut self) -> Ty<'tcx> {
-        self.apply_combinator(0, LangItem::AsyncDropNoop, &[])
-    }
-
-    fn combine_async_surface(&mut self) -> Ty<'tcx> {
-        self.apply_combinator(1, LangItem::SurfaceAsyncDropInPlace, &[self.self_ty.unwrap().into()])
-    }
-
-    fn combine_sync_surface(&mut self) -> Ty<'tcx> {
-        self.apply_combinator(
-            1,
-            LangItem::AsyncDropSurfaceDropInPlace,
-            &[self.self_ty.unwrap().into()],
-        )
-    }
-
-    fn combine_deferred_drop_in_place(&mut self) -> Ty<'tcx> {
-        self.apply_combinator(
-            1,
-            LangItem::AsyncDropDeferredDropInPlace,
-            &[self.self_ty.unwrap().into()],
-        )
-    }
-
-    fn combine_fuse(&mut self, inner_future_ty: Ty<'tcx>) -> Ty<'tcx> {
-        self.apply_combinator(1, LangItem::AsyncDropFuse, &[inner_future_ty.into()])
-    }
-
-    fn combine_slice(&mut self, elem_ty: Ty<'tcx>) -> Ty<'tcx> {
-        self.apply_combinator(1, LangItem::AsyncDropSlice, &[elem_ty.into()])
-    }
-
-    fn combine_defer(&mut self, to_drop_ty: Ty<'tcx>) -> Ty<'tcx> {
-        self.apply_combinator(1, LangItem::AsyncDropDefer, &[to_drop_ty.into()])
-    }
-
-    fn combine_chain(&mut self, first: Ty<'tcx>, second: Ty<'tcx>) -> Ty<'tcx> {
-        self.apply_combinator(2, LangItem::AsyncDropChain, &[first.into(), second.into()])
-    }
-
-    fn combine_either(&mut self, other: Ty<'tcx>, matched: Ty<'tcx>) -> Ty<'tcx> {
-        self.apply_combinator(
-            4,
-            LangItem::AsyncDropEither,
-            &[other.into(), matched.into(), self.self_ty.unwrap().into()],
-        )
-    }
-
-    fn return_(mut self) -> Body<'tcx> {
-        let last_bb = &mut self.bbs[self.last_bb];
-        debug_assert!(last_bb.terminator.is_none());
-        let source_info = self.source_info;
-
-        let (1, Some(output)) = (self.stack.len(), self.stack.pop()) else {
-            span_bug!(
-                self.span,
-                "async destructor ctor shim builder finished with invalid number of stack items: expected 1 found {}",
-                self.stack.len(),
-            )
-        };
-        #[cfg(debug_assertions)]
-        if let Some(ty) = self.self_ty {
-            debug_assert_eq!(
-                output.ty(&self.locals, self.tcx),
-                ty.async_destructor_ty(self.tcx),
-                "output async destructor types did not match for type: {ty:?}",
-            );
-        }
-
-        let dead_storage = match &output {
-            Operand::Move(place) => Some(Statement {
-                source_info,
-                kind: StatementKind::StorageDead(place.as_local().unwrap()),
-            }),
-            _ => None,
-        };
-
-        last_bb.statements.extend(
-            iter::once(Statement {
-                source_info,
-                kind: StatementKind::Assign(Box::new((RETURN_PLACE.into(), Rvalue::Use(output)))),
-            })
-            .chain(dead_storage),
         );
-
-        last_bb.terminator = Some(Terminator { source_info, kind: TerminatorKind::Return });
-
-        let source = MirSource::from_instance(ty::InstanceKind::AsyncDropGlueCtorShim(
-            self.def_id,
-            self.self_ty,
-        ));
-        new_body(source, self.bbs, self.locals, Self::INPUT_COUNT, self.span)
-    }
-
-    fn apply_combinator(
-        &mut self,
-        arity: usize,
-        function: LangItem,
-        args: &[ty::GenericArg<'tcx>],
-    ) -> Ty<'tcx> {
-        let function = self.tcx.require_lang_item(function, Some(self.span));
-        let operands_split = self
-            .stack
-            .len()
-            .checked_sub(arity)
-            .expect("async destructor ctor shim combinator tried to consume too many items");
-        let operands = &self.stack[operands_split..];
-
-        let func_ty = Ty::new_fn_def(self.tcx, function, args.iter().copied());
-        let func_sig = func_ty.fn_sig(self.tcx).no_bound_vars().unwrap();
-        #[cfg(debug_assertions)]
-        operands.iter().zip(func_sig.inputs()).for_each(|(operand, expected_ty)| {
-            let operand_ty = operand.ty(&self.locals, self.tcx);
-            if operand_ty == *expected_ty {
-                return;
-            }
-
-            // If projection of Discriminant then compare with `Ty::discriminant_ty`
-            if let ty::Alias(ty::Projection, ty::AliasTy { args, def_id, .. }) = expected_ty.kind()
-                && self.tcx.is_lang_item(*def_id, LangItem::Discriminant)
-                && args.first().unwrap().as_type().unwrap().discriminant_ty(self.tcx) == operand_ty
-            {
-                return;
+        idx += 1;
+        let mut cor_ptr_local = proxy_ref_local;
+        proxy_ty.find_async_drop_impl_coroutine(tcx, |ty| {
+            if ty != proxy_ty {
+                let ty_ptr = Ty::new_mut_ptr(tcx, ty);
+                let impl_ptr_place = Place::from(cor_ptr_local).project_deeper(
+                    &[PlaceElem::Deref, PlaceElem::Field(FieldIdx::ZERO, ty_ptr)],
+                    tcx,
+                );
+                cor_ptr_local = body.local_decls.push(LocalDecl::new(ty_ptr, span));
+                // _cor_ptr = _proxy.0.0 (... .0)
+                body.basic_blocks_mut()[START_BLOCK].statements.insert(
+                    idx,
+                    Statement {
+                        source_info,
+                        kind: StatementKind::Assign(Box::new((
+                            Place::from(cor_ptr_local),
+                            Rvalue::CopyForDeref(impl_ptr_place),
+                        ))),
+                    },
+                );
+                idx += 1;
             }
-
-            span_bug!(
-                self.span,
-                "Operand type and combinator argument type are not equal.
-    operand_ty: {:?}
-    argument_ty: {:?}
-",
-                operand_ty,
-                expected_ty
-            );
-        });
-
-        let target = self.bbs.push(BasicBlockData {
-            statements: operands
-                .iter()
-                .rev()
-                .filter_map(|o| {
-                    if let Operand::Move(Place { local, projection }) = o {
-                        assert!(projection.is_empty());
-                        Some(Statement {
-                            source_info: self.source_info,
-                            kind: StatementKind::StorageDead(*local),
-                        })
-                    } else {
-                        None
-                    }
-                })
-                .collect(),
-            terminator: None,
-            is_cleanup: false,
         });
 
-        let dest_ty = func_sig.output();
-        let dest =
-            self.locals.push(LocalDecl::with_source_info(dest_ty, self.source_info).immutable());
-
-        let unwind = if let Some(top_cleanup_bb) = &mut self.top_cleanup_bb {
-            for _ in 0..arity {
-                *top_cleanup_bb =
-                    self.bbs[*top_cleanup_bb].terminator().successors().exactly_one().ok().unwrap();
-            }
-            UnwindAction::Cleanup(*top_cleanup_bb)
-        } else {
-            UnwindAction::Unreachable
-        };
+        // _cor_ref = &*cor_ptr
+        let reborrow = Rvalue::Ref(
+            tcx.lifetimes.re_erased,
+            BorrowKind::Mut { kind: MutBorrowKind::Default },
+            tcx.mk_place_deref(Place::from(cor_ptr_local)),
+        );
+        body.basic_blocks_mut()[START_BLOCK].statements.insert(
+            idx,
+            Statement {
+                source_info,
+                kind: StatementKind::Assign(Box::new((Place::from(cor_ref_local), reborrow))),
+            },
+        );
+    }
+    body
+}
 
-        let last_bb = &mut self.bbs[self.last_bb];
-        debug_assert!(last_bb.terminator.is_none());
-        last_bb.statements.push(Statement {
-            source_info: self.source_info,
-            kind: StatementKind::StorageLive(dest),
-        });
-        last_bb.terminator = Some(Terminator {
-            source_info: self.source_info,
+// When dropping an async drop coroutine, we simply continue its execution:
+// async_drop(async_drop(T))::future_drop_poll() => async_drop(T)::poll()
+fn build_adrop_for_adrop_shim<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    proxy_ty: Ty<'tcx>,
+    impl_ty: Ty<'tcx>,
+    span: Span,
+    instance: ty::InstanceKind<'tcx>,
+) -> Body<'tcx> {
+    let source_info = SourceInfo::outermost(span);
+    let proxy_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, proxy_ty);
+    // taking _1.0 (the layout reference out of the Pin)
+    let pin_proxy_layout_local = Local::new(1);
+    let proxy_ref_place = Place::from(pin_proxy_layout_local)
+        .project_deeper(&[PlaceElem::Field(FieldIdx::ZERO, proxy_ref)], tcx);
+    let cor_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, impl_ty);
+
+    // ret_ty = `Poll<()>`
+    let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None));
+    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()]));
+    // env_ty = `Pin<&mut proxy_ty>`
+    let pin_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Pin, None));
+    let env_ty = Ty::new_adt(tcx, pin_adt_ref, tcx.mk_args(&[proxy_ref.into()]));
+    // sig = `fn (Pin<&mut proxy_ty>, &mut Context) -> Poll<()>`
+    let sig = tcx.mk_fn_sig(
+        [env_ty, Ty::new_task_context(tcx)],
+        ret_ty,
+        false,
+        hir::Safety::Safe,
+        ExternAbi::Rust,
+    );
+    // This function will be called with the pinned proxy coroutine layout.
+    // We need to extract `Arg0.0` to get the proxy layout, and then may need to
+    // follow further `.0` projections to reach the impl coroutine.
+    let mut locals = local_decls_for_sig(&sig, span);
+    let mut blocks = IndexVec::with_capacity(3);
+
+    let proxy_ref_local = locals.push(LocalDecl::new(proxy_ref, span));
+
+    let call_bb = BasicBlock::new(1);
+    let return_bb = BasicBlock::new(2);
+
+    let mut statements = Vec::new();
+
+    statements.push(Statement {
+        source_info,
+        kind: StatementKind::Assign(Box::new((
+            Place::from(proxy_ref_local),
+            Rvalue::CopyForDeref(proxy_ref_place),
+        ))),
+    });
+
+    let mut cor_ptr_local = proxy_ref_local;
+    proxy_ty.find_async_drop_impl_coroutine(tcx, |ty| {
+        if ty != proxy_ty {
+            let ty_ptr = Ty::new_mut_ptr(tcx, ty);
+            let impl_ptr_place = Place::from(cor_ptr_local)
+                .project_deeper(&[PlaceElem::Deref, PlaceElem::Field(FieldIdx::ZERO, ty_ptr)], tcx);
+            cor_ptr_local = locals.push(LocalDecl::new(ty_ptr, span));
+            // _cor_ptr = _proxy.0.0 (... .0)
+            statements.push(Statement {
+                source_info,
+                kind: StatementKind::Assign(Box::new((
+                    Place::from(cor_ptr_local),
+                    Rvalue::CopyForDeref(impl_ptr_place),
+                ))),
+            });
+        }
+    });
+
+    // convert impl coroutine ptr into ref
+    let reborrow = Rvalue::Ref(
+        tcx.lifetimes.re_erased,
+        BorrowKind::Mut { kind: MutBorrowKind::Default },
+        tcx.mk_place_deref(Place::from(cor_ptr_local)),
+    );
+    let cor_ref_place = Place::from(locals.push(LocalDecl::new(cor_ref, span)));
+    statements.push(Statement {
+        source_info,
+        kind: StatementKind::Assign(Box::new((cor_ref_place, reborrow))),
+    });
+
+    // cor_pin_ty = `Pin<&mut impl_ty>`
+    let cor_pin_ty = Ty::new_adt(tcx, pin_adt_ref, tcx.mk_args(&[cor_ref.into()]));
+    let cor_pin_place = Place::from(locals.push(LocalDecl::new(cor_pin_ty, span)));
+
+    let pin_fn = tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span));
+    // call Pin<FutTy>::new_unchecked(&mut impl_cor)
+    blocks.push(BasicBlockData {
+        statements,
+        terminator: Some(Terminator {
+            source_info,
             kind: TerminatorKind::Call {
-                func: Operand::Constant(Box::new(ConstOperand {
-                    span: self.span,
-                    user_ty: None,
-                    const_: Const::Val(ConstValue::ZeroSized, func_ty),
-                })),
-                destination: dest.into(),
-                target: Some(target),
-                unwind,
+                func: Operand::function_handle(tcx, pin_fn, [cor_ref.into()], span),
+                args: [dummy_spanned(Operand::Move(cor_ref_place))].into(),
+                destination: cor_pin_place,
+                target: Some(call_bb),
+                unwind: UnwindAction::Continue,
                 call_source: CallSource::Misc,
-                fn_span: self.span,
-                args: self.stack.drain(operands_split..).map(|o| respan(self.span, o)).collect(),
+                fn_span: span,
             },
-        });
-
-        self.put_operand(Operand::Move(dest.into()));
-        self.last_bb = target;
-
-        dest_ty
-    }
+        }),
+        is_cleanup: false,
+    });
+    // When dropping an async drop coroutine, we continue its execution:
+    // we call impl::poll (impl_layout, ctx)
+    let poll_fn = tcx.require_lang_item(LangItem::FuturePoll, None);
+    let resume_ctx = Place::from(Local::new(2));
+    blocks.push(BasicBlockData {
+        statements: vec![],
+        terminator: Some(Terminator {
+            source_info,
+            kind: TerminatorKind::Call {
+                func: Operand::function_handle(tcx, poll_fn, [impl_ty.into()], span),
+                args: [
+                    dummy_spanned(Operand::Move(cor_pin_place)),
+                    dummy_spanned(Operand::Move(resume_ctx)),
+                ]
+                .into(),
+                destination: Place::return_place(),
+                target: Some(return_bb),
+                unwind: UnwindAction::Continue,
+                call_source: CallSource::Misc,
+                fn_span: span,
+            },
+        }),
+        is_cleanup: false,
+    });
+    blocks.push(BasicBlockData {
+        statements: vec![],
+        terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }),
+        is_cleanup: false,
+    });
+
+    let source = MirSource::from_instance(instance);
+    let mut body = new_body(source, blocks, locals, sig.inputs().len(), span);
+    body.phase = MirPhase::Runtime(RuntimePhase::Initial);
+    body
 }
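
For illustration only (not part of the patch): assuming a hypothetical `Proxy<F>` layout whose field stores a raw pointer to the impl coroutine, the MIR assembled above (unpin the proxy, chase the pointer chain, re-pin the impl coroutine, call its `poll`) corresponds roughly to this hand-written forwarder:

```rust
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};

// Hypothetical stand-in for the proxy coroutine layout: it keeps a raw
// pointer to the impl coroutine, like the generated layout's `.0` chain.
struct Proxy<F> {
    impl_cor: *mut F,
}

// Hand-written counterpart of the shim body: take the proxy out of the Pin,
// follow the pointer to the impl coroutine, re-pin it and poll it.
fn future_drop_poll_for_adrop<F: Future<Output = ()>>(
    proxy: Pin<&mut Proxy<F>>,
    cx: &mut Context<'_>,
) -> Poll<()> {
    // SAFETY (assumed for the sketch): the proxy owns a stable pointer to the
    // pinned impl coroutine, so re-pinning the reborrow here is sound.
    unsafe {
        let proxy = proxy.get_unchecked_mut();
        let impl_cor: &mut F = &mut *proxy.impl_cor;
        Pin::new_unchecked(impl_cor).poll(cx)
    }
}
```
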
diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs
index 66fe3ef4141..f541a32cd26 100644
--- a/compiler/rustc_mir_transform/src/validate.rs
+++ b/compiler/rustc_mir_transform/src/validate.rs
@@ -372,9 +372,12 @@ impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
                     );
                 }
             }
-            TerminatorKind::Drop { target, unwind, .. } => {
+            TerminatorKind::Drop { target, unwind, drop, .. } => {
                 self.check_edge(location, *target, EdgeKind::Normal);
                 self.check_unwind_edge(location, *unwind);
+                if let Some(drop) = drop {
+                    self.check_edge(location, *drop, EdgeKind::Normal);
+                }
             }
             TerminatorKind::Call { func, args, .. }
             | TerminatorKind::TailCall { func, args, .. } => {
@@ -747,7 +750,9 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
                             // since we may be in the process of computing this MIR in the
                             // first place.
                             let layout = if def_id == self.caller_body.source.def_id() {
-                                self.caller_body.coroutine_layout_raw()
+                                self.caller_body
+                                    .coroutine_layout_raw()
+                                    .or_else(|| self.tcx.coroutine_layout(def_id, args))
                             } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
                                 && let ty::ClosureKind::FnOnce =
                                     args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
@@ -757,7 +762,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
                                 // Same if this is the by-move body of a coroutine-closure.
                                 self.caller_body.coroutine_layout_raw()
                             } else {
-                                self.tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty())
+                                self.tcx.coroutine_layout(def_id, args)
                             };
 
                             let Some(layout) = layout else {