Diffstat (limited to 'compiler/rustc_ty_utils/src')
-rw-r--r--   compiler/rustc_ty_utils/src/abi.rs        | 36
-rw-r--r--   compiler/rustc_ty_utils/src/needs_drop.rs | 43
2 files changed, 42 insertions, 37 deletions
diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs
index af2e000e340..89d1dd8cf23 100644
--- a/compiler/rustc_ty_utils/src/abi.rs
+++ b/compiler/rustc_ty_utils/src/abi.rs
@@ -39,7 +39,7 @@ fn fn_sig_for_fn_abi<'tcx>(
             tcx.thread_local_ptr_ty(instance.def_id()),
             false,
             hir::Safety::Safe,
-            rustc_abi::ExternAbi::Unadjusted,
+            rustc_abi::ExternAbi::Rust,
         );
     }
 
@@ -268,20 +268,21 @@ fn fn_abi_of_instance<'tcx>(
 }
 
 // Handle safe Rust thin and wide pointers.
-fn adjust_for_rust_scalar<'tcx>(
+fn arg_attrs_for_rust_scalar<'tcx>(
     cx: LayoutCx<'tcx>,
-    attrs: &mut ArgAttributes,
     scalar: Scalar,
     layout: TyAndLayout<'tcx>,
     offset: Size,
     is_return: bool,
     drop_target_pointee: Option<Ty<'tcx>>,
-) {
+) -> ArgAttributes {
+    let mut attrs = ArgAttributes::new();
+
     // Booleans are always a noundef i1 that needs to be zero-extended.
     if scalar.is_bool() {
         attrs.ext(ArgExtension::Zext);
         attrs.set(ArgAttribute::NoUndef);
-        return;
+        return attrs;
     }
 
     if !scalar.is_uninit_valid() {
@@ -289,7 +290,7 @@ fn adjust_for_rust_scalar<'tcx>(
     }
 
     // Only pointer types handled below.
-    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return };
+    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return attrs };
 
     // Set `nonnull` if the validity range excludes zero, or for the argument to `drop_in_place`,
     // which must be nonnull per its documented safety requirements.
@@ -358,6 +359,8 @@ fn adjust_for_rust_scalar<'tcx>(
             }
         }
     }
+
+    attrs
 }
 
 /// Ensure that the ABI makes basic sense.
@@ -471,9 +474,10 @@ fn fn_abi_new_uncached<'tcx>(
     let (caller_location, determined_fn_def_id, is_virtual_call) =
         if let Some(instance) = instance {
             let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
+            let is_tls_shim_call = matches!(instance.def, ty::InstanceKind::ThreadLocalShim(_));
             (
                 instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
-                if is_virtual_call { None } else { Some(instance.def_id()) },
+                if is_virtual_call || is_tls_shim_call { None } else { Some(instance.def_id()) },
                 is_virtual_call,
             )
         } else {
@@ -530,17 +534,7 @@ fn fn_abi_new_uncached<'tcx>(
         };
 
         let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
-            let mut attrs = ArgAttributes::new();
-            adjust_for_rust_scalar(
-                *cx,
-                &mut attrs,
-                scalar,
-                *layout,
-                offset,
-                is_return,
-                drop_target_pointee,
-            );
-            attrs
+            arg_attrs_for_rust_scalar(*cx, scalar, *layout, offset, is_return, drop_target_pointee)
         });
 
         if arg.layout.is_zst() {
@@ -563,6 +557,7 @@ fn fn_abi_new_uncached<'tcx>(
         c_variadic: sig.c_variadic,
         fixed_count: inputs.len() as u32,
         conv,
+        // FIXME return false for tls shim
        can_unwind: fn_can_unwind(
            tcx,
            // Since `#[rustc_nounwind]` can change unwinding, we cannot infer unwinding by `fn_def_id` for a virtual call.
@@ -575,8 +570,9 @@ fn fn_abi_new_uncached<'tcx>(
         &mut fn_abi,
         sig.abi,
         // If this is a virtual call, we cannot pass the `fn_def_id`, as it might call other
-        // functions from vtable. Internally, `deduced_param_attrs` attempts to infer attributes by
-        // visit the function body.
+        // functions from vtable. And for a tls shim, passing the `fn_def_id` would refer to
+        // the underlying static. Internally, `deduced_param_attrs` attempts to infer attributes
+        // by visit the function body.
         determined_fn_def_id,
     );
     debug!("fn_abi_new_uncached = {:?}", fn_abi);
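
At its core, the abi.rs change above switches `adjust_for_rust_scalar` from filling in a caller-provided `&mut ArgAttributes` out-parameter to building and returning the attributes as `arg_attrs_for_rust_scalar`. Below is a minimal, self-contained sketch of that refactoring pattern; `Attrs`, `adjust_attrs`, and `attrs_for` are made-up stand-ins for illustration, not rustc's actual `ArgAttributes` API.

#[derive(Debug, Default, PartialEq)]
struct Attrs {
    zext: bool,
    noundef: bool,
}

// Old shape: the caller allocates and the callee mutates an out-parameter.
fn adjust_attrs(attrs: &mut Attrs, is_bool: bool) {
    if is_bool {
        attrs.zext = true;
        attrs.noundef = true;
    }
}

// New shape: build the value locally and return it, so every early exit
// (`return attrs`) hands back a complete result.
fn attrs_for(is_bool: bool) -> Attrs {
    let mut attrs = Attrs::default();
    if is_bool {
        attrs.zext = true;
        attrs.noundef = true;
        return attrs;
    }
    attrs
}

fn main() {
    let mut old_style = Attrs::default();
    adjust_attrs(&mut old_style, true);
    assert_eq!(old_style, attrs_for(true));
    println!("both shapes agree: {:?}", old_style);
}

Returning the value is what lets the `ArgAbi::new` closure in the diff shrink to a single expression instead of declaring, passing, and re-yielding a local `attrs`.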
diff --git a/compiler/rustc_ty_utils/src/needs_drop.rs b/compiler/rustc_ty_utils/src/needs_drop.rs
index c3b04c20f4b..13d56889bd1 100644
--- a/compiler/rustc_ty_utils/src/needs_drop.rs
+++ b/compiler/rustc_ty_utils/src/needs_drop.rs
@@ -101,9 +101,6 @@ fn has_significant_drop_raw<'tcx>(
 struct NeedsDropTypes<'tcx, F> {
     tcx: TyCtxt<'tcx>,
     typing_env: ty::TypingEnv<'tcx>,
-    /// Whether to reveal coroutine witnesses, this is set
-    /// to `false` unless we compute `needs_drop` for a coroutine witness.
-    reveal_coroutine_witnesses: bool,
     query_ty: Ty<'tcx>,
     seen_tys: FxHashSet<Ty<'tcx>>,
     /// A stack of types left to process, and the recursion depth when we
@@ -115,6 +112,15 @@ struct NeedsDropTypes<'tcx, F> {
     adt_components: F,
     /// Set this to true if an exhaustive list of types involved in
     /// drop obligation is requested.
+    // FIXME: Calling this bool `exhaustive` is confusing and possibly a footgun,
+    // since it does two things: It makes the iterator yield *all* of the types
+    // that need drop, and it also affects the computation of the drop components
+    // on `Coroutine`s. The latter is somewhat confusing, and probably should be
+    // a function of `typing_env`. See the HACK comment below for why this is
+    // necessary. If this isn't possible, then we probably should turn this into
+    // a `NeedsDropMode` so that we can have a variant like `CollectAllSignificantDrops`,
+    // which will more accurately indicate that we want *all* of the *significant*
+    // drops, which are the two important behavioral changes toggled by this bool.
     exhaustive: bool,
 }
 
@@ -131,7 +137,6 @@ impl<'tcx, F> NeedsDropTypes<'tcx, F> {
         Self {
             tcx,
             typing_env,
-            reveal_coroutine_witnesses: exhaustive,
             seen_tys,
             query_ty: ty,
             unchecked_tys: vec![(ty, 0)],
@@ -195,23 +200,27 @@ where
                 // for the coroutine witness and check whether any of the contained types
                 // need to be dropped, and only require the captured types to be live
                 // if they do.
-                ty::Coroutine(_, args) => {
-                    if self.reveal_coroutine_witnesses {
-                        queue_type(self, args.as_coroutine().witness());
+                ty::Coroutine(def_id, args) => {
+                    // FIXME: See FIXME on `exhaustive` field above.
+                    if self.exhaustive {
+                        for upvar in args.as_coroutine().upvar_tys() {
+                            queue_type(self, upvar);
+                        }
+                        queue_type(self, args.as_coroutine().resume_ty());
+                        if let Some(witness) = tcx.mir_coroutine_witnesses(def_id) {
+                            for field_ty in &witness.field_tys {
+                                queue_type(
+                                    self,
+                                    EarlyBinder::bind(field_ty.ty).instantiate(tcx, args),
+                                );
+                            }
+                        }
                     } else {
                         return Some(self.always_drop_component(ty));
                     }
                 }
-                ty::CoroutineWitness(def_id, args) => {
-                    if let Some(witness) = tcx.mir_coroutine_witnesses(def_id) {
-                        self.reveal_coroutine_witnesses = true;
-                        for field_ty in &witness.field_tys {
-                            queue_type(
-                                self,
-                                EarlyBinder::bind(field_ty.ty).instantiate(tcx, args),
-                            );
-                        }
-                    }
+                ty::CoroutineWitness(..) => {
+                    unreachable!("witness should be handled in parent");
                 }
 
                 ty::UnsafeBinder(bound_ty) => {
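
The needs_drop.rs change folds the old `ty::CoroutineWitness` handling into the `ty::Coroutine` arm, gated on `exhaustive`. Below is a simplified, self-contained analogue of that worklist behaviour; the toy `Ty` enum and `drop_components` function are hypothetical stand-ins for rustc's interned types and queries, kept only to show the control flow toggled by `exhaustive`.

// Simplified analogue of the `NeedsDropTypes` worklist: a stack of types to
// inspect, where a "coroutine" is either reported as a single always-drop
// component or expanded into its upvars, resume type and witness field types.
#[derive(Clone, Debug)]
enum Ty {
    Unit,
    NeedsDrop(&'static str),
    Coroutine { upvars: Vec<Ty>, resume: Box<Ty>, witness_fields: Vec<Ty> },
}

fn drop_components(root: Ty, exhaustive: bool) -> Vec<String> {
    let mut out = Vec::new();
    let mut stack = vec![root];
    while let Some(ty) = stack.pop() {
        match ty {
            Ty::Unit => {}
            Ty::NeedsDrop(name) => out.push(name.to_string()),
            Ty::Coroutine { upvars, resume, witness_fields } => {
                if exhaustive {
                    // Mirrors the new `ty::Coroutine` arm: queue the upvars,
                    // the resume type and the witness field types.
                    stack.extend(upvars);
                    stack.push(*resume);
                    stack.extend(witness_fields);
                } else {
                    // Mirrors `always_drop_component`: stop at the coroutine.
                    out.push("<coroutine>".to_string());
                }
            }
        }
    }
    out
}

fn main() {
    let coro = Ty::Coroutine {
        upvars: vec![Ty::NeedsDrop("String")],
        resume: Box::new(Ty::Unit),
        witness_fields: vec![Ty::NeedsDrop("Vec<u8>")],
    };
    println!("{:?}", drop_components(coro.clone(), false)); // just "<coroutine>"
    println!("{:?}", drop_components(coro, true)); // the upvar and witness field types
}

With `exhaustive` off, the coroutine is treated as a single always-drop component; with it on, its constituent types are pushed onto the same work stack, which is the shape of the new `ty::Coroutine` arm in the diff.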
