Diffstat (limited to 'compiler/rustc_middle/src/mir/statement.rs')
-rw-r--r--   compiler/rustc_middle/src/mir/statement.rs   462
1 file changed, 453 insertions(+), 9 deletions(-)
diff --git a/compiler/rustc_middle/src/mir/statement.rs b/compiler/rustc_middle/src/mir/statement.rs
index d345c99f902..f05a798949b 100644
--- a/compiler/rustc_middle/src/mir/statement.rs
+++ b/compiler/rustc_middle/src/mir/statement.rs
@@ -1,7 +1,10 @@
 //! Functionality for statements, operands, places, and things that appear in them.
 
+use tracing::{debug, instrument};
+
 use super::interpret::GlobalAlloc;
 use super::*;
+use crate::ty::CoroutineArgsExt;
 
 ///////////////////////////////////////////////////////////////////////////
 // Statements
@@ -19,18 +22,29 @@ impl Statement<'_> {
     pub fn make_nop(&mut self) {
         self.kind = StatementKind::Nop
     }
-
-    /// Changes a statement to a nop and returns the original statement.
-    #[must_use = "If you don't need the statement, use `make_nop` instead"]
-    pub fn replace_nop(&mut self) -> Self {
-        Statement {
-            source_info: self.source_info,
-            kind: mem::replace(&mut self.kind, StatementKind::Nop),
-        }
-    }
 }
 
 impl<'tcx> StatementKind<'tcx> {
+    /// Returns a simple string representation of a `StatementKind` variant, independent of any
+    /// values it might hold (e.g. `StatementKind::Assign` always returns `"Assign"`).
+    pub const fn name(&self) -> &'static str {
+        match self {
+            StatementKind::Assign(..) => "Assign",
+            StatementKind::FakeRead(..) => "FakeRead",
+            StatementKind::SetDiscriminant { .. } => "SetDiscriminant",
+            StatementKind::Deinit(..) => "Deinit",
+            StatementKind::StorageLive(..) => "StorageLive",
+            StatementKind::StorageDead(..) => "StorageDead",
+            StatementKind::Retag(..) => "Retag",
+            StatementKind::PlaceMention(..) => "PlaceMention",
+            StatementKind::AscribeUserType(..) => "AscribeUserType",
+            StatementKind::Coverage(..) => "Coverage",
+            StatementKind::Intrinsic(..) => "Intrinsic",
+            StatementKind::ConstEvalCounter => "ConstEvalCounter",
+            StatementKind::Nop => "Nop",
+            StatementKind::BackwardIncompatibleDropHint { .. } => "BackwardIncompatibleDropHint",
+        }
+    }
     pub fn as_assign_mut(&mut self) -> Option<&mut (Place<'tcx>, Rvalue<'tcx>)> {
         match self {
             StatementKind::Assign(x) => Some(x),
@@ -49,6 +63,162 @@ impl<'tcx> StatementKind<'tcx> {
 ///////////////////////////////////////////////////////////////////////////
 // Places
 
+#[derive(Copy, Clone, Debug, TypeFoldable, TypeVisitable)]
+pub struct PlaceTy<'tcx> {
+    pub ty: Ty<'tcx>,
+    /// Downcast to a particular variant of an enum or a coroutine, if included.
+    pub variant_index: Option<VariantIdx>,
+}
+
+// At least on 64 bit systems, `PlaceTy` should not be larger than two or three pointers.
+#[cfg(target_pointer_width = "64")]
+rustc_data_structures::static_assert_size!(PlaceTy<'_>, 16);
+
+impl<'tcx> PlaceTy<'tcx> {
+    #[inline]
+    pub fn from_ty(ty: Ty<'tcx>) -> PlaceTy<'tcx> {
+        PlaceTy { ty, variant_index: None }
+    }
+
+    /// `place_ty.field_ty(tcx, f)` computes the type of a given field.
+    ///
+    /// Most clients of `PlaceTy` can instead just extract the relevant type
+    /// directly from their `PlaceElem`, but some instances of `ProjectionElem<V, T>`
+    /// do not carry a `Ty` for `T`.
+    ///
+    /// Note that the resulting type has not been normalized.
+    #[instrument(level = "debug", skip(tcx), ret)]
+    pub fn field_ty(self, tcx: TyCtxt<'tcx>, f: FieldIdx) -> Ty<'tcx> {
+        if let Some(variant_index) = self.variant_index {
+            match *self.ty.kind() {
+                ty::Adt(adt_def, args) if adt_def.is_enum() => {
+                    adt_def.variant(variant_index).fields[f].ty(tcx, args)
+                }
+                ty::Coroutine(def_id, args) => {
+                    let mut variants = args.as_coroutine().state_tys(def_id, tcx);
+                    let Some(mut variant) = variants.nth(variant_index.into()) else {
+                        bug!("variant {variant_index:?} of coroutine out of range: {self:?}");
+                    };
+
+                    variant
+                        .nth(f.index())
+                        .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}"))
+                }
+                _ => bug!("can't downcast non-adt non-coroutine type: {self:?}"),
+            }
+        } else {
+            match self.ty.kind() {
+                ty::Adt(adt_def, args) if !adt_def.is_enum() => {
+                    adt_def.non_enum_variant().fields[f].ty(tcx, args)
+                }
+                ty::Closure(_, args) => args
+                    .as_closure()
+                    .upvar_tys()
+                    .get(f.index())
+                    .copied()
+                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")),
+                ty::CoroutineClosure(_, args) => args
+                    .as_coroutine_closure()
+                    .upvar_tys()
+                    .get(f.index())
+                    .copied()
+                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")),
+                // Only prefix fields (upvars and current state) are
+                // accessible without a variant index.
+                ty::Coroutine(_, args) => args
+                    .as_coroutine()
+                    .prefix_tys()
+                    .get(f.index())
+                    .copied()
+                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")),
+                ty::Tuple(tys) => tys
+                    .get(f.index())
+                    .copied()
+                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")),
+                _ => bug!("can't project out of {self:?}"),
+            }
+        }
+    }
+
+    pub fn multi_projection_ty(
+        self,
+        tcx: TyCtxt<'tcx>,
+        elems: &[PlaceElem<'tcx>],
+    ) -> PlaceTy<'tcx> {
+        elems.iter().fold(self, |place_ty, &elem| place_ty.projection_ty(tcx, elem))
+    }
+
+    /// Convenience wrapper around `projection_ty_core` for
+    /// `PlaceElem`, where we can just use the `Ty` that is already
+    /// stored inline on field projection elems.
+    pub fn projection_ty(self, tcx: TyCtxt<'tcx>, elem: PlaceElem<'tcx>) -> PlaceTy<'tcx> {
+        self.projection_ty_core(tcx, &elem, |_, _, ty| ty, |_, ty| ty)
+    }
+
+    /// `place_ty.projection_ty_core(tcx, elem, |...| { ... })`
+    /// projects `place_ty` onto `elem`, returning the appropriate
+    /// `Ty` or downcast variant corresponding to that projection.
+    /// The `handle_field` callback must map a `FieldIdx` to its `Ty`,
+    /// (which should be trivial when `T` = `Ty`).
+    pub fn projection_ty_core<V, T>(
+        self,
+        tcx: TyCtxt<'tcx>,
+        elem: &ProjectionElem<V, T>,
+        mut handle_field: impl FnMut(&Self, FieldIdx, T) -> Ty<'tcx>,
+        mut handle_opaque_cast_and_subtype: impl FnMut(&Self, T) -> Ty<'tcx>,
+    ) -> PlaceTy<'tcx>
+    where
+        V: ::std::fmt::Debug,
+        T: ::std::fmt::Debug + Copy,
+    {
+        if self.variant_index.is_some() && !matches!(elem, ProjectionElem::Field(..)) {
+            bug!("cannot use non field projection on downcasted place")
+        }
+        let answer = match *elem {
+            ProjectionElem::Deref => {
+                let ty = self.ty.builtin_deref(true).unwrap_or_else(|| {
+                    bug!("deref projection of non-dereferenceable ty {:?}", self)
+                });
+                PlaceTy::from_ty(ty)
+            }
+            ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => {
+                PlaceTy::from_ty(self.ty.builtin_index().unwrap())
+            }
+            ProjectionElem::Subslice { from, to, from_end } => {
+                PlaceTy::from_ty(match self.ty.kind() {
+                    ty::Slice(..) => self.ty,
+                    ty::Array(inner, _) if !from_end => Ty::new_array(tcx, *inner, to - from),
+                    ty::Array(inner, size) if from_end => {
+                        let size = size
+                            .try_to_target_usize(tcx)
+                            .expect("expected subslice projection on fixed-size array");
+                        let len = size - from - to;
+                        Ty::new_array(tcx, *inner, len)
+                    }
+                    _ => bug!("cannot subslice non-array type: `{:?}`", self),
+                })
+            }
+            ProjectionElem::Downcast(_name, index) => {
+                PlaceTy { ty: self.ty, variant_index: Some(index) }
+            }
+            ProjectionElem::Field(f, fty) => PlaceTy::from_ty(handle_field(&self, f, fty)),
+            ProjectionElem::OpaqueCast(ty) => {
+                PlaceTy::from_ty(handle_opaque_cast_and_subtype(&self, ty))
+            }
+            ProjectionElem::Subtype(ty) => {
+                PlaceTy::from_ty(handle_opaque_cast_and_subtype(&self, ty))
+            }
+
+            // FIXME(unsafe_binders): Rename `handle_opaque_cast_and_subtype` to be more general.
+            ProjectionElem::UnwrapUnsafeBinder(ty) => {
+                PlaceTy::from_ty(handle_opaque_cast_and_subtype(&self, ty))
+            }
+        };
+        debug!("projection_ty self: {:?} elem: {:?} yields: {:?}", self, elem, answer);
+        answer
+    }
+}
+
 impl<V, T> ProjectionElem<V, T> {
     /// Returns `true` if the target of this projection may refer to a different region of memory
     /// than the base.
@@ -192,6 +362,25 @@ impl<'tcx> Place<'tcx> {
 
         self.as_ref().project_deeper(more_projections, tcx)
     }
+
+    pub fn ty_from<D: ?Sized>(
+        local: Local,
+        projection: &[PlaceElem<'tcx>],
+        local_decls: &D,
+        tcx: TyCtxt<'tcx>,
+    ) -> PlaceTy<'tcx>
+    where
+        D: HasLocalDecls<'tcx>,
+    {
+        PlaceTy::from_ty(local_decls.local_decls()[local].ty).multi_projection_ty(tcx, projection)
+    }
+
+    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
+    where
+        D: HasLocalDecls<'tcx>,
+    {
+        Place::ty_from(self.local, self.projection, local_decls, tcx)
+    }
 }
 
 impl From<Local> for Place<'_> {
@@ -294,6 +483,13 @@ impl<'tcx> PlaceRef<'tcx> {
 
         Place { local: self.local, projection: tcx.mk_place_elems(new_projections) }
     }
+
+    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
+    where
+        D: HasLocalDecls<'tcx>,
+    {
+        Place::ty_from(self.local, self.projection, local_decls, tcx)
+    }
 }
 
 impl From<Local> for PlaceRef<'_> {
@@ -388,6 +584,28 @@ impl<'tcx> Operand<'tcx> {
         let const_ty = self.constant()?.const_.ty();
         if let ty::FnDef(def_id, args) = *const_ty.kind() { Some((def_id, args)) } else { None }
     }
+
+    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
+    where
+        D: HasLocalDecls<'tcx>,
+    {
+        match self {
+            &Operand::Copy(ref l) | &Operand::Move(ref l) => l.ty(local_decls, tcx).ty,
+            Operand::Constant(c) => c.const_.ty(),
+        }
+    }
+
+    pub fn span<D: ?Sized>(&self, local_decls: &D) -> Span
+    where
+        D: HasLocalDecls<'tcx>,
+    {
+        match self {
+            &Operand::Copy(ref l) | &Operand::Move(ref l) => {
+                local_decls.local_decls()[l.local].source_info.span
+            }
+            Operand::Constant(c) => c.span,
+        }
+    }
 }
 
 impl<'tcx> ConstOperand<'tcx> {
@@ -413,6 +631,11 @@ impl<'tcx> ConstOperand<'tcx> {
 ///////////////////////////////////////////////////////////////////////////
 /// Rvalues
 
+pub enum RvalueInitializationState {
+    Shallow,
+    Deep,
+}
+
 impl<'tcx> Rvalue<'tcx> {
     /// Returns true if rvalue can be safely removed when the result is unused.
     #[inline]
@@ -452,6 +675,70 @@ impl<'tcx> Rvalue<'tcx> {
             | Rvalue::WrapUnsafeBinder(_, _) => true,
         }
     }
+
+    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
+    where
+        D: HasLocalDecls<'tcx>,
+    {
+        match *self {
+            Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),
+            Rvalue::Repeat(ref operand, count) => {
+                Ty::new_array_with_const_len(tcx, operand.ty(local_decls, tcx), count)
+            }
+            Rvalue::ThreadLocalRef(did) => tcx.thread_local_ptr_ty(did),
+            Rvalue::Ref(reg, bk, ref place) => {
+                let place_ty = place.ty(local_decls, tcx).ty;
+                Ty::new_ref(tcx, reg, place_ty, bk.to_mutbl_lossy())
+            }
+            Rvalue::RawPtr(kind, ref place) => {
+                let place_ty = place.ty(local_decls, tcx).ty;
+                Ty::new_ptr(tcx, place_ty, kind.to_mutbl_lossy())
+            }
+            Rvalue::Len(..) => tcx.types.usize,
+            Rvalue::Cast(.., ty) => ty,
+            Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => {
+                let lhs_ty = lhs.ty(local_decls, tcx);
+                let rhs_ty = rhs.ty(local_decls, tcx);
+                op.ty(tcx, lhs_ty, rhs_ty)
+            }
+            Rvalue::UnaryOp(op, ref operand) => {
+                let arg_ty = operand.ty(local_decls, tcx);
+                op.ty(tcx, arg_ty)
+            }
+            Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
+            Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..), _) => {
+                tcx.types.usize
+            }
+            Rvalue::NullaryOp(NullOp::ContractChecks, _)
+            | Rvalue::NullaryOp(NullOp::UbChecks, _) => tcx.types.bool,
+            Rvalue::Aggregate(ref ak, ref ops) => match **ak {
+                AggregateKind::Array(ty) => Ty::new_array(tcx, ty, ops.len() as u64),
+                AggregateKind::Tuple => {
+                    Ty::new_tup_from_iter(tcx, ops.iter().map(|op| op.ty(local_decls, tcx)))
+                }
+                AggregateKind::Adt(did, _, args, _, _) => tcx.type_of(did).instantiate(tcx, args),
+                AggregateKind::Closure(did, args) => Ty::new_closure(tcx, did, args),
+                AggregateKind::Coroutine(did, args) => Ty::new_coroutine(tcx, did, args),
+                AggregateKind::CoroutineClosure(did, args) => {
+                    Ty::new_coroutine_closure(tcx, did, args)
+                }
+                AggregateKind::RawPtr(ty, mutability) => Ty::new_ptr(tcx, ty, mutability),
+            },
+            Rvalue::ShallowInitBox(_, ty) => Ty::new_box(tcx, ty),
+            Rvalue::CopyForDeref(ref place) => place.ty(local_decls, tcx).ty,
+            Rvalue::WrapUnsafeBinder(_, ty) => ty,
+        }
+    }
+
+    #[inline]
+    /// Returns `true` if this rvalue is deeply initialized (most rvalues) or
+    /// whether its only shallowly initialized (`Rvalue::Box`).
+    pub fn initialization_state(&self) -> RvalueInitializationState {
+        match *self {
+            Rvalue::ShallowInitBox(_, _) => RvalueInitializationState::Shallow,
+            _ => RvalueInitializationState::Deep,
+        }
+    }
 }
 
 impl BorrowKind {
@@ -474,4 +761,161 @@ impl BorrowKind {
             BorrowKind::Mut { kind: MutBorrowKind::TwoPhaseBorrow } => true,
         }
     }
+
+    pub fn to_mutbl_lossy(self) -> hir::Mutability {
+        match self {
+            BorrowKind::Mut { .. } => hir::Mutability::Mut,
+            BorrowKind::Shared => hir::Mutability::Not,
+
+            // We have no type corresponding to a shallow borrow, so use
+            // `&` as an approximation.
+            BorrowKind::Fake(_) => hir::Mutability::Not,
+        }
+    }
+}
+
+impl<'tcx> UnOp {
+    pub fn ty(&self, tcx: TyCtxt<'tcx>, arg_ty: Ty<'tcx>) -> Ty<'tcx> {
+        match self {
+            UnOp::Not | UnOp::Neg => arg_ty,
+            UnOp::PtrMetadata => arg_ty.pointee_metadata_ty_or_projection(tcx),
+        }
+    }
+}
+
+impl<'tcx> BinOp {
+    pub fn ty(&self, tcx: TyCtxt<'tcx>, lhs_ty: Ty<'tcx>, rhs_ty: Ty<'tcx>) -> Ty<'tcx> {
+        // FIXME: handle SIMD correctly
+        match self {
+            &BinOp::Add
+            | &BinOp::AddUnchecked
+            | &BinOp::Sub
+            | &BinOp::SubUnchecked
+            | &BinOp::Mul
+            | &BinOp::MulUnchecked
+            | &BinOp::Div
+            | &BinOp::Rem
+            | &BinOp::BitXor
+            | &BinOp::BitAnd
+            | &BinOp::BitOr => {
+                // these should be integers or floats of the same size.
+                assert_eq!(lhs_ty, rhs_ty);
+                lhs_ty
+            }
+            &BinOp::AddWithOverflow | &BinOp::SubWithOverflow | &BinOp::MulWithOverflow => {
+                // these should be integers of the same size.
+                assert_eq!(lhs_ty, rhs_ty);
+                Ty::new_tup(tcx, &[lhs_ty, tcx.types.bool])
+            }
+            &BinOp::Shl
+            | &BinOp::ShlUnchecked
+            | &BinOp::Shr
+            | &BinOp::ShrUnchecked
+            | &BinOp::Offset => {
+                lhs_ty // lhs_ty can be != rhs_ty
+            }
+            &BinOp::Eq | &BinOp::Lt | &BinOp::Le | &BinOp::Ne | &BinOp::Ge | &BinOp::Gt => {
+                tcx.types.bool
+            }
+            &BinOp::Cmp => {
+                // these should be integer-like types of the same size.
+                assert_eq!(lhs_ty, rhs_ty);
+                tcx.ty_ordering_enum(None)
+            }
+        }
+    }
+    pub(crate) fn to_hir_binop(self) -> hir::BinOpKind {
+        match self {
+            // HIR `+`/`-`/`*` can map to either of these MIR BinOp, depending
+            // on whether overflow checks are enabled or not.
+            BinOp::Add | BinOp::AddWithOverflow => hir::BinOpKind::Add,
+            BinOp::Sub | BinOp::SubWithOverflow => hir::BinOpKind::Sub,
+            BinOp::Mul | BinOp::MulWithOverflow => hir::BinOpKind::Mul,
+            BinOp::Div => hir::BinOpKind::Div,
+            BinOp::Rem => hir::BinOpKind::Rem,
+            BinOp::BitXor => hir::BinOpKind::BitXor,
+            BinOp::BitAnd => hir::BinOpKind::BitAnd,
+            BinOp::BitOr => hir::BinOpKind::BitOr,
+            BinOp::Shl => hir::BinOpKind::Shl,
+            BinOp::Shr => hir::BinOpKind::Shr,
+            BinOp::Eq => hir::BinOpKind::Eq,
+            BinOp::Ne => hir::BinOpKind::Ne,
+            BinOp::Lt => hir::BinOpKind::Lt,
+            BinOp::Gt => hir::BinOpKind::Gt,
+            BinOp::Le => hir::BinOpKind::Le,
+            BinOp::Ge => hir::BinOpKind::Ge,
+            // We don't have HIR syntax for these.
+            BinOp::Cmp
+            | BinOp::AddUnchecked
+            | BinOp::SubUnchecked
+            | BinOp::MulUnchecked
+            | BinOp::ShlUnchecked
+            | BinOp::ShrUnchecked
+            | BinOp::Offset => {
+                unreachable!()
+            }
+        }
+    }
+
+    /// If this is a `FooWithOverflow`, return `Some(Foo)`.
+    pub fn overflowing_to_wrapping(self) -> Option<BinOp> {
+        Some(match self {
+            BinOp::AddWithOverflow => BinOp::Add,
+            BinOp::SubWithOverflow => BinOp::Sub,
+            BinOp::MulWithOverflow => BinOp::Mul,
+            _ => return None,
+        })
+    }
+
+    /// Returns whether this is a `FooWithOverflow`
+    pub fn is_overflowing(self) -> bool {
+        self.overflowing_to_wrapping().is_some()
+    }
+
+    /// If this is a `Foo`, return `Some(FooWithOverflow)`.
+    pub fn wrapping_to_overflowing(self) -> Option<BinOp> {
+        Some(match self {
+            BinOp::Add => BinOp::AddWithOverflow,
+            BinOp::Sub => BinOp::SubWithOverflow,
+            BinOp::Mul => BinOp::MulWithOverflow,
+            _ => return None,
+        })
+    }
+}
+
+impl From<Mutability> for RawPtrKind {
+    fn from(other: Mutability) -> Self {
+        match other {
+            Mutability::Mut => RawPtrKind::Mut,
+            Mutability::Not => RawPtrKind::Const,
+        }
+    }
+}
+
+impl RawPtrKind {
+    pub fn is_fake(self) -> bool {
+        match self {
+            RawPtrKind::Mut | RawPtrKind::Const => false,
+            RawPtrKind::FakeForPtrMetadata => true,
+        }
+    }
+
+    pub fn to_mutbl_lossy(self) -> Mutability {
+        match self {
+            RawPtrKind::Mut => Mutability::Mut,
+            RawPtrKind::Const => Mutability::Not,

+            // We have no type corresponding to a fake borrow, so use
+            // `*const` as an approximation.
+            RawPtrKind::FakeForPtrMetadata => Mutability::Not,
+        }
+    }
+
+    pub fn ptr_str(self) -> &'static str {
+        match self {
+            RawPtrKind::Mut => "mut",
+            RawPtrKind::Const => "const",
+            RawPtrKind::FakeForPtrMetadata => "const (fake)",
+        }
+    }
 }
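As a quick orientation to the statement side of this move: `StatementKind::name` gives a value-independent label for each statement kind. The sketch below is hypothetical (the helper name `count_statement_kinds` is not part of rustc) and assumes a nightly `rustc_private` setup where `rustc_middle` is available:

    use std::collections::BTreeMap;

    use rustc_middle::mir::Body;

    /// Tally the statements of a MIR body by `StatementKind::name()`, which is
    /// value-independent (every `Assign` counts as "Assign", and so on).
    fn count_statement_kinds(body: &Body<'_>) -> BTreeMap<&'static str, usize> {
        let mut counts = BTreeMap::new();
        for block in body.basic_blocks.iter() {
            for statement in &block.statements {
                *counts.entry(statement.kind.name()).or_insert(0usize) += 1;
            }
        }
        counts
    }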
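`Place::ty` is now defined here in terms of `PlaceTy::multi_projection_ty`: start from the base local's type and fold each projection element over it. A minimal sketch of that fold, under the same `rustc_private` assumption (`type_of_place` is a hypothetical name):

    use rustc_middle::mir::{Body, Place, PlaceTy};
    use rustc_middle::ty::TyCtxt;

    /// Recompute the type of a projected place by folding its projection
    /// elements over the base local's type, mirroring `PlaceTy::multi_projection_ty`.
    fn type_of_place<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, place: Place<'tcx>) -> PlaceTy<'tcx> {
        let base = PlaceTy::from_ty(body.local_decls[place.local].ty);
        // `Field` elements use the `Ty` stored inline in the `PlaceElem`;
        // `Downcast` only records a `variant_index` on the returned `PlaceTy`.
        base.multi_projection_ty(tcx, place.projection)
    }

In practice callers just write `place.ty(body, tcx)` (or pass `&body.local_decls`), which performs exactly this fold; `&Body` works because `Body` implements `HasLocalDecls`.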
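The `Operand::ty`, `Place::ty`, and `Rvalue::ty` helpers moved here all accept any `HasLocalDecls` implementor, so a body can be queried directly. A lint-style consistency check built on them might look like the sketch below; `check_assign_types` is hypothetical, and the strict equality deliberately ignores subtyping and opaque-type wrinkles:

    use rustc_middle::mir::{Body, StatementKind};
    use rustc_middle::ty::TyCtxt;

    /// Report `Assign` statements whose destination place and source rvalue
    /// disagree on their MIR-level type.
    fn check_assign_types<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
        for block in body.basic_blocks.iter() {
            for statement in &block.statements {
                if let StatementKind::Assign(assign) = &statement.kind {
                    let (place, rvalue) = &**assign;
                    // Both helpers take anything implementing `HasLocalDecls`,
                    // so the body itself serves as the local-decl source.
                    let place_ty = place.ty(body, tcx).ty;
                    let rvalue_ty = rvalue.ty(body, tcx);
                    if place_ty != rvalue_ty {
                        eprintln!(
                            "{:?}: place has type {place_ty:?} but rvalue has type {rvalue_ty:?}",
                            statement.source_info.span
                        );
                    }
                }
            }
        }
    }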
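`BinOp::ty` computes result types, and `wrapping_to_overflowing` / `overflowing_to_wrapping` convert between the plain and `*WithOverflow` forms of `Add`/`Sub`/`Mul`. A small sketch combining them, with the hypothetical helper name `checked_result_ty`:

    use rustc_middle::mir::BinOp;
    use rustc_middle::ty::{Ty, TyCtxt};

    /// Result type of a binary operation if it were overflow-checked: for ops
    /// with a `*WithOverflow` form this is `(T, bool)`, otherwise it is the
    /// plain result type computed by `BinOp::ty`.
    fn checked_result_ty<'tcx>(tcx: TyCtxt<'tcx>, op: BinOp, lhs: Ty<'tcx>, rhs: Ty<'tcx>) -> Ty<'tcx> {
        match op.wrapping_to_overflowing() {
            // e.g. `Add` becomes `AddWithOverflow`, whose `ty` is `(lhs_ty, bool)`.
            Some(checked) => checked.ty(tcx, lhs, rhs),
            None => op.ty(tcx, lhs, rhs),
        }
    }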
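Finally, the lossy mutability conversions are what `Rvalue::ty` relies on for `Ref` and `RawPtr`: fake borrows and metadata-only raw pointers have no surface-level type, so both map to the shared/`*const` approximation. A sketch under the same assumptions (`ref_and_ptr_tys` is hypothetical):

    use rustc_middle::mir::{BorrowKind, RawPtrKind};
    use rustc_middle::ty::{Region, Ty, TyCtxt};

    /// Build the reference and raw-pointer types that `Rvalue::Ref` and
    /// `Rvalue::RawPtr` of the given kinds would produce for `pointee`.
    fn ref_and_ptr_tys<'tcx>(
        tcx: TyCtxt<'tcx>,
        region: Region<'tcx>,
        pointee: Ty<'tcx>,
        bk: BorrowKind,
        pk: RawPtrKind,
    ) -> (Ty<'tcx>, Ty<'tcx>) {
        let ref_ty = Ty::new_ref(tcx, region, pointee, bk.to_mutbl_lossy());
        let ptr_ty = Ty::new_ptr(tcx, pointee, pk.to_mutbl_lossy());
        (ref_ty, ptr_ty)
    }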
