diff options
| author | Zalathar <Zalathar@users.noreply.github.com> | 2024-12-16 12:44:54 +1100 |
|---|---|---|
| committer | Zalathar <Zalathar@users.noreply.github.com> | 2024-12-17 11:41:11 +1100 |
| commit | bccbe70991775218b4dd2d31919558109e8cc66f (patch) | |
| tree | cfdb563c42b359a6fac979d09e0b25414f0aae4d /compiler/rustc_mir_build/src/builder/expr | |
| parent | 6d9f6ae36ae1299d6126ba40c15191f7aa3b79d8 (diff) | |
| download | rust-bccbe70991775218b4dd2d31919558109e8cc66f.tar.gz rust-bccbe70991775218b4dd2d31919558109e8cc66f.zip | |
Rename `rustc_mir_build::build` to `builder`
Diffstat (limited to 'compiler/rustc_mir_build/src/builder/expr')
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/as_constant.rs | 173 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/as_operand.rs | 200 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/as_place.rs | 832 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs | 840 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/as_temp.rs | 139 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/category.rs | 94 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/into.rs | 650 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/mod.rs | 70 | ||||
| -rw-r--r-- | compiler/rustc_mir_build/src/builder/expr/stmt.rs | 196 |
9 files changed, 3194 insertions, 0 deletions
diff --git a/compiler/rustc_mir_build/src/builder/expr/as_constant.rs b/compiler/rustc_mir_build/src/builder/expr/as_constant.rs new file mode 100644 index 00000000000..177c1e33a83 --- /dev/null +++ b/compiler/rustc_mir_build/src/builder/expr/as_constant.rs @@ -0,0 +1,173 @@ +//! See docs in build/expr/mod.rs + +use rustc_abi::Size; +use rustc_ast as ast; +use rustc_hir::LangItem; +use rustc_middle::mir::interpret::{ + Allocation, CTFE_ALLOC_SALT, LitToConstError, LitToConstInput, Scalar, +}; +use rustc_middle::mir::*; +use rustc_middle::thir::*; +use rustc_middle::ty::{ + self, CanonicalUserType, CanonicalUserTypeAnnotation, Ty, TyCtxt, UserTypeAnnotationIndex, +}; +use rustc_middle::{bug, mir, span_bug}; +use tracing::{instrument, trace}; + +use crate::builder::{Builder, parse_float_into_constval}; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Compile `expr`, yielding a compile-time constant. Assumes that + /// `expr` is a valid compile-time constant! + pub(crate) fn as_constant(&mut self, expr: &Expr<'tcx>) -> ConstOperand<'tcx> { + let this = self; + let tcx = this.tcx; + let Expr { ty, temp_lifetime: _, span, ref kind } = *expr; + match kind { + ExprKind::Scope { region_scope: _, lint_level: _, value } => { + this.as_constant(&this.thir[*value]) + } + _ => as_constant_inner( + expr, + |user_ty| { + Some(this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation { + span, + user_ty: user_ty.clone(), + inferred_ty: ty, + })) + }, + tcx, + ), + } + } +} + +pub(crate) fn as_constant_inner<'tcx>( + expr: &Expr<'tcx>, + push_cuta: impl FnMut(&Box<CanonicalUserType<'tcx>>) -> Option<UserTypeAnnotationIndex>, + tcx: TyCtxt<'tcx>, +) -> ConstOperand<'tcx> { + let Expr { ty, temp_lifetime: _, span, ref kind } = *expr; + match *kind { + ExprKind::Literal { lit, neg } => { + let const_ = match lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg }) + { + Ok(c) => c, + Err(LitToConstError::Reported(guar)) => { + Const::Ty(Ty::new_error(tcx, guar), 
ty::Const::new_error(tcx, guar)) + } + Err(LitToConstError::TypeError) => { + bug!("encountered type error in `lit_to_mir_constant`") + } + }; + + ConstOperand { span, user_ty: None, const_ } + } + ExprKind::NonHirLiteral { lit, ref user_ty } => { + let user_ty = user_ty.as_ref().and_then(push_cuta); + + let const_ = Const::Val(ConstValue::Scalar(Scalar::Int(lit)), ty); + + ConstOperand { span, user_ty, const_ } + } + ExprKind::ZstLiteral { ref user_ty } => { + let user_ty = user_ty.as_ref().and_then(push_cuta); + + let const_ = Const::Val(ConstValue::ZeroSized, ty); + + ConstOperand { span, user_ty, const_ } + } + ExprKind::NamedConst { def_id, args, ref user_ty } => { + let user_ty = user_ty.as_ref().and_then(push_cuta); + + let uneval = mir::UnevaluatedConst::new(def_id, args); + let const_ = Const::Unevaluated(uneval, ty); + + ConstOperand { user_ty, span, const_ } + } + ExprKind::ConstParam { param, def_id: _ } => { + let const_param = ty::Const::new_param(tcx, param); + let const_ = Const::Ty(expr.ty, const_param); + + ConstOperand { user_ty: None, span, const_ } + } + ExprKind::ConstBlock { did: def_id, args } => { + let uneval = mir::UnevaluatedConst::new(def_id, args); + let const_ = Const::Unevaluated(uneval, ty); + + ConstOperand { user_ty: None, span, const_ } + } + ExprKind::StaticRef { alloc_id, ty, .. 
} => { + let const_val = ConstValue::Scalar(Scalar::from_pointer(alloc_id.into(), &tcx)); + let const_ = Const::Val(const_val, ty); + + ConstOperand { span, user_ty: None, const_ } + } + _ => span_bug!(span, "expression is not a valid constant {:?}", kind), + } +} + +#[instrument(skip(tcx, lit_input))] +fn lit_to_mir_constant<'tcx>( + tcx: TyCtxt<'tcx>, + lit_input: LitToConstInput<'tcx>, +) -> Result<Const<'tcx>, LitToConstError> { + let LitToConstInput { lit, ty, neg } = lit_input; + let trunc = |n| { + let width = match tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ty)) { + Ok(layout) => layout.size, + Err(_) => { + tcx.dcx().bug(format!("couldn't compute width of literal: {:?}", lit_input.lit)) + } + }; + trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits()); + let result = width.truncate(n); + trace!("trunc result: {}", result); + Ok(ConstValue::Scalar(Scalar::from_uint(result, width))) + }; + + let value = match (lit, ty.kind()) { + (ast::LitKind::Str(s, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_str() => { + let s = s.as_str(); + let allocation = Allocation::from_bytes_byte_aligned_immutable(s.as_bytes()); + let allocation = tcx.mk_const_alloc(allocation); + ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() } + } + (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) + if matches!(inner_ty.kind(), ty::Slice(_)) => + { + let allocation = Allocation::from_bytes_byte_aligned_immutable(data as &[u8]); + let allocation = tcx.mk_const_alloc(allocation); + ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() } + } + (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => { + let id = tcx.allocate_bytes_dedup(data, CTFE_ALLOC_SALT); + ConstValue::Scalar(Scalar::from_pointer(id.into(), &tcx)) + } + (ast::LitKind::CStr(data, _), ty::Ref(_, inner_ty, _)) if matches!(inner_ty.kind(), ty::Adt(def, _) if tcx.is_lang_item(def.did(), 
LangItem::CStr)) => + { + let allocation = Allocation::from_bytes_byte_aligned_immutable(data as &[u8]); + let allocation = tcx.mk_const_alloc(allocation); + ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() } + } + (ast::LitKind::Byte(n), ty::Uint(ty::UintTy::U8)) => { + ConstValue::Scalar(Scalar::from_uint(*n, Size::from_bytes(1))) + } + (ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => { + trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() })? + } + (ast::LitKind::Float(n, _), ty::Float(fty)) => parse_float_into_constval(*n, *fty, neg) + .ok_or_else(|| { + LitToConstError::Reported( + tcx.dcx() + .delayed_bug(format!("couldn't parse float literal: {:?}", lit_input.lit)), + ) + })?, + (ast::LitKind::Bool(b), ty::Bool) => ConstValue::Scalar(Scalar::from_bool(*b)), + (ast::LitKind::Char(c), ty::Char) => ConstValue::Scalar(Scalar::from_char(*c)), + (ast::LitKind::Err(guar), _) => return Err(LitToConstError::Reported(*guar)), + _ => return Err(LitToConstError::TypeError), + }; + + Ok(Const::Val(value, ty)) +} diff --git a/compiler/rustc_mir_build/src/builder/expr/as_operand.rs b/compiler/rustc_mir_build/src/builder/expr/as_operand.rs new file mode 100644 index 00000000000..63e9b1dc6cd --- /dev/null +++ b/compiler/rustc_mir_build/src/builder/expr/as_operand.rs @@ -0,0 +1,200 @@ +//! 
See docs in build/expr/mod.rs + +use rustc_middle::mir::*; +use rustc_middle::thir::*; +use tracing::{debug, instrument}; + +use crate::builder::expr::category::Category; +use crate::builder::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary}; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Construct a temporary lifetime restricted to just the local scope + pub(crate) fn local_temp_lifetime(&self) -> TempLifetime { + let local_scope = self.local_scope(); + TempLifetime { temp_lifetime: Some(local_scope), backwards_incompatible: None } + } + + /// Returns an operand suitable for use until the end of the current + /// scope expression. + /// + /// The operand returned from this function will *not be valid* + /// after the current enclosing `ExprKind::Scope` has ended, so + /// please do *not* return it from functions to avoid bad + /// miscompiles. + pub(crate) fn as_local_operand( + &mut self, + block: BasicBlock, + expr_id: ExprId, + ) -> BlockAnd<Operand<'tcx>> { + self.as_operand( + block, + self.local_temp_lifetime(), + expr_id, + LocalInfo::Boring, + NeedsTemporary::Maybe, + ) + } + + /// Returns an operand suitable for use until the end of the current scope expression and + /// suitable also to be passed as function arguments. + /// + /// The operand returned from this function will *not be valid* after an ExprKind::Scope is + /// passed, so please do *not* return it from functions to avoid bad miscompiles. Returns an + /// operand suitable for use as a call argument. This is almost always equivalent to + /// `as_operand`, except for the particular case of passing values of (potentially) unsized + /// types "by value" (see details below). + /// + /// The operand returned from this function will *not be valid* + /// after the current enclosing `ExprKind::Scope` has ended, so + /// please do *not* return it from functions to avoid bad + /// miscompiles. 
+ /// + /// # Parameters of unsized types + /// + /// We tweak the handling of parameters of unsized type slightly to avoid the need to create a + /// local variable of unsized type. For example, consider this program: + /// + /// ``` + /// #![feature(unsized_locals, unsized_fn_params)] + /// # use core::fmt::Debug; + /// fn foo(p: dyn Debug) { dbg!(p); } + /// + /// fn bar(box_p: Box<dyn Debug>) { foo(*box_p); } + /// ``` + /// + /// Ordinarily, for sized types, we would compile the call `foo(*p)` like so: + /// + /// ```ignore (illustrative) + /// let tmp0 = *box_p; // tmp0 would be the operand returned by this function call + /// foo(tmp0) + /// ``` + /// + /// But because the parameter to `foo` is of the unsized type `dyn Debug`, and because it is + /// being moved the deref of a box, we compile it slightly differently. The temporary `tmp0` + /// that we create *stores the entire box*, and the parameter to the call itself will be + /// `*tmp0`: + /// + /// ```ignore (illustrative) + /// let tmp0 = box_p; call foo(*tmp0) + /// ``` + /// + /// This way, the temporary `tmp0` that we create has type `Box<dyn Debug>`, which is sized. + /// The value passed to the call (`*tmp0`) still has the `dyn Debug` type -- but the way that + /// calls are compiled means that this parameter will be passed "by reference", meaning that we + /// will actually provide a pointer to the interior of the box, and not move the `dyn Debug` + /// value to the stack. + /// + /// See #68304 for more details. + pub(crate) fn as_local_call_operand( + &mut self, + block: BasicBlock, + expr: ExprId, + ) -> BlockAnd<Operand<'tcx>> { + self.as_call_operand(block, self.local_temp_lifetime(), expr) + } + + /// Compile `expr` into a value that can be used as an operand. + /// If `expr` is a place like `x`, this will introduce a + /// temporary `tmp = x`, so that we capture the value of `x` at + /// this time. 
+ /// + /// If we end up needing to create a temporary, then we will use + /// `local_info` as its `LocalInfo`, unless `as_temporary` + /// has already assigned it a non-`None` `LocalInfo`. + /// Normally, you should use `None` for `local_info` + /// + /// The operand is known to be live until the end of `scope`. + /// + /// Like `as_local_call_operand`, except that the argument will + /// not be valid once `scope` ends. + #[instrument(level = "debug", skip(self, scope))] + pub(crate) fn as_operand( + &mut self, + mut block: BasicBlock, + scope: TempLifetime, + expr_id: ExprId, + local_info: LocalInfo<'tcx>, + needs_temporary: NeedsTemporary, + ) -> BlockAnd<Operand<'tcx>> { + let this = self; + + let expr = &this.thir[expr_id]; + if let ExprKind::Scope { region_scope, lint_level, value } = expr.kind { + let source_info = this.source_info(expr.span); + let region_scope = (region_scope, source_info); + return this.in_scope(region_scope, lint_level, |this| { + this.as_operand(block, scope, value, local_info, needs_temporary) + }); + } + + let category = Category::of(&expr.kind).unwrap(); + debug!(?category, ?expr.kind); + match category { + Category::Constant + if matches!(needs_temporary, NeedsTemporary::No) + || !expr.ty.needs_drop(this.tcx, this.typing_env()) => + { + let constant = this.as_constant(expr); + block.and(Operand::Constant(Box::new(constant))) + } + Category::Constant | Category::Place | Category::Rvalue(..) => { + let operand = unpack!(block = this.as_temp(block, scope, expr_id, Mutability::Mut)); + // Overwrite temp local info if we have something more interesting to record. 
+ if !matches!(local_info, LocalInfo::Boring) { + let decl_info = + this.local_decls[operand].local_info.as_mut().assert_crate_local(); + if let LocalInfo::Boring | LocalInfo::BlockTailTemp(_) = **decl_info { + **decl_info = local_info; + } + } + block.and(Operand::Move(Place::from(operand))) + } + } + } + + pub(crate) fn as_call_operand( + &mut self, + mut block: BasicBlock, + scope: TempLifetime, + expr_id: ExprId, + ) -> BlockAnd<Operand<'tcx>> { + let this = self; + let expr = &this.thir[expr_id]; + debug!("as_call_operand(block={:?}, expr={:?})", block, expr); + + if let ExprKind::Scope { region_scope, lint_level, value } = expr.kind { + let source_info = this.source_info(expr.span); + let region_scope = (region_scope, source_info); + return this.in_scope(region_scope, lint_level, |this| { + this.as_call_operand(block, scope, value) + }); + } + + let tcx = this.tcx; + + if tcx.features().unsized_fn_params() { + let ty = expr.ty; + if !ty.is_sized(tcx, this.typing_env()) { + // !sized means !copy, so this is an unsized move + assert!(!tcx.type_is_copy_modulo_regions(this.typing_env(), ty)); + + // As described above, detect the case where we are passing a value of unsized + // type, and that value is coming from the deref of a box. + if let ExprKind::Deref { arg } = expr.kind { + // Generate let tmp0 = arg0 + let operand = unpack!(block = this.as_temp(block, scope, arg, Mutability::Mut)); + + // Return the operand *tmp0 to be used as the call argument + let place = Place { + local: operand, + projection: tcx.mk_place_elems(&[PlaceElem::Deref]), + }; + + return block.and(Operand::Move(place)); + } + } + } + + this.as_operand(block, scope, expr_id, LocalInfo::Boring, NeedsTemporary::Maybe) + } +} diff --git a/compiler/rustc_mir_build/src/builder/expr/as_place.rs b/compiler/rustc_mir_build/src/builder/expr/as_place.rs new file mode 100644 index 00000000000..01aec70f437 --- /dev/null +++ b/compiler/rustc_mir_build/src/builder/expr/as_place.rs @@ -0,0 +1,832 @@ +//! 
See docs in build/expr/mod.rs + +use std::assert_matches::assert_matches; +use std::iter; + +use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx}; +use rustc_hir::def_id::LocalDefId; +use rustc_middle::hir::place::{Projection as HirProjection, ProjectionKind as HirProjectionKind}; +use rustc_middle::mir::AssertKind::BoundsCheck; +use rustc_middle::mir::*; +use rustc_middle::thir::*; +use rustc_middle::ty::{self, AdtDef, CanonicalUserTypeAnnotation, Ty, Variance}; +use rustc_middle::{bug, span_bug}; +use rustc_span::{DesugaringKind, Span}; +use tracing::{debug, instrument, trace}; + +use crate::builder::ForGuard::{OutsideGuard, RefWithinGuard}; +use crate::builder::expr::category::Category; +use crate::builder::{BlockAnd, BlockAndExtension, Builder, Capture, CaptureMap}; + +/// The "outermost" place that holds this value. +#[derive(Copy, Clone, Debug, PartialEq)] +pub(crate) enum PlaceBase { + /// Denotes the start of a `Place`. + Local(Local), + + /// When building place for an expression within a closure, the place might start off a + /// captured path. When `capture_disjoint_fields` is enabled, we might not know the capture + /// index (within the desugared closure) of the captured path until most of the projections + /// are applied. We use `PlaceBase::Upvar` to keep track of the root variable off of which the + /// captured path starts, the closure the capture belongs to and the trait the closure + /// implements. + /// + /// Once we have figured out the capture index, we can convert the place builder to start from + /// `PlaceBase::Local`. 
+ /// + /// Consider the following example + /// ```rust + /// let t = (((10, 10), 10), 10); + /// + /// let c = || { + /// println!("{}", t.0.0.0); + /// }; + /// ``` + /// Here the THIR expression for `t.0.0.0` will be something like + /// + /// ```ignore (illustrative) + /// * Field(0) + /// * Field(0) + /// * Field(0) + /// * UpvarRef(t) + /// ``` + /// + /// When `capture_disjoint_fields` is enabled, `t.0.0.0` is captured and we won't be able to + /// figure out that it is captured until all the `Field` projections are applied. + Upvar { + /// HirId of the upvar + var_hir_id: LocalVarId, + /// DefId of the closure + closure_def_id: LocalDefId, + }, +} + +/// `PlaceBuilder` is used to create places during MIR construction. It allows you to "build up" a +/// place by pushing more and more projections onto the end, and then convert the final set into a +/// place using the `to_place` method. +/// +/// This is used internally when building a place for an expression like `a.b.c`. The fields `b` +/// and `c` can be progressively pushed onto the place builder that is created when converting `a`. +#[derive(Clone, Debug, PartialEq)] +pub(in crate::builder) struct PlaceBuilder<'tcx> { + base: PlaceBase, + projection: Vec<PlaceElem<'tcx>>, +} + +/// Given a list of MIR projections, convert them to list of HIR ProjectionKind. +/// The projections are truncated to represent a path that might be captured by a +/// closure/coroutine. This implies the vector returned from this function doesn't contain +/// ProjectionElems `Downcast`, `ConstantIndex`, `Index`, or `Subslice` because those will never be +/// part of a path that is captured by a closure. We stop applying projections once we see the first +/// projection that isn't captured by a closure. 
+fn convert_to_hir_projections_and_truncate_for_capture( + mir_projections: &[PlaceElem<'_>], +) -> Vec<HirProjectionKind> { + let mut hir_projections = Vec::new(); + let mut variant = None; + + for mir_projection in mir_projections { + let hir_projection = match mir_projection { + ProjectionElem::Deref => HirProjectionKind::Deref, + ProjectionElem::Field(field, _) => { + let variant = variant.unwrap_or(FIRST_VARIANT); + HirProjectionKind::Field(*field, variant) + } + ProjectionElem::Downcast(.., idx) => { + // We don't expect to see multi-variant enums here, as earlier + // phases will have truncated them already. However, there can + // still be downcasts, thanks to single-variant enums. + // We keep track of VariantIdx so we can use this information + // if the next ProjectionElem is a Field. + variant = Some(*idx); + continue; + } + // These do not affect anything, they just make sure we know the right type. + ProjectionElem::OpaqueCast(_) | ProjectionElem::Subtype(..) => continue, + ProjectionElem::Index(..) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } => { + // We don't capture array-access projections. + // We can stop here as arrays are captured completely. + break; + } + }; + variant = None; + hir_projections.push(hir_projection); + } + + hir_projections +} + +/// Return true if the `proj_possible_ancestor` represents an ancestor path +/// to `proj_capture` or `proj_possible_ancestor` is same as `proj_capture`, +/// assuming they both start off of the same root variable. +/// +/// **Note:** It's the caller's responsibility to ensure that both lists of projections +/// start off of the same root variable. +/// +/// Eg: 1. `foo.x` which is represented using `projections=[Field(x)]` is an ancestor of +/// `foo.x.y` which is represented using `projections=[Field(x), Field(y)]`. +/// Note both `foo.x` and `foo.x.y` start off of the same root variable `foo`. +/// 2. 
Since we only look at the projections here function will return `bar.x` as a valid +/// ancestor of `foo.x.y`. It's the caller's responsibility to ensure that both projections +/// list are being applied to the same root variable. +fn is_ancestor_or_same_capture( + proj_possible_ancestor: &[HirProjectionKind], + proj_capture: &[HirProjectionKind], +) -> bool { + // We want to make sure `is_ancestor_or_same_capture("x.0.0", "x.0")` to return false. + // Therefore we can't just check if all projections are same in the zipped iterator below. + if proj_possible_ancestor.len() > proj_capture.len() { + return false; + } + + iter::zip(proj_possible_ancestor, proj_capture).all(|(a, b)| a == b) +} + +/// Given a closure, returns the index of a capture within the desugared closure struct and the +/// `ty::CapturedPlace` which is the ancestor of the Place represented using the `var_hir_id` +/// and `projection`. +/// +/// Note there will be at most one ancestor for any given Place. +/// +/// Returns None, when the ancestor is not found. 
+fn find_capture_matching_projections<'a, 'tcx>( + upvars: &'a CaptureMap<'tcx>, + var_hir_id: LocalVarId, + projections: &[PlaceElem<'tcx>], +) -> Option<(usize, &'a Capture<'tcx>)> { + let hir_projections = convert_to_hir_projections_and_truncate_for_capture(projections); + + upvars.get_by_key_enumerated(var_hir_id.0).find(|(_, capture)| { + let possible_ancestor_proj_kinds: Vec<_> = + capture.captured_place.place.projections.iter().map(|proj| proj.kind).collect(); + is_ancestor_or_same_capture(&possible_ancestor_proj_kinds, &hir_projections) + }) +} + +/// Takes an upvar place and tries to resolve it into a `PlaceBuilder` +/// with `PlaceBase::Local` +#[instrument(level = "trace", skip(cx), ret)] +fn to_upvars_resolved_place_builder<'tcx>( + cx: &Builder<'_, 'tcx>, + var_hir_id: LocalVarId, + closure_def_id: LocalDefId, + projection: &[PlaceElem<'tcx>], +) -> Option<PlaceBuilder<'tcx>> { + let Some((capture_index, capture)) = + find_capture_matching_projections(&cx.upvars, var_hir_id, projection) + else { + let closure_span = cx.tcx.def_span(closure_def_id); + if !enable_precise_capture(closure_span) { + bug!( + "No associated capture found for {:?}[{:#?}] even though \ + capture_disjoint_fields isn't enabled", + var_hir_id, + projection + ) + } else { + debug!("No associated capture found for {:?}[{:#?}]", var_hir_id, projection,); + } + return None; + }; + + // Access the capture by accessing the field within the Closure struct. + let capture_info = &cx.upvars[capture_index]; + + let mut upvar_resolved_place_builder = PlaceBuilder::from(capture_info.use_place); + + // We used some of the projections to build the capture itself, + // now we apply the remaining to the upvar resolved place. 
+ trace!(?capture.captured_place, ?projection); + let remaining_projections = strip_prefix( + capture.captured_place.place.base_ty, + projection, + &capture.captured_place.place.projections, + ); + upvar_resolved_place_builder.projection.extend(remaining_projections); + + Some(upvar_resolved_place_builder) +} + +/// Returns projections remaining after stripping an initial prefix of HIR +/// projections. +/// +/// Supports only HIR projection kinds that represent a path that might be +/// captured by a closure or a coroutine, i.e., an `Index` or a `Subslice` +/// projection kinds are unsupported. +fn strip_prefix<'a, 'tcx>( + mut base_ty: Ty<'tcx>, + projections: &'a [PlaceElem<'tcx>], + prefix_projections: &[HirProjection<'tcx>], +) -> impl Iterator<Item = PlaceElem<'tcx>> + 'a { + let mut iter = projections + .iter() + .copied() + // Filter out opaque casts, they are unnecessary in the prefix. + .filter(|elem| !matches!(elem, ProjectionElem::OpaqueCast(..))); + for projection in prefix_projections { + match projection.kind { + HirProjectionKind::Deref => { + assert_matches!(iter.next(), Some(ProjectionElem::Deref)); + } + HirProjectionKind::Field(..) 
=> { + if base_ty.is_enum() { + assert_matches!(iter.next(), Some(ProjectionElem::Downcast(..))); + } + assert_matches!(iter.next(), Some(ProjectionElem::Field(..))); + } + HirProjectionKind::OpaqueCast => { + assert_matches!(iter.next(), Some(ProjectionElem::OpaqueCast(..))); + } + HirProjectionKind::Index | HirProjectionKind::Subslice => { + bug!("unexpected projection kind: {:?}", projection); + } + } + base_ty = projection.ty; + } + iter +} + +impl<'tcx> PlaceBuilder<'tcx> { + pub(in crate::builder) fn to_place(&self, cx: &Builder<'_, 'tcx>) -> Place<'tcx> { + self.try_to_place(cx).unwrap_or_else(|| match self.base { + PlaceBase::Local(local) => span_bug!( + cx.local_decls[local].source_info.span, + "could not resolve local: {local:#?} + {:?}", + self.projection + ), + PlaceBase::Upvar { var_hir_id, closure_def_id: _ } => span_bug!( + cx.tcx.hir().span(var_hir_id.0), + "could not resolve upvar: {var_hir_id:?} + {:?}", + self.projection + ), + }) + } + + /// Creates a `Place` or returns `None` if an upvar cannot be resolved + pub(in crate::builder) fn try_to_place(&self, cx: &Builder<'_, 'tcx>) -> Option<Place<'tcx>> { + let resolved = self.resolve_upvar(cx); + let builder = resolved.as_ref().unwrap_or(self); + let PlaceBase::Local(local) = builder.base else { return None }; + let projection = cx.tcx.mk_place_elems(&builder.projection); + Some(Place { local, projection }) + } + + /// Attempts to resolve the `PlaceBuilder`. + /// Returns `None` if this is not an upvar. + /// + /// Upvars resolve may fail for a `PlaceBuilder` when attempting to + /// resolve a disjoint field whose root variable is not captured + /// (destructured assignments) or when attempting to resolve a root + /// variable (discriminant matching with only wildcard arm) that is + /// not captured. This can happen because the final mir that will be + /// generated doesn't require a read for this place. Failures will only + /// happen inside closures. 
+ pub(in crate::builder) fn resolve_upvar( + &self, + cx: &Builder<'_, 'tcx>, + ) -> Option<PlaceBuilder<'tcx>> { + let PlaceBase::Upvar { var_hir_id, closure_def_id } = self.base else { + return None; + }; + to_upvars_resolved_place_builder(cx, var_hir_id, closure_def_id, &self.projection) + } + + pub(crate) fn base(&self) -> PlaceBase { + self.base + } + + pub(crate) fn projection(&self) -> &[PlaceElem<'tcx>] { + &self.projection + } + + pub(crate) fn field(self, f: FieldIdx, ty: Ty<'tcx>) -> Self { + self.project(PlaceElem::Field(f, ty)) + } + + pub(crate) fn deref(self) -> Self { + self.project(PlaceElem::Deref) + } + + pub(crate) fn downcast(self, adt_def: AdtDef<'tcx>, variant_index: VariantIdx) -> Self { + self.project(PlaceElem::Downcast(Some(adt_def.variant(variant_index).name), variant_index)) + } + + fn index(self, index: Local) -> Self { + self.project(PlaceElem::Index(index)) + } + + pub(crate) fn project(mut self, elem: PlaceElem<'tcx>) -> Self { + self.projection.push(elem); + self + } + + /// Same as `.clone().project(..)` but more efficient + pub(crate) fn clone_project(&self, elem: PlaceElem<'tcx>) -> Self { + Self { + base: self.base, + projection: Vec::from_iter(self.projection.iter().copied().chain([elem])), + } + } +} + +impl<'tcx> From<Local> for PlaceBuilder<'tcx> { + fn from(local: Local) -> Self { + Self { base: PlaceBase::Local(local), projection: Vec::new() } + } +} + +impl<'tcx> From<PlaceBase> for PlaceBuilder<'tcx> { + fn from(base: PlaceBase) -> Self { + Self { base, projection: Vec::new() } + } +} + +impl<'tcx> From<Place<'tcx>> for PlaceBuilder<'tcx> { + fn from(p: Place<'tcx>) -> Self { + Self { base: PlaceBase::Local(p.local), projection: p.projection.to_vec() } + } +} + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Compile `expr`, yielding a place that we can move from etc. + /// + /// WARNING: Any user code might: + /// * Invalidate any slice bounds checks performed. + /// * Change the address that this `Place` refers to. 
+ /// * Modify the memory that this place refers to. + /// * Invalidate the memory that this place refers to, this will be caught + /// by borrow checking. + /// + /// Extra care is needed if any user code is allowed to run between calling + /// this method and using it, as is the case for `match` and index + /// expressions. + pub(crate) fn as_place( + &mut self, + mut block: BasicBlock, + expr_id: ExprId, + ) -> BlockAnd<Place<'tcx>> { + let place_builder = unpack!(block = self.as_place_builder(block, expr_id)); + block.and(place_builder.to_place(self)) + } + + /// This is used when constructing a compound `Place`, so that we can avoid creating + /// intermediate `Place` values until we know the full set of projections. + pub(crate) fn as_place_builder( + &mut self, + block: BasicBlock, + expr_id: ExprId, + ) -> BlockAnd<PlaceBuilder<'tcx>> { + self.expr_as_place(block, expr_id, Mutability::Mut, None) + } + + /// Compile `expr`, yielding a place that we can move from etc. + /// Mutability note: The caller of this method promises only to read from the resulting + /// place. The place itself may or may not be mutable: + /// * If this expr is a place expr like a.b, then we will return that place. + /// * Otherwise, a temporary is created: in that event, it will be an immutable temporary. + pub(crate) fn as_read_only_place( + &mut self, + mut block: BasicBlock, + expr_id: ExprId, + ) -> BlockAnd<Place<'tcx>> { + let place_builder = unpack!(block = self.as_read_only_place_builder(block, expr_id)); + block.and(place_builder.to_place(self)) + } + + /// This is used when constructing a compound `Place`, so that we can avoid creating + /// intermediate `Place` values until we know the full set of projections. + /// Mutability note: The caller of this method promises only to read from the resulting + /// place. The place itself may or may not be mutable: + /// * If this expr is a place expr like a.b, then we will return that place. 
    /// * Otherwise, a temporary is created: in that event, it will be an immutable temporary.
    fn as_read_only_place_builder(
        &mut self,
        block: BasicBlock,
        expr_id: ExprId,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        self.expr_as_place(block, expr_id, Mutability::Not, None)
    }

    /// Lowers a THIR expression to a [`PlaceBuilder`], recursing through place
    /// projections (field access, deref, indexing, variable/upvar references)
    /// until a base is reached. Expressions that are not places fall through to
    /// `as_temp`, which materializes the value into a fresh temporary.
    ///
    /// `fake_borrow_temps`, when `Some`, collects the fake-borrow locals created
    /// by nested index expressions so the outermost index can read them
    /// (see `lower_index_expression`).
    fn expr_as_place(
        &mut self,
        mut block: BasicBlock,
        expr_id: ExprId,
        mutability: Mutability,
        fake_borrow_temps: Option<&mut Vec<Local>>,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        let expr = &self.thir[expr_id];
        debug!("expr_as_place(block={:?}, expr={:?}, mutability={:?})", block, expr, mutability);

        let this = self;
        let expr_span = expr.span;
        let source_info = this.source_info(expr_span);
        match expr.kind {
            ExprKind::Scope { region_scope, lint_level, value } => {
                this.in_scope((region_scope, source_info), lint_level, |this| {
                    this.expr_as_place(block, value, mutability, fake_borrow_temps)
                })
            }
            ExprKind::Field { lhs, variant_index, name } => {
                let lhs_expr = &this.thir[lhs];
                let mut place_builder =
                    unpack!(block = this.expr_as_place(block, lhs, mutability, fake_borrow_temps,));
                // Field access on an enum must go through a downcast projection
                // to the variant that owns the field.
                if let ty::Adt(adt_def, _) = lhs_expr.ty.kind() {
                    if adt_def.is_enum() {
                        place_builder = place_builder.downcast(*adt_def, variant_index);
                    }
                }
                block.and(place_builder.field(name, expr.ty))
            }
            ExprKind::Deref { arg } => {
                let place_builder =
                    unpack!(block = this.expr_as_place(block, arg, mutability, fake_borrow_temps,));
                block.and(place_builder.deref())
            }
            ExprKind::Index { lhs, index } => this.lower_index_expression(
                block,
                lhs,
                index,
                mutability,
                fake_borrow_temps,
                expr.temp_lifetime,
                expr_span,
                source_info,
            ),
            ExprKind::UpvarRef { closure_def_id, var_hir_id } => {
                this.lower_captured_upvar(block, closure_def_id.expect_local(), var_hir_id)
            }

            ExprKind::VarRef { id } => {
                let place_builder = if this.is_bound_var_in_guard(id) {
                    // Within a match guard, bound variables are accessed through
                    // an extra reference, hence the `.deref()`.
                    let index = this.var_local_id(id, RefWithinGuard);
                    PlaceBuilder::from(index).deref()
                } else {
                    let index = this.var_local_id(id, OutsideGuard);
                    PlaceBuilder::from(index)
                };
                block.and(place_builder)
            }

            ExprKind::PlaceTypeAscription { source, ref user_ty, user_ty_span } => {
                let place_builder = unpack!(
                    block = this.expr_as_place(block, source, mutability, fake_borrow_temps,)
                );
                if let Some(user_ty) = user_ty {
                    let ty_source_info = this.source_info(user_ty_span);
                    let annotation_index =
                        this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
                            span: user_ty_span,
                            user_ty: user_ty.clone(),
                            inferred_ty: expr.ty,
                        });

                    let place = place_builder.to_place(this);
                    this.cfg.push(block, Statement {
                        source_info: ty_source_info,
                        kind: StatementKind::AscribeUserType(
                            Box::new((place, UserTypeProjection {
                                base: annotation_index,
                                projs: vec![],
                            })),
                            Variance::Invariant,
                        ),
                    });
                }
                block.and(place_builder)
            }
            ExprKind::ValueTypeAscription { source, ref user_ty, user_ty_span } => {
                let source_expr = &this.thir[source];
                let temp = unpack!(
                    block = this.as_temp(block, source_expr.temp_lifetime, source, mutability)
                );
                if let Some(user_ty) = user_ty {
                    let ty_source_info = this.source_info(user_ty_span);
                    let annotation_index =
                        this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
                            span: user_ty_span,
                            user_ty: user_ty.clone(),
                            inferred_ty: expr.ty,
                        });
                    this.cfg.push(block, Statement {
                        source_info: ty_source_info,
                        kind: StatementKind::AscribeUserType(
                            Box::new((Place::from(temp), UserTypeProjection {
                                base: annotation_index,
                                projs: vec![],
                            })),
                            Variance::Invariant,
                        ),
                    });
                }
                block.and(PlaceBuilder::from(temp))
            }

            ExprKind::Array { .. }
            | ExprKind::Tuple { .. }
            | ExprKind::Adt { .. }
            | ExprKind::Closure { .. }
            | ExprKind::Unary { .. }
            | ExprKind::Binary { .. }
            | ExprKind::LogicalOp { .. }
            | ExprKind::Box { .. }
            | ExprKind::Cast { .. }
            | ExprKind::Use { .. }
            | ExprKind::NeverToAny { .. }
            | ExprKind::PointerCoercion { .. }
            | ExprKind::Repeat { .. }
            | ExprKind::Borrow { .. }
            | ExprKind::RawBorrow { .. }
            | ExprKind::Match { .. }
            | ExprKind::If { .. }
            | ExprKind::Loop { .. }
            | ExprKind::Block { .. }
            | ExprKind::Let { .. }
            | ExprKind::Assign { .. }
            | ExprKind::AssignOp { .. }
            | ExprKind::Break { .. }
            | ExprKind::Continue { .. }
            | ExprKind::Return { .. }
            | ExprKind::Become { .. }
            | ExprKind::Literal { .. }
            | ExprKind::NamedConst { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::ConstParam { .. }
            | ExprKind::ConstBlock { .. }
            | ExprKind::StaticRef { .. }
            | ExprKind::InlineAsm { .. }
            | ExprKind::OffsetOf { .. }
            | ExprKind::Yield { .. }
            | ExprKind::ThreadLocalRef(_)
            | ExprKind::Call { .. } => {
                // these are not places, so we need to make a temporary.
                debug_assert!(!matches!(Category::of(&expr.kind), Some(Category::Place)));
                let temp =
                    unpack!(block = this.as_temp(block, expr.temp_lifetime, expr_id, mutability));
                block.and(PlaceBuilder::from(temp))
            }
        }
    }

    /// Lower a captured upvar. Note we might not know the actual capture index,
    /// so we create a place starting from `PlaceBase::Upvar`, which will be resolved
    /// once all projections that allow us to identify a capture have been applied.
    fn lower_captured_upvar(
        &mut self,
        block: BasicBlock,
        closure_def_id: LocalDefId,
        var_hir_id: LocalVarId,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        block.and(PlaceBuilder::from(PlaceBase::Upvar { var_hir_id, closure_def_id }))
    }

    /// Lower an index expression
    ///
    /// This has two complications:
    ///
    /// * We need to do a bounds check.
    /// * We need to ensure that the bounds check can't be invalidated using an
    ///   expression like `x[1][{x = y; 2}]`. We use fake borrows here to ensure
    ///   that this is the case.
    fn lower_index_expression(
        &mut self,
        mut block: BasicBlock,
        base: ExprId,
        index: ExprId,
        mutability: Mutability,
        fake_borrow_temps: Option<&mut Vec<Local>>,
        temp_lifetime: TempLifetime,
        expr_span: Span,
        source_info: SourceInfo,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        // `fake_borrow_temps` is `None` only for the outermost index in a chain
        // like `x[a][b]`; nested indexing passes the shared collection down.
        let base_fake_borrow_temps = &mut Vec::new();
        let is_outermost_index = fake_borrow_temps.is_none();
        let fake_borrow_temps = fake_borrow_temps.unwrap_or(base_fake_borrow_temps);

        let base_place =
            unpack!(block = self.expr_as_place(block, base, mutability, Some(fake_borrow_temps),));

        // Making this a *fresh* temporary means we do not have to worry about
        // the index changing later: Nothing will ever change this temporary.
        // The "retagging" transformation (for Stacked Borrows) relies on this.
        let idx = unpack!(block = self.as_temp(block, temp_lifetime, index, Mutability::Not));

        block = self.bounds_check(block, &base_place, idx, expr_span, source_info);

        if is_outermost_index {
            // All nested indices are evaluated; keep the fake borrows live
            // across them with fake reads.
            self.read_fake_borrows(block, fake_borrow_temps, source_info)
        } else {
            // An enclosing index expression still has to be evaluated; fake-borrow
            // the base so the bounds check just emitted can't be invalidated.
            self.add_fake_borrows_of_base(
                base_place.to_place(self),
                block,
                fake_borrow_temps,
                expr_span,
                source_info,
            );
        }

        block.and(base_place.index(idx))
    }

    /// Given a place that's either an array or a slice, returns an operand
    /// with the length of the array/slice.
    ///
    /// For arrays it'll be `Operand::Constant` with the actual length;
    /// For slices it'll be `Operand::Move` of a local using `PtrMetadata`.
    fn len_of_slice_or_array(
        &mut self,
        block: BasicBlock,
        place: Place<'tcx>,
        span: Span,
        source_info: SourceInfo,
    ) -> Operand<'tcx> {
        let place_ty = place.ty(&self.local_decls, self.tcx).ty;
        let usize_ty = self.tcx.types.usize;

        match place_ty.kind() {
            ty::Array(_elem_ty, len_const) => {
                let ty_const = if let Some((_, len_ty)) = len_const.try_to_valtree()
                    && len_ty != self.tcx.types.usize
                {
                    // Bad const generics can give us a constant from the type that's
                    // not actually a `usize`, so in that case give an error instead.
                    // FIXME: It'd be nice if the type checker made sure this wasn't
                    // possible, instead.
                    let err = self.tcx.dcx().span_delayed_bug(
                        span,
                        format!(
                            "Array length should have already been a type error, as it's {len_ty:?}"
                        ),
                    );
                    ty::Const::new_error(self.tcx, err)
                } else {
                    // We know how long an array is, so just use that as a constant
                    // directly -- no locals needed. We do need one statement so
                    // that borrow- and initialization-checking consider it used,
                    // though. FIXME: Do we really *need* to count this as a use?
                    // Could partial array tracking work off something else instead?
                    self.cfg.push_fake_read(block, source_info, FakeReadCause::ForIndex, place);
                    *len_const
                };

                let const_ = Const::from_ty_const(ty_const, usize_ty, self.tcx);
                Operand::Constant(Box::new(ConstOperand { span, user_ty: None, const_ }))
            }
            ty::Slice(_elem_ty) => {
                let ptr_or_ref = if let [PlaceElem::Deref] = place.projection[..]
                    && let local_ty = self.local_decls[place.local].ty
                    && local_ty.is_trivially_pure_clone_copy()
                {
                    // It's extremely common that we have something that can be
                    // directly passed to `PtrMetadata`, so avoid an unnecessary
                    // temporary and statement in those cases. Note that we can
                    // only do that for `Copy` types -- not `&mut [_]` -- because
                    // the MIR we're building here needs to pass NLL later.
                    Operand::Copy(Place::from(place.local))
                } else {
                    let len_span = self.tcx.with_stable_hashing_context(|hcx| {
                        let span = source_info.span;
                        span.mark_with_reason(
                            None,
                            DesugaringKind::IndexBoundsCheckReborrow,
                            span.edition(),
                            hcx,
                        )
                    });
                    // Take a raw pointer to the slice so `PtrMetadata` can
                    // extract its length without consuming the place.
                    let ptr_ty = Ty::new_imm_ptr(self.tcx, place_ty);
                    let slice_ptr = self.temp(ptr_ty, span);
                    self.cfg.push_assign(
                        block,
                        SourceInfo { span: len_span, ..source_info },
                        slice_ptr,
                        Rvalue::RawPtr(Mutability::Not, place),
                    );
                    Operand::Move(slice_ptr)
                };

                let len = self.temp(usize_ty, span);
                self.cfg.push_assign(
                    block,
                    source_info,
                    len,
                    Rvalue::UnaryOp(UnOp::PtrMetadata, ptr_or_ref),
                );

                Operand::Move(len)
            }
            _ => {
                span_bug!(span, "len called on place of type {place_ty:?}")
            }
        }
    }

    /// Emits `assert!(index < len(slice), ...)` for an indexing operation:
    /// computes the length, compares, and terminates `block` with an
    /// `Assert` whose success edge is the returned block.
    fn bounds_check(
        &mut self,
        block: BasicBlock,
        slice: &PlaceBuilder<'tcx>,
        index: Local,
        expr_span: Span,
        source_info: SourceInfo,
    ) -> BasicBlock {
        let slice = slice.to_place(self);

        // len = len(slice)
        let len = self.len_of_slice_or_array(block, slice, expr_span, source_info);

        // lt = idx < len
        let bool_ty = self.tcx.types.bool;
        let lt = self.temp(bool_ty, expr_span);
        self.cfg.push_assign(
            block,
            source_info,
            lt,
            Rvalue::BinaryOp(
                BinOp::Lt,
                Box::new((Operand::Copy(Place::from(index)), len.to_copy())),
            ),
        );
        let msg = BoundsCheck { len, index: Operand::Copy(Place::from(index)) };

        // assert!(lt, "...")
        self.assert(block, Operand::Move(lt), true, msg, expr_span)
    }

    /// Fake-borrows every dereferenced pointer within `base_place` so that a
    /// bounds check on it cannot be invalidated while an enclosing index
    /// expression is still being evaluated. The created locals are appended to
    /// `fake_borrow_temps` for `read_fake_borrows` to consume later.
    fn add_fake_borrows_of_base(
        &mut self,
        base_place: Place<'tcx>,
        block: BasicBlock,
        fake_borrow_temps: &mut Vec<Local>,
        expr_span: Span,
        source_info: SourceInfo,
    ) {
        let tcx = self.tcx;

        let place_ty = base_place.ty(&self.local_decls, tcx);
        if let ty::Slice(_) = place_ty.ty.kind() {
            // We need to create fake borrows to ensure that the bounds
            // check that we just did stays valid. Since we can't assign to
            // unsized values, we only need to ensure that none of the
            // pointers in the base place are modified.
            for (base_place, elem) in base_place.iter_projections().rev() {
                match elem {
                    ProjectionElem::Deref => {
                        let fake_borrow_deref_ty = base_place.ty(&self.local_decls, tcx).ty;
                        let fake_borrow_ty =
                            Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, fake_borrow_deref_ty);
                        let fake_borrow_temp =
                            self.local_decls.push(LocalDecl::new(fake_borrow_ty, expr_span));
                        let projection = tcx.mk_place_elems(base_place.projection);
                        self.cfg.push_assign(
                            block,
                            source_info,
                            fake_borrow_temp.into(),
                            Rvalue::Ref(
                                tcx.lifetimes.re_erased,
                                BorrowKind::Fake(FakeBorrowKind::Shallow),
                                Place { local: base_place.local, projection },
                            ),
                        );
                        fake_borrow_temps.push(fake_borrow_temp);
                    }
                    ProjectionElem::Index(_) => {
                        let index_ty = base_place.ty(&self.local_decls, tcx);
                        match index_ty.ty.kind() {
                            // The previous index expression has already
                            // done any index expressions needed here.
                            ty::Slice(_) => break,
                            ty::Array(..) => (),
                            _ => bug!("unexpected index base"),
                        }
                    }
                    ProjectionElem::Field(..)
                    | ProjectionElem::Downcast(..)
                    | ProjectionElem::OpaqueCast(..)
                    | ProjectionElem::Subtype(..)
                    | ProjectionElem::ConstantIndex { .. }
                    | ProjectionElem::Subslice { .. } => (),
                }
            }
        }
    }

    /// Emits a fake read of every fake-borrow temporary, keeping those borrows
    /// live across all of the index expressions of an indexing chain.
    fn read_fake_borrows(
        &mut self,
        bb: BasicBlock,
        fake_borrow_temps: &mut Vec<Local>,
        source_info: SourceInfo,
    ) {
        // All indexes have been evaluated now, read all of the
        // fake borrows so that they are live across those index
        // expressions.
        for temp in fake_borrow_temps {
            self.cfg.push_fake_read(bb, source_info, FakeReadCause::ForIndex, Place::from(*temp));
        }
    }
}

/// Precise capture is enabled if user is using Rust Edition 2021 or higher.
fn enable_precise_capture(closure_span: Span) -> bool {
    closure_span.at_least_rust_2021()
}
diff --git a/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs
new file mode 100644
index 00000000000..9961c2488ef
--- /dev/null
+++ b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs
@@ -0,0 +1,840 @@
//! See docs in `builder/expr/mod.rs`.

use rustc_abi::{BackendRepr, FieldIdx, Primitive};
use rustc_hir::lang_items::LangItem;
use rustc_index::{Idx, IndexVec};
use rustc_middle::bug;
use rustc_middle::middle::region;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::*;
use rustc_middle::thir::*;
use rustc_middle::ty::cast::{CastTy, mir_cast_kind};
use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::util::IntTypeExt;
use rustc_middle::ty::{self, Ty, UpvarArgs};
use rustc_span::source_map::Spanned;
use rustc_span::{DUMMY_SP, Span};
use tracing::debug;

use crate::builder::expr::as_place::PlaceBase;
use crate::builder::expr::category::{Category, RvalueFunc};
use crate::builder::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary};

impl<'a, 'tcx> Builder<'a, 'tcx> {
    /// Returns an rvalue suitable for use until the end of the current
    /// scope expression.
    ///
    /// The operand returned from this function will *not be valid* after
    /// an ExprKind::Scope is passed, so please do *not* return it from
    /// functions to avoid bad miscompiles.
    pub(crate) fn as_local_rvalue(
        &mut self,
        block: BasicBlock,
        expr_id: ExprId,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let local_scope = self.local_scope();
        self.as_rvalue(
            block,
            TempLifetime { temp_lifetime: Some(local_scope), backwards_incompatible: None },
            expr_id,
        )
    }

    /// Compile `expr`, yielding an rvalue.
    pub(crate) fn as_rvalue(
        &mut self,
        mut block: BasicBlock,
        scope: TempLifetime,
        expr_id: ExprId,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let this = self;
        let expr = &this.thir[expr_id];
        debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr);

        let expr_span = expr.span;
        let source_info = this.source_info(expr_span);

        match expr.kind {
            ExprKind::ThreadLocalRef(did) => block.and(Rvalue::ThreadLocalRef(did)),
            ExprKind::Scope { region_scope, lint_level, value } => {
                let region_scope = (region_scope, source_info);
                this.in_scope(region_scope, lint_level, |this| this.as_rvalue(block, scope, value))
            }
            ExprKind::Repeat { value, count } => {
                // `[e; 0]` must still evaluate (and possibly drop) `e` once;
                // that case is handled separately in `build_zero_repeat`.
                if Some(0) == count.try_to_target_usize(this.tcx) {
                    this.build_zero_repeat(block, value, scope, source_info)
                } else {
                    let value_operand = unpack!(
                        block = this.as_operand(
                            block,
                            scope,
                            value,
                            LocalInfo::Boring,
                            NeedsTemporary::No
                        )
                    );
                    block.and(Rvalue::Repeat(value_operand, count))
                }
            }
            ExprKind::Binary { op, lhs, rhs } => {
                let lhs = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        lhs,
                        LocalInfo::Boring,
                        NeedsTemporary::Maybe
                    )
                );
                let rhs = unpack!(
                    block =
                        this.as_operand(block, scope, rhs, LocalInfo::Boring, NeedsTemporary::No)
                );
                this.build_binary_op(block, op, expr_span, expr.ty, lhs, rhs)
            }
            ExprKind::Unary { op, arg } => {
                let arg = unpack!(
                    block =
                        this.as_operand(block, scope, arg, LocalInfo::Boring, NeedsTemporary::No)
                );
                // Check for -MIN on signed integers
                if this.check_overflow && op == UnOp::Neg && expr.ty.is_signed() {
                    let bool_ty = this.tcx.types.bool;

                    let minval = this.minval_literal(expr_span, expr.ty);
                    let is_min = this.temp(bool_ty, expr_span);

                    this.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((arg.to_copy(), minval))),
                    );

                    block = this.assert(
                        block,
                        Operand::Move(is_min),
                        false,
                        AssertKind::OverflowNeg(arg.to_copy()),
                        expr_span,
                    );
                }
                block.and(Rvalue::UnaryOp(op, arg))
            }
            ExprKind::Box { value } => {
                let value_ty = this.thir[value].ty;
                let tcx = this.tcx;
                let source_info = this.source_info(expr_span);

                let size = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    source_info,
                    size,
                    Rvalue::NullaryOp(NullOp::SizeOf, value_ty),
                );

                let align = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    source_info,
                    align,
                    Rvalue::NullaryOp(NullOp::AlignOf, value_ty),
                );

                // malloc some memory of suitable size and align:
                let exchange_malloc = Operand::function_handle(
                    tcx,
                    tcx.require_lang_item(LangItem::ExchangeMalloc, Some(expr_span)),
                    [],
                    expr_span,
                );
                let storage = this.temp(Ty::new_mut_ptr(tcx, tcx.types.u8), expr_span);
                let success = this.cfg.start_new_block();
                this.cfg.terminate(block, source_info, TerminatorKind::Call {
                    func: exchange_malloc,
                    args: [Spanned { node: Operand::Move(size), span: DUMMY_SP }, Spanned {
                        node: Operand::Move(align),
                        span: DUMMY_SP,
                    }]
                    .into(),
                    destination: storage,
                    target: Some(success),
                    unwind: UnwindAction::Continue,
                    call_source: CallSource::Misc,
                    fn_span: expr_span,
                });
                this.diverge_from(block);
                block = success;

                // The `Box<T>` temporary created here is not a part of the HIR,
                // and therefore is not considered during coroutine auto-trait
                // determination. See the comment about `box` at `yield_in_scope`.
                let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span));
                this.cfg.push(block, Statement {
                    source_info,
                    kind: StatementKind::StorageLive(result),
                });
                if let Some(scope) = scope.temp_lifetime {
                    // schedule a shallow free of that memory, lest we unwind:
                    this.schedule_drop_storage_and_value(expr_span, scope, result);
                }

                // Transmute `*mut u8` to the box (thus far, uninitialized):
                let box_ = Rvalue::ShallowInitBox(Operand::Move(storage), value_ty);
                this.cfg.push_assign(block, source_info, Place::from(result), box_);

                // initialize the box contents:
                block = this
                    .expr_into_dest(this.tcx.mk_place_deref(Place::from(result)), block, value)
                    .into_block();
                block.and(Rvalue::Use(Operand::Move(Place::from(result))))
            }
            ExprKind::Cast { source } => {
                let source_expr = &this.thir[source];

                // Casting an enum to an integer is equivalent to computing the discriminant and casting the
                // discriminant. Previously every backend had to repeat the logic for this operation. Now we
                // create all the steps directly in MIR with operations all backends need to support anyway.
                let (source, ty) = if let ty::Adt(adt_def, ..) = source_expr.ty.kind()
                    && adt_def.is_enum()
                {
                    let discr_ty = adt_def.repr().discr_type().to_ty(this.tcx);
                    let temp = unpack!(block = this.as_temp(block, scope, source, Mutability::Not));
                    let layout =
                        this.tcx.layout_of(this.typing_env().as_query_input(source_expr.ty));
                    let discr = this.temp(discr_ty, source_expr.span);
                    this.cfg.push_assign(
                        block,
                        source_info,
                        discr,
                        Rvalue::Discriminant(temp.into()),
                    );
                    let (op, ty) = (Operand::Move(discr), discr_ty);

                    // If the discriminant has a restricted valid range, emit an
                    // `assume` of that range so backends can optimize the cast.
                    if let BackendRepr::Scalar(scalar) = layout.unwrap().backend_repr
                        && !scalar.is_always_valid(&this.tcx)
                        && let Primitive::Int(int_width, _signed) = scalar.primitive()
                    {
                        let unsigned_ty = int_width.to_ty(this.tcx, false);
                        let unsigned_place = this.temp(unsigned_ty, expr_span);
                        this.cfg.push_assign(
                            block,
                            source_info,
                            unsigned_place,
                            Rvalue::Cast(CastKind::IntToInt, Operand::Copy(discr), unsigned_ty),
                        );

                        let bool_ty = this.tcx.types.bool;
                        let range = scalar.valid_range(&this.tcx);
                        // A wrapping valid range (start > end) is a union of two
                        // intervals, hence `BitOr` instead of `BitAnd`.
                        let merge_op =
                            if range.start <= range.end { BinOp::BitAnd } else { BinOp::BitOr };

                        let mut comparer = |range: u128, bin_op: BinOp| -> Place<'tcx> {
                            // We can use `ty::TypingEnv::fully_monomorphized()` here
                            // as we only need it to compute the layout of a primitive.
                            let range_val = Const::from_bits(
                                this.tcx,
                                range,
                                ty::TypingEnv::fully_monomorphized(),
                                unsigned_ty,
                            );
                            let lit_op = this.literal_operand(expr.span, range_val);
                            let is_bin_op = this.temp(bool_ty, expr_span);
                            this.cfg.push_assign(
                                block,
                                source_info,
                                is_bin_op,
                                Rvalue::BinaryOp(
                                    bin_op,
                                    Box::new((Operand::Copy(unsigned_place), lit_op)),
                                ),
                            );
                            is_bin_op
                        };
                        let assert_place = if range.start == 0 {
                            comparer(range.end, BinOp::Le)
                        } else {
                            let start_place = comparer(range.start, BinOp::Ge);
                            let end_place = comparer(range.end, BinOp::Le);
                            let merge_place = this.temp(bool_ty, expr_span);
                            this.cfg.push_assign(
                                block,
                                source_info,
                                merge_place,
                                Rvalue::BinaryOp(
                                    merge_op,
                                    Box::new((
                                        Operand::Move(start_place),
                                        Operand::Move(end_place),
                                    )),
                                ),
                            );
                            merge_place
                        };
                        this.cfg.push(block, Statement {
                            source_info,
                            kind: StatementKind::Intrinsic(Box::new(
                                NonDivergingIntrinsic::Assume(Operand::Move(assert_place)),
                            )),
                        });
                    }

                    (op, ty)
                } else {
                    let ty = source_expr.ty;
                    let source = unpack!(
                        block = this.as_operand(
                            block,
                            scope,
                            source,
                            LocalInfo::Boring,
                            NeedsTemporary::No
                        )
                    );
                    (source, ty)
                };
                let from_ty = CastTy::from_ty(ty);
                let cast_ty = CastTy::from_ty(expr.ty);
                debug!("ExprKind::Cast from_ty={from_ty:?}, cast_ty={:?}/{cast_ty:?}", expr.ty);
                let cast_kind = mir_cast_kind(ty, expr.ty);
                block.and(Rvalue::Cast(cast_kind, source, expr.ty))
            }
            ExprKind::PointerCoercion { cast, source, is_from_as_cast } => {
                let source = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        source,
                        LocalInfo::Boring,
                        NeedsTemporary::No
                    )
                );
                let origin =
                    if is_from_as_cast { CoercionSource::AsCast } else { CoercionSource::Implicit };
                block.and(Rvalue::Cast(CastKind::PointerCoercion(cast, origin), source, expr.ty))
            }
            ExprKind::Array { ref fields } => {
                // (*) We would (maybe) be closer to codegen if we
                // handled this and other aggregate cases via
                // `into()`, not `as_rvalue` -- in that case, instead
                // of generating
                //
                //     let tmp1 = ...1;
                //     let tmp2 = ...2;
                //     dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])
                //
                // we could just generate
                //
                //     dest.f = ...1;
                //     dest.g = ...2;
                //
                // The problem is that then we would need to:
                //
                // (a) have a more complex mechanism for handling
                //     partial cleanup;
                // (b) distinguish the case where the type `Foo` has a
                //     destructor, in which case creating an instance
                //     as a whole "arms" the destructor, and you can't
                //     write individual fields; and,
                // (c) handle the case where the type Foo has no
                //     fields. We don't want `let x: ();` to compile
                //     to the same MIR as `let x = ();`.

                // first process the set of fields
                let el_ty = expr.ty.sequence_element_type(this.tcx);
                let fields: IndexVec<FieldIdx, _> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                f,
                                LocalInfo::Boring,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(el_ty)), fields))
            }
            ExprKind::Tuple { ref fields } => {
                // see (*) above
                // first process the set of fields
                let fields: IndexVec<FieldIdx, _> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                f,
                                LocalInfo::Boring,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Tuple), fields))
            }
            ExprKind::Closure(box ClosureExpr {
                closure_id,
                args,
                ref upvars,
                ref fake_reads,
                movability: _,
            }) => {
                // Convert the closure fake reads, if any, from `ExprRef` to mir `Place`
                // and push the fake reads.
                // This must come before creating the operands. This is required in case
                // there is a fake read and a borrow of the same path, since otherwise the
                // fake read might interfere with the borrow. Consider an example like this
                // one:
                // ```
                // let mut x = 0;
                // let c = || {
                //     &mut x; // mutable borrow of `x`
                //     match x { _ => () } // fake read of `x`
                // };
                // ```
                //
                for (thir_place, cause, hir_id) in fake_reads.into_iter() {
                    let place_builder = unpack!(block = this.as_place_builder(block, *thir_place));

                    if let Some(mir_place) = place_builder.try_to_place(this) {
                        this.cfg.push_fake_read(
                            block,
                            this.source_info(this.tcx.hir().span(*hir_id)),
                            *cause,
                            mir_place,
                        );
                    }
                }

                // see (*) above
                let operands: IndexVec<FieldIdx, _> = upvars
                    .into_iter()
                    .copied()
                    .map(|upvar| {
                        let upvar_expr = &this.thir[upvar];
                        match Category::of(&upvar_expr.kind) {
                            // Use as_place to avoid creating a temporary when
                            // moving a variable into a closure, so that
                            // borrowck knows which variables to mark as being
                            // used as mut. This is OK here because the upvar
                            // expressions have no side effects and act on
                            // disjoint places.
                            // This occurs when capturing by copy/move, while
                            // by reference captures use as_operand
                            Some(Category::Place) => {
                                let place = unpack!(block = this.as_place(block, upvar));
                                this.consume_by_copy_or_move(place)
                            }
                            _ => {
                                // Turn mutable borrow captures into unique
                                // borrow captures when capturing an immutable
                                // variable. This is sound because the mutation
                                // that caused the capture will cause an error.
                                match upvar_expr.kind {
                                    ExprKind::Borrow {
                                        borrow_kind:
                                            BorrowKind::Mut { kind: MutBorrowKind::Default },
                                        arg,
                                    } => unpack!(
                                        block = this.limit_capture_mutability(
                                            upvar_expr.span,
                                            upvar_expr.ty,
                                            scope.temp_lifetime,
                                            block,
                                            arg,
                                        )
                                    ),
                                    _ => {
                                        unpack!(
                                            block = this.as_operand(
                                                block,
                                                scope,
                                                upvar,
                                                LocalInfo::Boring,
                                                NeedsTemporary::Maybe
                                            )
                                        )
                                    }
                                }
                            }
                        }
                    })
                    .collect();

                let result = match args {
                    UpvarArgs::Coroutine(args) => {
                        Box::new(AggregateKind::Coroutine(closure_id.to_def_id(), args))
                    }
                    UpvarArgs::Closure(args) => {
                        Box::new(AggregateKind::Closure(closure_id.to_def_id(), args))
                    }
                    UpvarArgs::CoroutineClosure(args) => {
                        Box::new(AggregateKind::CoroutineClosure(closure_id.to_def_id(), args))
                    }
                };
                block.and(Rvalue::Aggregate(result, operands))
            }
            ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
                // Assignments evaluate to `()`; lower as a statement and yield
                // a zero-sized unit constant.
                block = this.stmt_expr(block, expr_id, None).into_block();
                block.and(Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                    span: expr_span,
                    user_ty: None,
                    const_: Const::zero_sized(this.tcx.types.unit),
                }))))
            }

            ExprKind::OffsetOf { container, fields } => {
                block.and(Rvalue::NullaryOp(NullOp::OffsetOf(fields), container))
            }

            ExprKind::Literal { .. }
            | ExprKind::NamedConst { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::ConstParam { .. }
            | ExprKind::ConstBlock { .. }
            | ExprKind::StaticRef { .. } => {
                let constant = this.as_constant(expr);
                block.and(Rvalue::Use(Operand::Constant(Box::new(constant))))
            }

            ExprKind::Yield { .. }
            | ExprKind::Block { .. }
            | ExprKind::Match { .. }
            | ExprKind::If { .. }
            | ExprKind::NeverToAny { .. }
            | ExprKind::Use { .. }
            | ExprKind::Borrow { .. }
            | ExprKind::RawBorrow { .. }
            | ExprKind::Adt { .. }
            | ExprKind::Loop { .. }
            | ExprKind::LogicalOp { .. }
            | ExprKind::Call { .. }
            | ExprKind::Field { .. }
            | ExprKind::Let { .. }
            | ExprKind::Deref { .. }
            | ExprKind::Index { .. }
            | ExprKind::VarRef { .. }
            | ExprKind::UpvarRef { .. }
            | ExprKind::Break { .. }
            | ExprKind::Continue { .. }
            | ExprKind::Return { .. }
            | ExprKind::Become { .. }
            | ExprKind::InlineAsm { .. }
            | ExprKind::PlaceTypeAscription { .. }
            | ExprKind::ValueTypeAscription { .. } => {
                // these do not have corresponding `Rvalue` variants,
                // so make an operand and then return that
                debug_assert!(!matches!(
                    Category::of(&expr.kind),
                    Some(Category::Rvalue(RvalueFunc::AsRvalue) | Category::Constant)
                ));
                let operand = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        expr_id,
                        LocalInfo::Boring,
                        NeedsTemporary::No,
                    )
                );
                block.and(Rvalue::Use(operand))
            }
        }
    }

    /// Builds the MIR for a binary operation, inserting the overflow /
    /// divide-by-zero checks that the operator requires (when
    /// `self.check_overflow` is set and the type is integral).
    pub(crate) fn build_binary_op(
        &mut self,
        mut block: BasicBlock,
        op: BinOp,
        span: Span,
        ty: Ty<'tcx>,
        lhs: Operand<'tcx>,
        rhs: Operand<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let source_info = self.source_info(span);
        let bool_ty = self.tcx.types.bool;
        let rvalue = match op {
            BinOp::Add | BinOp::Sub | BinOp::Mul if self.check_overflow && ty.is_integral() => {
                // Use the `*WithOverflow` form, assert on the overflow flag,
                // and yield the value component of the resulting pair.
                let result_tup = Ty::new_tup(self.tcx, &[ty, bool_ty]);
                let result_value = self.temp(result_tup, span);

                let op_with_overflow = op.wrapping_to_overflowing().unwrap();

                self.cfg.push_assign(
                    block,
                    source_info,
                    result_value,
                    Rvalue::BinaryOp(op_with_overflow, Box::new((lhs.to_copy(), rhs.to_copy()))),
                );
                let val_fld = FieldIdx::ZERO;
                let of_fld = FieldIdx::new(1);

                let tcx = self.tcx;
                let val = tcx.mk_place_field(result_value, val_fld, ty);
                let of = tcx.mk_place_field(result_value, of_fld, bool_ty);

                let err = AssertKind::Overflow(op, lhs, rhs);
                block = self.assert(block, Operand::Move(of), false, err, span);

                Rvalue::Use(Operand::Move(val))
            }
            BinOp::Shl | BinOp::Shr if self.check_overflow && ty.is_integral() => {
                // For an unsigned RHS, the shift is in-range for `rhs < bits`.
                // For a signed RHS, `IntToInt` cast to the equivalent unsigned
                // type and do that same comparison.
                // A negative value will be *at least* 128 after the cast (that's i8::MIN),
                // and 128 is an overflowing shift amount for all our currently existing types,
                // so this cast can never make us miss an overflow.
                let (lhs_size, _) = ty.int_size_and_signed(self.tcx);
                assert!(lhs_size.bits() <= 128);
                let rhs_ty = rhs.ty(&self.local_decls, self.tcx);
                let (rhs_size, _) = rhs_ty.int_size_and_signed(self.tcx);

                let (unsigned_rhs, unsigned_ty) = match rhs_ty.kind() {
                    ty::Uint(_) => (rhs.to_copy(), rhs_ty),
                    ty::Int(int_width) => {
                        let uint_ty = Ty::new_uint(self.tcx, int_width.to_unsigned());
                        let rhs_temp = self.temp(uint_ty, span);
                        self.cfg.push_assign(
                            block,
                            source_info,
                            rhs_temp,
                            Rvalue::Cast(CastKind::IntToInt, rhs.to_copy(), uint_ty),
                        );
                        (Operand::Move(rhs_temp), uint_ty)
                    }
                    _ => unreachable!("only integers are shiftable"),
                };

                // This can't overflow because the largest shiftable types are 128-bit,
                // which fits in `u8`, the smallest possible `unsigned_ty`.
                let lhs_bits = Operand::const_from_scalar(
                    self.tcx,
                    unsigned_ty,
                    Scalar::from_uint(lhs_size.bits(), rhs_size),
                    span,
                );

                let inbounds = self.temp(bool_ty, span);
                self.cfg.push_assign(
                    block,
                    source_info,
                    inbounds,
                    Rvalue::BinaryOp(BinOp::Lt, Box::new((unsigned_rhs, lhs_bits))),
                );

                let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());
                block = self.assert(block, Operand::Move(inbounds), true, overflow_err, span);
                Rvalue::BinaryOp(op, Box::new((lhs, rhs)))
            }
            BinOp::Div | BinOp::Rem if ty.is_integral() => {
                // Checking division and remainder is more complex, since we 1. always check
                // and 2. there are two possible failure cases, divide-by-zero and overflow.

                let zero_err = if op == BinOp::Div {
                    AssertKind::DivisionByZero(lhs.to_copy())
                } else {
                    AssertKind::RemainderByZero(lhs.to_copy())
                };
                let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());

                // Check for / 0
                let is_zero = self.temp(bool_ty, span);
                let zero = self.zero_literal(span, ty);
                self.cfg.push_assign(
                    block,
                    source_info,
                    is_zero,
                    Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), zero))),
                );

                block = self.assert(block, Operand::Move(is_zero), false, zero_err, span);

                // We only need to check for the overflow in one case:
                // MIN / -1, and only for signed values.
                if ty.is_signed() {
                    let neg_1 = self.neg_1_literal(span, ty);
                    let min = self.minval_literal(span, ty);

                    let is_neg_1 = self.temp(bool_ty, span);
                    let is_min = self.temp(bool_ty, span);
                    let of = self.temp(bool_ty, span);

                    // this does (rhs == -1) & (lhs == MIN). It could short-circuit instead

                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_neg_1,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), neg_1))),
                    );
                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((lhs.to_copy(), min))),
                    );

                    let is_neg_1 = Operand::Move(is_neg_1);
                    let is_min = Operand::Move(is_min);
                    self.cfg.push_assign(
                        block,
                        source_info,
                        of,
                        Rvalue::BinaryOp(BinOp::BitAnd, Box::new((is_neg_1, is_min))),
                    );

                    block = self.assert(block, Operand::Move(of), false, overflow_err, span);
                }

                Rvalue::BinaryOp(op, Box::new((lhs, rhs)))
            }
            _ => Rvalue::BinaryOp(op, Box::new((lhs, rhs))),
        };
        block.and(rvalue)
    }

    /// Lowers `[value; 0]`: the element expression must still be evaluated
    /// (and dropped, if it produced a value) even though the resulting array
    /// is empty.
    fn build_zero_repeat(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: TempLifetime,
        outer_source_info: SourceInfo,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let this = self;
        let value_expr = &this.thir[value];
        let elem_ty = value_expr.ty;
        if let Some(Category::Constant) = Category::of(&value_expr.kind) {
            // Repeating a const does nothing
        } else {
            // For a non-const, we may need to generate an appropriate `Drop`
            let value_operand = unpack!(
                block = this.as_operand(block, scope, value, LocalInfo::Boring, NeedsTemporary::No)
            );
            if let Operand::Move(to_drop) = value_operand {
                let success = this.cfg.start_new_block();
                this.cfg.terminate(block, outer_source_info, TerminatorKind::Drop {
                    place: to_drop,
                    target: success,
                    unwind: UnwindAction::Continue,
                    replace: false,
                });
                this.diverge_from(block);
                block = success;
            }
            this.record_operands_moved(&[Spanned { node: value_operand, span: DUMMY_SP }]);
        }
        block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(elem_ty)), IndexVec::new()))
    }

    /// Captures `arg` for a closure via a `ClosureCapture` borrow when the
    /// captured path is immutable, or a regular mutable borrow when it is
    /// mutable. Returns a move of the temporary holding the borrow.
    fn limit_capture_mutability(
        &mut self,
        upvar_span: Span,
        upvar_ty: Ty<'tcx>,
        temp_lifetime: Option<region::Scope>,
        mut block: BasicBlock,
        arg: ExprId,
    ) -> BlockAnd<Operand<'tcx>> {
        let this = self;

        let source_info = this.source_info(upvar_span);
        let temp = this.local_decls.push(LocalDecl::new(upvar_ty, upvar_span));

        this.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) });

        let arg_place_builder = unpack!(block = this.as_place_builder(block, arg));

        let mutability = match arg_place_builder.base() {
            // We are capturing a path that starts off a local variable in the parent.
            // The mutability of the current capture is same as the mutability
            // of the local declaration in the parent.
            PlaceBase::Local(local) => this.local_decls[local].mutability,
            // Parent is a closure and we are capturing a path that is captured
            // by the parent itself. The mutability of the current capture
            // is same as that of the capture in the parent closure.
            PlaceBase::Upvar { .. } => {
                let enclosing_upvars_resolved = arg_place_builder.to_place(this);

                match enclosing_upvars_resolved.as_ref() {
                    PlaceRef {
                        local,
                        projection: &[ProjectionElem::Field(upvar_index, _), ..],
                    }
                    | PlaceRef {
                        local,
                        projection:
                            &[ProjectionElem::Deref, ProjectionElem::Field(upvar_index, _), ..],
                    } => {
                        // Not in a closure
                        debug_assert!(
                            local == ty::CAPTURE_STRUCT_LOCAL,
                            "Expected local to be Local(1), found {local:?}"
                        );
                        // Not in a closure
                        debug_assert!(
                            this.upvars.len() > upvar_index.index(),
                            "Unexpected capture place, upvars={:#?}, upvar_index={:?}",
                            this.upvars,
                            upvar_index
                        );
                        this.upvars[upvar_index.index()].mutability
                    }
                    _ => bug!("Unexpected capture place"),
                }
            }
        };

        let borrow_kind = match mutability {
            Mutability::Not => BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture },
            Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
        };

        let arg_place = arg_place_builder.to_place(this);

        this.cfg.push_assign(
            block,
            source_info,
            Place::from(temp),
            Rvalue::Ref(this.tcx.lifetimes.re_erased, borrow_kind, arg_place),
        );

        // See the comment in `expr_as_temp` and on the `rvalue_scopes` field for why
        // this can be `None`.
        if let Some(temp_lifetime) = temp_lifetime {
            this.schedule_drop_storage_and_value(upvar_span, temp_lifetime, temp);
        }

        block.and(Operand::Move(Place::from(temp)))
    }

    // Helper to get a `-1` value of the appropriate type
    fn neg_1_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
        let typing_env = ty::TypingEnv::fully_monomorphized();
        let size = self.tcx.layout_of(typing_env.as_query_input(ty)).unwrap().size;
        // All-ones bit pattern is `-1` in two's complement.
        let literal = Const::from_bits(self.tcx, size.unsigned_int_max(), typing_env, ty);

        self.literal_operand(span, literal)
    }

    // Helper to get the minimum value of the appropriate type
    fn minval_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
        assert!(ty.is_signed());
        let typing_env = ty::TypingEnv::fully_monomorphized();
        let bits = self.tcx.layout_of(typing_env.as_query_input(ty)).unwrap().size.bits();
        // Sign bit set, all else clear: the minimum signed value.
        let n = 1 << (bits - 1);
        let literal = Const::from_bits(self.tcx, n, typing_env, ty);

        self.literal_operand(span, literal)
    }
}
diff --git a/compiler/rustc_mir_build/src/builder/expr/as_temp.rs b/compiler/rustc_mir_build/src/builder/expr/as_temp.rs
new file mode 100644
index 00000000000..5e3a24e18fb
--- /dev/null
+++ b/compiler/rustc_mir_build/src/builder/expr/as_temp.rs
@@ -0,0 +1,139 @@
//! See docs in builder/expr/mod.rs

use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir::HirId;
use rustc_middle::middle::region::{Scope, ScopeData};
use rustc_middle::mir::*;
use rustc_middle::thir::*;
use tracing::{debug, instrument};

use crate::builder::scope::DropKind;
use crate::builder::{BlockAnd, BlockAndExtension, Builder};

impl<'a, 'tcx> Builder<'a, 'tcx> {
    /// Compile `expr` into a fresh temporary. This is used when building
    /// up rvalues so as to freeze the value that will be consumed.
+ pub(crate) fn as_temp( + &mut self, + block: BasicBlock, + temp_lifetime: TempLifetime, + expr_id: ExprId, + mutability: Mutability, + ) -> BlockAnd<Local> { + // this is the only place in mir building that we need to truly need to worry about + // infinite recursion. Everything else does recurse, too, but it always gets broken up + // at some point by inserting an intermediate temporary + ensure_sufficient_stack(|| self.as_temp_inner(block, temp_lifetime, expr_id, mutability)) + } + + #[instrument(skip(self), level = "debug")] + fn as_temp_inner( + &mut self, + mut block: BasicBlock, + temp_lifetime: TempLifetime, + expr_id: ExprId, + mutability: Mutability, + ) -> BlockAnd<Local> { + let this = self; + + let expr = &this.thir[expr_id]; + let expr_span = expr.span; + let source_info = this.source_info(expr_span); + if let ExprKind::Scope { region_scope, lint_level, value } = expr.kind { + return this.in_scope((region_scope, source_info), lint_level, |this| { + this.as_temp(block, temp_lifetime, value, mutability) + }); + } + + let expr_ty = expr.ty; + let deduplicate_temps = this.fixed_temps_scope.is_some() + && this.fixed_temps_scope == temp_lifetime.temp_lifetime; + let temp = if deduplicate_temps && let Some(temp_index) = this.fixed_temps.get(&expr_id) { + *temp_index + } else { + let mut local_decl = LocalDecl::new(expr_ty, expr_span); + if mutability.is_not() { + local_decl = local_decl.immutable(); + } + + debug!("creating temp {:?} with block_context: {:?}", local_decl, this.block_context); + let local_info = match expr.kind { + ExprKind::StaticRef { def_id, .. } => { + assert!(!this.tcx.is_thread_local_static(def_id)); + LocalInfo::StaticRef { def_id, is_thread_local: false } + } + ExprKind::ThreadLocalRef(def_id) => { + assert!(this.tcx.is_thread_local_static(def_id)); + LocalInfo::StaticRef { def_id, is_thread_local: true } + } + ExprKind::NamedConst { def_id, .. } | ExprKind::ConstParam { def_id, .. 
} => { + LocalInfo::ConstRef { def_id } + } + // Find out whether this temp is being created within the + // tail expression of a block whose result is ignored. + _ if let Some(tail_info) = this.block_context.currently_in_block_tail() => { + LocalInfo::BlockTailTemp(tail_info) + } + + _ if let Some(Scope { data: ScopeData::IfThenRescope, id }) = + temp_lifetime.temp_lifetime => + { + LocalInfo::IfThenRescopeTemp { + if_then: HirId { owner: this.hir_id.owner, local_id: id }, + } + } + + _ => LocalInfo::Boring, + }; + **local_decl.local_info.as_mut().assert_crate_local() = local_info; + this.local_decls.push(local_decl) + }; + debug!(?temp); + if deduplicate_temps { + this.fixed_temps.insert(expr_id, temp); + } + let temp_place = Place::from(temp); + + match expr.kind { + // Don't bother with StorageLive and Dead for these temporaries, + // they are never assigned. + ExprKind::Break { .. } | ExprKind::Continue { .. } | ExprKind::Return { .. } => (), + ExprKind::Block { block } + if let Block { expr: None, targeted_by_break: false, .. } = this.thir[block] + && expr_ty.is_never() => {} + _ => { + this.cfg + .push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) }); + + // In constants, `temp_lifetime` is `None` for temporaries that + // live for the `'static` lifetime. Thus we do not drop these + // temporaries and simply leak them. + // This is equivalent to what `let x = &foo();` does in + // functions. The temporary is lifted to their surrounding + // scope. In a function that means the temporary lives until + // just before the function returns. In constants that means it + // outlives the constant's initialization value computation. + // Anything outliving a constant must have the `'static` + // lifetime and live forever. + // Anything with a shorter lifetime (e.g the `&foo()` in + // `bar(&foo())` or anything within a block will keep the + // regular drops just like runtime code. 
+ if let Some(temp_lifetime) = temp_lifetime.temp_lifetime { + this.schedule_drop(expr_span, temp_lifetime, temp, DropKind::Storage); + } + } + } + + block = this.expr_into_dest(temp_place, block, expr_id).into_block(); + + if let Some(temp_lifetime) = temp_lifetime.temp_lifetime { + this.schedule_drop(expr_span, temp_lifetime, temp, DropKind::Value); + } + + if let Some(backwards_incompatible) = temp_lifetime.backwards_incompatible { + this.schedule_backwards_incompatible_drop(expr_span, backwards_incompatible, temp); + } + + block.and(temp) + } +} diff --git a/compiler/rustc_mir_build/src/builder/expr/category.rs b/compiler/rustc_mir_build/src/builder/expr/category.rs new file mode 100644 index 00000000000..e0349e3e3f6 --- /dev/null +++ b/compiler/rustc_mir_build/src/builder/expr/category.rs @@ -0,0 +1,94 @@ +use rustc_middle::thir::*; + +#[derive(Debug, PartialEq)] +pub(crate) enum Category { + /// An assignable memory location like `x`, `x.f`, `foo()[3]`, that + /// sort of thing. Something that could appear on the LHS of an `=` + /// sign. + Place, + + /// A literal like `23` or `"foo"`. Does not include constant + /// expressions like `3 + 5`. + Constant, + + /// Something that generates a new value at runtime, like `x + y` + /// or `foo()`. + Rvalue(RvalueFunc), +} + +/// Rvalues fall into different "styles" that will determine which fn +/// is best suited to generate them. +#[derive(Debug, PartialEq)] +pub(crate) enum RvalueFunc { + /// Best generated by `into`. This is generally exprs that + /// cause branching, like `match`, but also includes calls. + Into, + + /// Best generated by `as_rvalue`. This is usually the case. + AsRvalue, +} + +impl Category { + /// Determines the category for a given expression. Note that scope + /// and paren expressions have no category. + pub(crate) fn of(ek: &ExprKind<'_>) -> Option<Category> { + match *ek { + ExprKind::Scope { .. } => None, + + ExprKind::Field { .. } + | ExprKind::Deref { .. } + | ExprKind::Index { .. 
} + | ExprKind::UpvarRef { .. } + | ExprKind::VarRef { .. } + | ExprKind::PlaceTypeAscription { .. } + | ExprKind::ValueTypeAscription { .. } => Some(Category::Place), + + ExprKind::LogicalOp { .. } + | ExprKind::Match { .. } + | ExprKind::If { .. } + | ExprKind::Let { .. } + | ExprKind::NeverToAny { .. } + | ExprKind::Use { .. } + | ExprKind::Adt { .. } + | ExprKind::Borrow { .. } + | ExprKind::RawBorrow { .. } + | ExprKind::Yield { .. } + | ExprKind::Call { .. } + | ExprKind::InlineAsm { .. } => Some(Category::Rvalue(RvalueFunc::Into)), + + ExprKind::Array { .. } + | ExprKind::Tuple { .. } + | ExprKind::Closure { .. } + | ExprKind::Unary { .. } + | ExprKind::Binary { .. } + | ExprKind::Box { .. } + | ExprKind::Cast { .. } + | ExprKind::PointerCoercion { .. } + | ExprKind::Repeat { .. } + | ExprKind::Assign { .. } + | ExprKind::AssignOp { .. } + | ExprKind::ThreadLocalRef(_) + | ExprKind::OffsetOf { .. } => Some(Category::Rvalue(RvalueFunc::AsRvalue)), + + ExprKind::ConstBlock { .. } + | ExprKind::Literal { .. } + | ExprKind::NonHirLiteral { .. } + | ExprKind::ZstLiteral { .. } + | ExprKind::ConstParam { .. } + | ExprKind::StaticRef { .. } + | ExprKind::NamedConst { .. } => Some(Category::Constant), + + ExprKind::Loop { .. } + | ExprKind::Block { .. } + | ExprKind::Break { .. } + | ExprKind::Continue { .. } + | ExprKind::Return { .. } + | ExprKind::Become { .. } => + // FIXME(#27840) these probably want their own + // category, like "nonterminating" + { + Some(Category::Rvalue(RvalueFunc::Into)) + } + } + } +} diff --git a/compiler/rustc_mir_build/src/builder/expr/into.rs b/compiler/rustc_mir_build/src/builder/expr/into.rs new file mode 100644 index 00000000000..88f63d4e22c --- /dev/null +++ b/compiler/rustc_mir_build/src/builder/expr/into.rs @@ -0,0 +1,650 @@ +//! 
See docs in build/expr/mod.rs + +use rustc_ast::{AsmMacro, InlineAsmOptions}; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::stack::ensure_sufficient_stack; +use rustc_hir as hir; +use rustc_middle::mir::*; +use rustc_middle::span_bug; +use rustc_middle::thir::*; +use rustc_middle::ty::CanonicalUserTypeAnnotation; +use rustc_span::source_map::Spanned; +use tracing::{debug, instrument}; + +use crate::builder::expr::category::{Category, RvalueFunc}; +use crate::builder::matches::DeclareLetBindings; +use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder, NeedsTemporary}; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Compile `expr`, storing the result into `destination`, which + /// is assumed to be uninitialized. + #[instrument(level = "debug", skip(self))] + pub(crate) fn expr_into_dest( + &mut self, + destination: Place<'tcx>, + mut block: BasicBlock, + expr_id: ExprId, + ) -> BlockAnd<()> { + // since we frequently have to reference `self` from within a + // closure, where `self` would be shadowed, it's easier to + // just use the name `this` uniformly + let this = self; + let expr = &this.thir[expr_id]; + let expr_span = expr.span; + let source_info = this.source_info(expr_span); + + let expr_is_block_or_scope = + matches!(expr.kind, ExprKind::Block { .. } | ExprKind::Scope { .. }); + + if !expr_is_block_or_scope { + this.block_context.push(BlockFrame::SubExpr); + } + + let block_and = match expr.kind { + ExprKind::Scope { region_scope, lint_level, value } => { + let region_scope = (region_scope, source_info); + ensure_sufficient_stack(|| { + this.in_scope(region_scope, lint_level, |this| { + this.expr_into_dest(destination, block, value) + }) + }) + } + ExprKind::Block { block: ast_block } => { + this.ast_block(destination, block, ast_block, source_info) + } + ExprKind::Match { scrutinee, ref arms, .. 
} => this.match_expr( + destination, + block, + scrutinee, + arms, + expr_span, + this.thir[scrutinee].span, + ), + ExprKind::If { cond, then, else_opt, if_then_scope } => { + let then_span = this.thir[then].span; + let then_source_info = this.source_info(then_span); + let condition_scope = this.local_scope(); + + let then_and_else_blocks = this.in_scope( + (if_then_scope, then_source_info), + LintLevel::Inherited, + |this| { + // FIXME: Does this need extra logic to handle let-chains? + let source_info = if this.is_let(cond) { + let variable_scope = + this.new_source_scope(then_span, LintLevel::Inherited); + this.source_scope = variable_scope; + SourceInfo { span: then_span, scope: variable_scope } + } else { + this.source_info(then_span) + }; + + // Lower the condition, and have it branch into `then` and `else` blocks. + let (then_block, else_block) = + this.in_if_then_scope(condition_scope, then_span, |this| { + let then_blk = this + .then_else_break( + block, + cond, + Some(condition_scope), // Temp scope + source_info, + DeclareLetBindings::Yes, // Declare `let` bindings normally + ) + .into_block(); + + // Lower the `then` arm into its block. + this.expr_into_dest(destination, then_blk, then) + }); + + // Pack `(then_block, else_block)` into `BlockAnd<BasicBlock>`. + then_block.and(else_block) + }, + ); + + // Unpack `BlockAnd<BasicBlock>` into `(then_blk, else_blk)`. + let (then_blk, mut else_blk); + else_blk = unpack!(then_blk = then_and_else_blocks); + + // If there is an `else` arm, lower it into `else_blk`. + if let Some(else_expr) = else_opt { + else_blk = this.expr_into_dest(destination, else_blk, else_expr).into_block(); + } else { + // There is no `else` arm, so we know both arms have type `()`. + // Generate the implicit `else {}` by assigning unit. 
+ let correct_si = this.source_info(expr_span.shrink_to_hi()); + this.cfg.push_assign_unit(else_blk, correct_si, destination, this.tcx); + } + + // The `then` and `else` arms have been lowered into their respective + // blocks, so make both of them meet up in a new block. + let join_block = this.cfg.start_new_block(); + this.cfg.goto(then_blk, source_info, join_block); + this.cfg.goto(else_blk, source_info, join_block); + join_block.unit() + } + ExprKind::Let { .. } => { + // After desugaring, `let` expressions should only appear inside `if` + // expressions or `match` guards, possibly nested within a let-chain. + // In both cases they are specifically handled by the lowerings of + // those expressions, so this case is currently unreachable. + span_bug!(expr_span, "unexpected let expression outside of if or match-guard"); + } + ExprKind::NeverToAny { source } => { + let source_expr = &this.thir[source]; + let is_call = + matches!(source_expr.kind, ExprKind::Call { .. } | ExprKind::InlineAsm { .. }); + + // (#66975) Source could be a const of type `!`, so has to + // exist in the generated MIR. + unpack!( + block = + this.as_temp(block, this.local_temp_lifetime(), source, Mutability::Mut) + ); + + // This is an optimization. If the expression was a call then we already have an + // unreachable block. Don't bother to terminate it and create a new one. + if is_call { + block.unit() + } else { + this.cfg.terminate(block, source_info, TerminatorKind::Unreachable); + let end_block = this.cfg.start_new_block(); + end_block.unit() + } + } + ExprKind::LogicalOp { op, lhs, rhs } => { + let condition_scope = this.local_scope(); + let source_info = this.source_info(expr.span); + + this.visit_coverage_branch_operation(op, expr.span); + + // We first evaluate the left-hand side of the predicate ... 
+ let (then_block, else_block) = + this.in_if_then_scope(condition_scope, expr.span, |this| { + this.then_else_break( + block, + lhs, + Some(condition_scope), // Temp scope + source_info, + // This flag controls how inner `let` expressions are lowered, + // but either way there shouldn't be any of those in here. + DeclareLetBindings::LetNotPermitted, + ) + }); + let (short_circuit, continuation, constant) = match op { + LogicalOp::And => (else_block, then_block, false), + LogicalOp::Or => (then_block, else_block, true), + }; + // At this point, the control flow splits into a short-circuiting path + // and a continuation path. + // - If the operator is `&&`, passing `lhs` leads to continuation of evaluation on `rhs`; + // failing it leads to the short-circuting path which assigns `false` to the place. + // - If the operator is `||`, failing `lhs` leads to continuation of evaluation on `rhs`; + // passing it leads to the short-circuting path which assigns `true` to the place. + this.cfg.push_assign_constant( + short_circuit, + source_info, + destination, + ConstOperand { + span: expr.span, + user_ty: None, + const_: Const::from_bool(this.tcx, constant), + }, + ); + let mut rhs_block = + this.expr_into_dest(destination, continuation, rhs).into_block(); + // Instrument the lowered RHS's value for condition coverage. + // (Does nothing if condition coverage is not enabled.) + this.visit_coverage_standalone_condition(rhs, destination, &mut rhs_block); + + let target = this.cfg.start_new_block(); + this.cfg.goto(rhs_block, source_info, target); + this.cfg.goto(short_circuit, source_info, target); + target.unit() + } + ExprKind::Loop { body } => { + // [block] + // | + // [loop_block] -> [body_block] -/eval. 
body/-> [body_block_end] + // | ^ | + // false link | | + // | +-----------------------------------------+ + // +-> [diverge_cleanup] + // The false link is required to make sure borrowck considers unwinds through the + // body, even when the exact code in the body cannot unwind + + let loop_block = this.cfg.start_new_block(); + + // Start the loop. + this.cfg.goto(block, source_info, loop_block); + + this.in_breakable_scope(Some(loop_block), destination, expr_span, move |this| { + // conduct the test, if necessary + let body_block = this.cfg.start_new_block(); + this.cfg.terminate(loop_block, source_info, TerminatorKind::FalseUnwind { + real_target: body_block, + unwind: UnwindAction::Continue, + }); + this.diverge_from(loop_block); + + // The “return” value of the loop body must always be a unit. We therefore + // introduce a unit temporary as the destination for the loop body. + let tmp = this.get_unit_temp(); + // Execute the body, branching back to the test. + let body_block_end = this.expr_into_dest(tmp, body_block, body).into_block(); + this.cfg.goto(body_block_end, source_info, loop_block); + + // Loops are only exited by `break` expressions. + None + }) + } + ExprKind::Call { ty: _, fun, ref args, from_hir_call, fn_span } => { + let fun = unpack!(block = this.as_local_operand(block, fun)); + let args: Box<[_]> = args + .into_iter() + .copied() + .map(|arg| Spanned { + node: unpack!(block = this.as_local_call_operand(block, arg)), + span: this.thir.exprs[arg].span, + }) + .collect(); + + let success = this.cfg.start_new_block(); + + this.record_operands_moved(&args); + + debug!("expr_into_dest: fn_span={:?}", fn_span); + + this.cfg.terminate(block, source_info, TerminatorKind::Call { + func: fun, + args, + unwind: UnwindAction::Continue, + destination, + // The presence or absence of a return edge affects control-flow sensitive + // MIR checks and ultimately whether code is accepted or not. 
We can only + // omit the return edge if a return type is visibly uninhabited to a module + // that makes the call. + target: expr + .ty + .is_inhabited_from( + this.tcx, + this.parent_module, + this.infcx.typing_env(this.param_env), + ) + .then_some(success), + call_source: if from_hir_call { + CallSource::Normal + } else { + CallSource::OverloadedOperator + }, + fn_span, + }); + this.diverge_from(block); + success.unit() + } + ExprKind::Use { source } => this.expr_into_dest(destination, block, source), + ExprKind::Borrow { arg, borrow_kind } => { + // We don't do this in `as_rvalue` because we use `as_place` + // for borrow expressions, so we cannot create an `RValue` that + // remains valid across user code. `as_rvalue` is usually called + // by this method anyway, so this shouldn't cause too many + // unnecessary temporaries. + let arg_place = match borrow_kind { + BorrowKind::Shared => { + unpack!(block = this.as_read_only_place(block, arg)) + } + _ => unpack!(block = this.as_place(block, arg)), + }; + let borrow = Rvalue::Ref(this.tcx.lifetimes.re_erased, borrow_kind, arg_place); + this.cfg.push_assign(block, source_info, destination, borrow); + block.unit() + } + ExprKind::RawBorrow { mutability, arg } => { + let place = match mutability { + hir::Mutability::Not => this.as_read_only_place(block, arg), + hir::Mutability::Mut => this.as_place(block, arg), + }; + let address_of = Rvalue::RawPtr(mutability, unpack!(block = place)); + this.cfg.push_assign(block, source_info, destination, address_of); + block.unit() + } + ExprKind::Adt(box AdtExpr { + adt_def, + variant_index, + args, + ref user_ty, + ref fields, + ref base, + }) => { + // See the notes for `ExprKind::Array` in `as_rvalue` and for + // `ExprKind::Borrow` above. 
+ let is_union = adt_def.is_union(); + let active_field_index = is_union.then(|| fields[0].name); + + let scope = this.local_temp_lifetime(); + + // first process the set of fields that were provided + // (evaluating them in order given by user) + let fields_map: FxHashMap<_, _> = fields + .into_iter() + .map(|f| { + ( + f.name, + unpack!( + block = this.as_operand( + block, + scope, + f.expr, + LocalInfo::AggregateTemp, + NeedsTemporary::Maybe, + ) + ), + ) + }) + .collect(); + + let variant = adt_def.variant(variant_index); + let field_names = variant.fields.indices(); + + let fields = match base { + AdtExprBase::None => { + field_names.filter_map(|n| fields_map.get(&n).cloned()).collect() + } + AdtExprBase::Base(FruInfo { base, field_types }) => { + let place_builder = unpack!(block = this.as_place_builder(block, *base)); + + // We desugar FRU as we lower to MIR, so for each + // base-supplied field, generate an operand that + // reads it from the base. + itertools::zip_eq(field_names, &**field_types) + .map(|(n, ty)| match fields_map.get(&n) { + Some(v) => v.clone(), + None => { + let place = + place_builder.clone_project(PlaceElem::Field(n, *ty)); + this.consume_by_copy_or_move(place.to_place(this)) + } + }) + .collect() + } + AdtExprBase::DefaultFields(field_types) => { + itertools::zip_eq(field_names, field_types) + .map(|(n, &ty)| match fields_map.get(&n) { + Some(v) => v.clone(), + None => match variant.fields[n].value { + Some(def) => { + let value = Const::Unevaluated( + UnevaluatedConst::new(def, args), + ty, + ); + Operand::Constant(Box::new(ConstOperand { + span: expr_span, + user_ty: None, + const_: value, + })) + } + None => { + let name = variant.fields[n].name; + span_bug!( + expr_span, + "missing mandatory field `{name}` of type `{ty}`", + ); + } + }, + }) + .collect() + } + }; + + let inferred_ty = expr.ty; + let user_ty = user_ty.as_ref().map(|user_ty| { + this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation { + span: 
source_info.span, + user_ty: user_ty.clone(), + inferred_ty, + }) + }); + let adt = Box::new(AggregateKind::Adt( + adt_def.did(), + variant_index, + args, + user_ty, + active_field_index, + )); + this.cfg.push_assign( + block, + source_info, + destination, + Rvalue::Aggregate(adt, fields), + ); + block.unit() + } + ExprKind::InlineAsm(box InlineAsmExpr { + asm_macro, + template, + ref operands, + options, + line_spans, + }) => { + use rustc_middle::{mir, thir}; + + let destination_block = this.cfg.start_new_block(); + let mut targets = + if asm_macro.diverges(options) { vec![] } else { vec![destination_block] }; + + let operands = operands + .into_iter() + .map(|op| match *op { + thir::InlineAsmOperand::In { reg, expr } => mir::InlineAsmOperand::In { + reg, + value: unpack!(block = this.as_local_operand(block, expr)), + }, + thir::InlineAsmOperand::Out { reg, late, expr } => { + mir::InlineAsmOperand::Out { + reg, + late, + place: expr.map(|expr| unpack!(block = this.as_place(block, expr))), + } + } + thir::InlineAsmOperand::InOut { reg, late, expr } => { + let place = unpack!(block = this.as_place(block, expr)); + mir::InlineAsmOperand::InOut { + reg, + late, + // This works because asm operands must be Copy + in_value: Operand::Copy(place), + out_place: Some(place), + } + } + thir::InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => { + mir::InlineAsmOperand::InOut { + reg, + late, + in_value: unpack!(block = this.as_local_operand(block, in_expr)), + out_place: out_expr.map(|out_expr| { + unpack!(block = this.as_place(block, out_expr)) + }), + } + } + thir::InlineAsmOperand::Const { value, span } => { + mir::InlineAsmOperand::Const { + value: Box::new(ConstOperand { + span, + user_ty: None, + const_: value, + }), + } + } + thir::InlineAsmOperand::SymFn { value, span } => { + mir::InlineAsmOperand::SymFn { + value: Box::new(ConstOperand { + span, + user_ty: None, + const_: value, + }), + } + } + thir::InlineAsmOperand::SymStatic { def_id } => { + 
mir::InlineAsmOperand::SymStatic { def_id } + } + thir::InlineAsmOperand::Label { block } => { + let target = this.cfg.start_new_block(); + let target_index = targets.len(); + targets.push(target); + + let tmp = this.get_unit_temp(); + let target = + this.ast_block(tmp, target, block, source_info).into_block(); + this.cfg.terminate(target, source_info, TerminatorKind::Goto { + target: destination_block, + }); + + mir::InlineAsmOperand::Label { target_index } + } + }) + .collect(); + + if !expr.ty.is_never() { + this.cfg.push_assign_unit(block, source_info, destination, this.tcx); + } + + let asm_macro = match asm_macro { + AsmMacro::Asm => InlineAsmMacro::Asm, + AsmMacro::GlobalAsm => { + span_bug!(expr_span, "unexpected global_asm! in inline asm") + } + AsmMacro::NakedAsm => InlineAsmMacro::NakedAsm, + }; + + this.cfg.terminate(block, source_info, TerminatorKind::InlineAsm { + asm_macro, + template, + operands, + options, + line_spans, + targets: targets.into_boxed_slice(), + unwind: if options.contains(InlineAsmOptions::MAY_UNWIND) { + UnwindAction::Continue + } else { + UnwindAction::Unreachable + }, + }); + if options.contains(InlineAsmOptions::MAY_UNWIND) { + this.diverge_from(block); + } + destination_block.unit() + } + + // These cases don't actually need a destination + ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => { + block = this.stmt_expr(block, expr_id, None).into_block(); + this.cfg.push_assign_unit(block, source_info, destination, this.tcx); + block.unit() + } + + ExprKind::Continue { .. } + | ExprKind::Break { .. } + | ExprKind::Return { .. } + | ExprKind::Become { .. } => { + block = this.stmt_expr(block, expr_id, None).into_block(); + // No assign, as these have type `!`. + block.unit() + } + + // Avoid creating a temporary + ExprKind::VarRef { .. } + | ExprKind::UpvarRef { .. } + | ExprKind::PlaceTypeAscription { .. } + | ExprKind::ValueTypeAscription { .. 
} => { + debug_assert!(Category::of(&expr.kind) == Some(Category::Place)); + + let place = unpack!(block = this.as_place(block, expr_id)); + let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); + this.cfg.push_assign(block, source_info, destination, rvalue); + block.unit() + } + ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => { + debug_assert_eq!(Category::of(&expr.kind), Some(Category::Place)); + + // Create a "fake" temporary variable so that we check that the + // value is Sized. Usually, this is caught in type checking, but + // in the case of box expr there is no such check. + if !destination.projection.is_empty() { + this.local_decls.push(LocalDecl::new(expr.ty, expr.span)); + } + + let place = unpack!(block = this.as_place(block, expr_id)); + let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); + this.cfg.push_assign(block, source_info, destination, rvalue); + block.unit() + } + + ExprKind::Yield { value } => { + let scope = this.local_temp_lifetime(); + let value = unpack!( + block = + this.as_operand(block, scope, value, LocalInfo::Boring, NeedsTemporary::No) + ); + let resume = this.cfg.start_new_block(); + this.cfg.terminate(block, source_info, TerminatorKind::Yield { + value, + resume, + resume_arg: destination, + drop: None, + }); + this.coroutine_drop_cleanup(block); + resume.unit() + } + + // these are the cases that are more naturally handled by some other mode + ExprKind::Unary { .. } + | ExprKind::Binary { .. } + | ExprKind::Box { .. } + | ExprKind::Cast { .. } + | ExprKind::PointerCoercion { .. } + | ExprKind::Repeat { .. } + | ExprKind::Array { .. } + | ExprKind::Tuple { .. } + | ExprKind::Closure { .. } + | ExprKind::ConstBlock { .. } + | ExprKind::Literal { .. } + | ExprKind::NamedConst { .. } + | ExprKind::NonHirLiteral { .. } + | ExprKind::ZstLiteral { .. } + | ExprKind::ConstParam { .. } + | ExprKind::ThreadLocalRef(_) + | ExprKind::StaticRef { .. } + | ExprKind::OffsetOf { .. 
} => { + debug_assert!(match Category::of(&expr.kind).unwrap() { + // should be handled above + Category::Rvalue(RvalueFunc::Into) => false, + + // must be handled above or else we get an + // infinite loop in the builder; see + // e.g., `ExprKind::VarRef` above + Category::Place => false, + + _ => true, + }); + + let rvalue = unpack!(block = this.as_local_rvalue(block, expr_id)); + this.cfg.push_assign(block, source_info, destination, rvalue); + block.unit() + } + }; + + if !expr_is_block_or_scope { + let popped = this.block_context.pop(); + assert!(popped.is_some()); + } + + block_and + } + + fn is_let(&self, expr: ExprId) -> bool { + match self.thir[expr].kind { + ExprKind::Let { .. } => true, + ExprKind::Scope { value, .. } => self.is_let(value), + _ => false, + } + } +} diff --git a/compiler/rustc_mir_build/src/builder/expr/mod.rs b/compiler/rustc_mir_build/src/builder/expr/mod.rs new file mode 100644 index 00000000000..3de43a3370f --- /dev/null +++ b/compiler/rustc_mir_build/src/builder/expr/mod.rs @@ -0,0 +1,70 @@ +//! Builds MIR from expressions. As a caller into this module, you +//! have many options, but the first thing you have to decide is +//! whether you are evaluating this expression for its *value*, its +//! *location*, or as a *constant*. +//! +//! Typically, you want the value: e.g., if you are doing `expr_a + +//! expr_b`, you want the values of those expressions. In that case, +//! you want one of the following functions. Note that if the expr has +//! a type that is not `Copy`, then using any of these functions will +//! "move" the value out of its current home (if any). +//! +//! - `expr_into_dest` -- writes the value into a specific location, which +//! should be uninitialized +//! - `as_operand` -- evaluates the value and yields an `Operand`, +//! suitable for use as an argument to an `Rvalue` +//! - `as_temp` -- evaluates into a temporary; this is similar to `as_operand` +//! except it always returns a fresh place, even for constants +//! 
- `as_rvalue` -- yields an `Rvalue`, suitable for use in an assignment; +//! as of this writing, never needed outside of the `expr` module itself +//! +//! Sometimes though want the expression's *location*. An example +//! would be during a match statement, or the operand of the `&` +//! operator. In that case, you want `as_place`. This will create a +//! temporary if necessary. +//! +//! Finally, if it's a constant you seek, then call +//! `as_constant`. This creates a `Constant<H>`, but naturally it can +//! only be used on constant expressions and hence is needed only in +//! very limited contexts. +//! +//! ### Implementation notes +//! +//! For any given kind of expression, there is generally one way that +//! can be lowered most naturally. This is specified by the +//! `Category::of` function in the `category` module. For example, a +//! struct expression (or other expression that creates a new value) +//! is typically easiest to write in terms of `as_rvalue` or `into`, +//! whereas a reference to a field is easiest to write in terms of +//! `as_place`. (The exception to this is scope and paren +//! expressions, which have no category.) +//! +//! Therefore, the various functions above make use of one another in +//! a descending fashion. For any given expression, you should pick +//! the most suitable spot to implement it, and then just let the +//! other fns cycle around. The handoff works like this: +//! +//! - `into(place)` -> fallback is to create an rvalue with `as_rvalue` and assign it to `place` +//! - `as_rvalue` -> fallback is to create an Operand with `as_operand` and use `Rvalue::use` +//! - `as_operand` -> either invokes `as_constant` or `as_temp` +//! - `as_constant` -> (no fallback) +//! - `as_temp` -> creates a temporary and either calls `as_place` or `into` +//! - `as_place` -> for rvalues, falls back to `as_temp` and returns that +//! +//! As you can see, there is a cycle where `into` can (in theory) fallback to `as_temp` +//! 
which can fallback to `into`. So if one of the `ExprKind` variants is not, in fact, +//! implemented in the category where it is supposed to be, there will be a problem. +//! +//! Of those fallbacks, the most interesting one is `into`, because +//! it discriminates based on the category of the expression. This is +//! basically the point where the "by value" operations are bridged +//! over to the "by reference" mode (`as_place`). + +pub(crate) mod as_constant; +mod as_operand; +pub(crate) mod as_place; +mod as_rvalue; +mod as_temp; +pub(crate) mod category; +mod into; +mod stmt; diff --git a/compiler/rustc_mir_build/src/builder/expr/stmt.rs b/compiler/rustc_mir_build/src/builder/expr/stmt.rs new file mode 100644 index 00000000000..4ae3536d9c2 --- /dev/null +++ b/compiler/rustc_mir_build/src/builder/expr/stmt.rs @@ -0,0 +1,196 @@ +use rustc_middle::middle::region; +use rustc_middle::mir::*; +use rustc_middle::span_bug; +use rustc_middle::thir::*; +use rustc_span::source_map::Spanned; +use tracing::debug; + +use crate::builder::scope::BreakableTarget; +use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Builds a block of MIR statements to evaluate the THIR `expr`. + /// + /// The `statement_scope` is used if a statement temporary must be dropped. + pub(crate) fn stmt_expr( + &mut self, + mut block: BasicBlock, + expr_id: ExprId, + statement_scope: Option<region::Scope>, + ) -> BlockAnd<()> { + let this = self; + let expr = &this.thir[expr_id]; + let expr_span = expr.span; + let source_info = this.source_info(expr.span); + // Handle a number of expressions that don't need a destination at all. This + // avoids needing a mountain of temporary `()` variables. 
+ match expr.kind { + ExprKind::Scope { region_scope, lint_level, value } => { + this.in_scope((region_scope, source_info), lint_level, |this| { + this.stmt_expr(block, value, statement_scope) + }) + } + ExprKind::Assign { lhs, rhs } => { + let lhs_expr = &this.thir[lhs]; + + // Note: we evaluate assignments right-to-left. This + // is better for borrowck interaction with overloaded + // operators like x[j] = x[i]. + + debug!("stmt_expr Assign block_context.push(SubExpr) : {:?}", expr); + this.block_context.push(BlockFrame::SubExpr); + + // Generate better code for things that don't need to be + // dropped. + if lhs_expr.ty.needs_drop(this.tcx, this.typing_env()) { + let rhs = unpack!(block = this.as_local_rvalue(block, rhs)); + let lhs = unpack!(block = this.as_place(block, lhs)); + block = + this.build_drop_and_replace(block, lhs_expr.span, lhs, rhs).into_block(); + } else { + let rhs = unpack!(block = this.as_local_rvalue(block, rhs)); + let lhs = unpack!(block = this.as_place(block, lhs)); + this.cfg.push_assign(block, source_info, lhs, rhs); + } + + this.block_context.pop(); + block.unit() + } + ExprKind::AssignOp { op, lhs, rhs } => { + // FIXME(#28160) there is an interesting semantics + // question raised here -- should we "freeze" the + // value of the lhs here? I'm inclined to think not, + // since it seems closer to the semantics of the + // overloaded version, which takes `&mut self`. This + // only affects weird things like `x += {x += 1; x}` + // -- is that equal to `x + (x + 1)` or `2*(x+1)`? + + let lhs_ty = this.thir[lhs].ty; + + debug!("stmt_expr AssignOp block_context.push(SubExpr) : {:?}", expr); + this.block_context.push(BlockFrame::SubExpr); + + // As above, RTL. + let rhs = unpack!(block = this.as_local_operand(block, rhs)); + let lhs = unpack!(block = this.as_place(block, lhs)); + + // we don't have to drop prior contents or anything + // because AssignOp is only legal for Copy types + // (overloaded ops should be desugared into a call). 
+ let result = unpack!( + block = + this.build_binary_op(block, op, expr_span, lhs_ty, Operand::Copy(lhs), rhs) + ); + this.cfg.push_assign(block, source_info, lhs, result); + + this.block_context.pop(); + block.unit() + } + ExprKind::Continue { label } => { + this.break_scope(block, None, BreakableTarget::Continue(label), source_info) + } + ExprKind::Break { label, value } => { + this.break_scope(block, value, BreakableTarget::Break(label), source_info) + } + ExprKind::Return { value } => { + this.break_scope(block, value, BreakableTarget::Return, source_info) + } + ExprKind::Become { value } => { + let v = &this.thir[value]; + let ExprKind::Scope { value, lint_level, region_scope } = v.kind else { + span_bug!(v.span, "`thir_check_tail_calls` should have disallowed this {v:?}") + }; + + let v = &this.thir[value]; + let ExprKind::Call { ref args, fun, fn_span, .. } = v.kind else { + span_bug!(v.span, "`thir_check_tail_calls` should have disallowed this {v:?}") + }; + + this.in_scope((region_scope, source_info), lint_level, |this| { + let fun = unpack!(block = this.as_local_operand(block, fun)); + let args: Box<[_]> = args + .into_iter() + .copied() + .map(|arg| Spanned { + node: unpack!(block = this.as_local_call_operand(block, arg)), + span: this.thir.exprs[arg].span, + }) + .collect(); + + this.record_operands_moved(&args); + + debug!("expr_into_dest: fn_span={:?}", fn_span); + + unpack!(block = this.break_for_tail_call(block, &args, source_info)); + + this.cfg.terminate(block, source_info, TerminatorKind::TailCall { + func: fun, + args, + fn_span, + }); + + this.cfg.start_new_block().unit() + }) + } + _ => { + assert!( + statement_scope.is_some(), + "Should not be calling `stmt_expr` on a general expression \ + without a statement scope", + ); + + // Issue #54382: When creating temp for the value of + // expression like: + // + // `{ side_effects(); { let l = stuff(); the_value } }` + // + // it is usually better to focus on `the_value` rather + // than the 
entirety of block(s) surrounding it. + let adjusted_span = if let ExprKind::Block { block } = expr.kind + && let Some(tail_ex) = this.thir[block].expr + { + let mut expr = &this.thir[tail_ex]; + loop { + match expr.kind { + ExprKind::Block { block } + if let Some(nested_expr) = this.thir[block].expr => + { + expr = &this.thir[nested_expr]; + } + ExprKind::Scope { value: nested_expr, .. } => { + expr = &this.thir[nested_expr]; + } + _ => break, + } + } + this.block_context.push(BlockFrame::TailExpr { + tail_result_is_ignored: true, + span: expr.span, + }); + Some(expr.span) + } else { + None + }; + + let temp = unpack!( + block = this.as_temp( + block, + TempLifetime { + temp_lifetime: statement_scope, + backwards_incompatible: None + }, + expr_id, + Mutability::Not + ) + ); + + if let Some(span) = adjusted_span { + this.local_decls[temp].source_info.span = span; + this.block_context.pop(); + } + + block.unit() + } + } + } +} |
