Diffstat (limited to 'compiler/rustc_mir_transform/src')
31 files changed, 673 insertions, 662 deletions
diff --git a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs index 5bd6fdcf485..35a21a2a834 100644 --- a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs +++ b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs @@ -3,6 +3,7 @@ use rustc_ast::InlineAsmOptions; use rustc_middle::mir::*; use rustc_middle::span_bug; use rustc_middle::ty::{self, TyCtxt, layout}; +use rustc_span::sym; use rustc_target::spec::PanicStrategy; /// A pass that runs which is targeted at ensuring that codegen guarantees about @@ -33,6 +34,19 @@ impl<'tcx> crate::MirPass<'tcx> for AbortUnwindingCalls { return; } + // Represent whether this compilation target fundamentally doesn't + // support unwinding at all at an ABI level. If this the target has no + // support for unwinding then cleanup actions, for example, are all + // unnecessary and can be considered unreachable. + // + // Currently this is only true for wasm targets on panic=abort when the + // `exception-handling` target feature is disabled. In such a + // configuration it's illegal to emit exception-related instructions so + // it's not possible to unwind. + let target_supports_unwinding = !(tcx.sess.target.is_like_wasm + && tcx.sess.panic_strategy() == PanicStrategy::Abort + && !tcx.asm_target_features(def_id).contains(&sym::exception_handling)); + // Here we test for this function itself whether its ABI allows // unwinding or not. let body_ty = tcx.type_of(def_id).skip_binder(); @@ -54,12 +68,18 @@ impl<'tcx> crate::MirPass<'tcx> for AbortUnwindingCalls { let Some(terminator) = &mut block.terminator else { continue }; let span = terminator.source_info.span; - // If we see an `UnwindResume` terminator inside a function that cannot unwind, we need - // to replace it with `UnwindTerminate`. - if let TerminatorKind::UnwindResume = &terminator.kind - && !body_can_unwind - { - terminator.kind = TerminatorKind::UnwindTerminate(UnwindTerminateReason::Abi); + // If we see an `UnwindResume` terminator inside a function then: + // + // * If the target doesn't support unwinding at all, then this is an + // unreachable block. + // * If the body cannot unwind, we need to replace it with + // `UnwindTerminate`. + if let TerminatorKind::UnwindResume = &terminator.kind { + if !target_supports_unwinding { + terminator.kind = TerminatorKind::Unreachable; + } else if !body_can_unwind { + terminator.kind = TerminatorKind::UnwindTerminate(UnwindTerminateReason::Abi); + } } if block.is_cleanup { @@ -93,8 +113,9 @@ impl<'tcx> crate::MirPass<'tcx> for AbortUnwindingCalls { _ => continue, }; - if !call_can_unwind { - // If this function call can't unwind, then there's no need for it + if !call_can_unwind || !target_supports_unwinding { + // If this function call can't unwind, or if the target doesn't + // support unwinding at all, then there's no need for it // to have a landing pad. This means that we can remove any cleanup // registered for it (and turn it into `UnwindAction::Unreachable`). 
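The hunk above boils down to a three-way decision on `UnwindResume` terminators: unreachable when the target has no unwinding ABI at all, terminate when only this function's ABI forbids unwinding, and left alone otherwise. A standalone sketch of that decision, using simplified stand-in types rather than the real MIR terminators:

#[derive(Debug, PartialEq)]
enum Terminator {
    UnwindResume,
    UnwindTerminate,
    Unreachable,
}

// Simplified restatement of the rewrite performed by `AbortUnwindingCalls` above.
fn rewrite_resume(target_supports_unwinding: bool, body_can_unwind: bool) -> Terminator {
    if !target_supports_unwinding {
        // e.g. wasm with panic=abort and no `exception-handling` feature:
        // nothing can ever unwind into this block, so it is unreachable.
        Terminator::Unreachable
    } else if !body_can_unwind {
        // The function's ABI forbids unwinding out of it; abort instead.
        Terminator::UnwindTerminate
    } else {
        Terminator::UnwindResume
    }
}

fn main() {
    assert_eq!(rewrite_resume(false, true), Terminator::Unreachable);
    assert_eq!(rewrite_resume(true, false), Terminator::UnwindTerminate);
    assert_eq!(rewrite_resume(true, true), Terminator::UnwindResume);
}
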
let cleanup = block.terminator_mut().unwind_mut().unwrap(); diff --git a/compiler/rustc_mir_transform/src/add_subtyping_projections.rs b/compiler/rustc_mir_transform/src/add_subtyping_projections.rs index be4f84d64d0..a6a60fddf90 100644 --- a/compiler/rustc_mir_transform/src/add_subtyping_projections.rs +++ b/compiler/rustc_mir_transform/src/add_subtyping_projections.rs @@ -40,8 +40,7 @@ impl<'a, 'tcx> MutVisitor<'tcx> for SubTypeChecker<'a, 'tcx> { .new_temp(rval_ty, self.local_decls[place.as_ref().local].source_info.span); let new_place = Place::from(temp); self.patcher.add_assign(location, new_place, rvalue.clone()); - let subtyped = new_place.project_deeper(&[ProjectionElem::Subtype(place_ty)], self.tcx); - *rvalue = Rvalue::Use(Operand::Move(subtyped)); + *rvalue = Rvalue::Cast(CastKind::Subtype, Operand::Move(new_place), place_ty); } } } diff --git a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs index 4be67b873f7..b0bf7f484be 100644 --- a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs +++ b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs @@ -36,7 +36,9 @@ impl<'tcx> crate::MirPass<'tcx> for CleanupPostBorrowck { CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. }, ) | StatementKind::FakeRead(..) - | StatementKind::BackwardIncompatibleDropHint { .. } => statement.make_nop(), + | StatementKind::BackwardIncompatibleDropHint { .. } => { + statement.make_nop(true) + } StatementKind::Assign(box ( _, Rvalue::Cast( diff --git a/compiler/rustc_mir_transform/src/copy_prop.rs b/compiler/rustc_mir_transform/src/copy_prop.rs index cddeefca681..f0bc286a940 100644 --- a/compiler/rustc_mir_transform/src/copy_prop.rs +++ b/compiler/rustc_mir_transform/src/copy_prop.rs @@ -138,7 +138,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { if let StatementKind::StorageLive(l) | StatementKind::StorageDead(l) = stmt.kind && self.storage_to_remove.contains(l) { - stmt.make_nop(); + stmt.make_nop(true); return; } @@ -150,7 +150,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { *rhs && lhs == rhs { - stmt.make_nop(); + stmt.make_nop(true); } } } diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index 4603c695ded..814eded910d 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -86,7 +86,6 @@ use rustc_span::def_id::{DefId, LocalDefId}; use rustc_span::source_map::dummy_spanned; use rustc_span::symbol::sym; use rustc_span::{DUMMY_SP, Span}; -use rustc_target::spec::PanicStrategy; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::TyCtxtInferExt as _; use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt}; @@ -412,7 +411,7 @@ impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> { if let StatementKind::StorageLive(l) | StatementKind::StorageDead(l) = s.kind && self.remap.contains(l) { - s.make_nop(); + s.make_nop(true); } } @@ -1149,7 +1148,7 @@ fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::Typing fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { // Nothing can unwind when landing pads are off. 
- if tcx.sess.panic_strategy() == PanicStrategy::Abort { + if !tcx.sess.panic_strategy().unwinds() { return false; } @@ -1340,14 +1339,13 @@ fn create_cases<'tcx>( } } - if operation == Operation::Resume { + if operation == Operation::Resume && point.resume_arg != CTX_ARG.into() { // Move the resume argument to the destination place of the `Yield` terminator - let resume_arg = CTX_ARG; statements.push(Statement::new( source_info, StatementKind::Assign(Box::new(( point.resume_arg, - Rvalue::Use(Operand::Move(resume_arg.into())), + Rvalue::Use(Operand::Move(CTX_ARG.into())), ))), )); } @@ -1439,7 +1437,10 @@ fn check_field_tys_sized<'tcx>( } impl<'tcx> crate::MirPass<'tcx> for StateTransform { + #[instrument(level = "debug", skip(self, tcx, body), ret)] fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + debug!(def_id = ?body.source.def_id()); + let Some(old_yield_ty) = body.yield_ty() else { // This only applies to coroutines return; @@ -1518,31 +1519,7 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { cleanup_async_drops(body); } - // We also replace the resume argument and insert an `Assign`. - // This is needed because the resume argument `_2` might be live across a `yield`, in which - // case there is no `Assign` to it that the transform can turn into a store to the coroutine - // state. After the yield the slot in the coroutine state would then be uninitialized. - let resume_local = CTX_ARG; - let resume_ty = body.local_decls[resume_local].ty; - let old_resume_local = replace_local(resume_local, resume_ty, body, tcx); - - // When first entering the coroutine, move the resume argument into its old local - // (which is now a generator interior). - let source_info = SourceInfo::outermost(body.span); - let stmts = &mut body.basic_blocks_mut()[START_BLOCK].statements; - stmts.insert( - 0, - Statement::new( - source_info, - StatementKind::Assign(Box::new(( - old_resume_local.into(), - Rvalue::Use(Operand::Move(resume_local.into())), - ))), - ), - ); - let always_live_locals = always_storage_live_locals(body); - let movable = coroutine_kind.movability() == hir::Movability::Movable; let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable); @@ -1583,6 +1560,21 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { }; transform.visit_body(body); + // MIR parameters are not explicitly assigned-to when entering the MIR body. + // If we want to save their values inside the coroutine state, we need to do so explicitly. 
+ let source_info = SourceInfo::outermost(body.span); + let args_iter = body.args_iter(); + body.basic_blocks.as_mut()[START_BLOCK].statements.splice( + 0..0, + args_iter.filter_map(|local| { + let (ty, variant_index, idx) = transform.remap[local]?; + let lhs = transform.make_field(variant_index, idx, ty); + let rhs = Rvalue::Use(Operand::Move(local.into())); + let assign = StatementKind::Assign(Box::new((lhs, rhs))); + Some(Statement::new(source_info, assign)) + }), + ); + // Update our MIR struct to reflect the changes we've made body.arg_count = 2; // self, resume arg body.spread_arg = None; @@ -1891,7 +1883,7 @@ fn check_must_not_suspend_ty<'tcx>( } has_emitted } - ty::Dynamic(binder, _, _) => { + ty::Dynamic(binder, _) => { let mut has_emitted = false; for predicate in binder.iter() { if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() { diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index fe53de31f75..e970f7ff81a 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -412,18 +412,6 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> { state: &mut State<FlatSet<Scalar>>, ) -> ValueOrPlace<FlatSet<Scalar>> { let val = match rvalue { - Rvalue::Len(place) => { - let place_ty = place.ty(self.local_decls, self.tcx); - if let ty::Array(_, len) = place_ty.ty.kind() { - Const::Ty(self.tcx.types.usize, *len) - .try_eval_scalar(self.tcx, self.typing_env) - .map_or(FlatSet::Top, FlatSet::Elem) - } else if let [ProjectionElem::Deref] = place.projection[..] { - state.get_len(place.local.into(), &self.map) - } else { - FlatSet::Top - } - } Rvalue::Cast(CastKind::IntToInt | CastKind::IntToFloat, operand, ty) => { let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else { return ValueOrPlace::Value(FlatSet::Top); @@ -452,7 +440,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> { FlatSet::Top => FlatSet::Top, } } - Rvalue::Cast(CastKind::Transmute, operand, _) => { + Rvalue::Cast(CastKind::Transmute | CastKind::Subtype, operand, _) => { match self.eval_operand(operand, state) { FlatSet::Elem(op) => self.wrap_immediate(*op), FlatSet::Bottom => FlatSet::Bottom, @@ -465,22 +453,30 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> { let (val, _overflow) = self.binary_op(state, *op, left, right); val } - Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) { - FlatSet::Elem(value) => self - .ecx - .unary_op(*op, &value) - .discard_err() - .map_or(FlatSet::Top, |val| self.wrap_immediate(*val)), - FlatSet::Bottom => FlatSet::Bottom, - FlatSet::Top => FlatSet::Top, - }, + Rvalue::UnaryOp(op, operand) => { + if let UnOp::PtrMetadata = op + && let Some(place) = operand.place() + && let Some(len) = self.map.find_len(place.as_ref()) + { + return ValueOrPlace::Place(len); + } + match self.eval_operand(operand, state) { + FlatSet::Elem(value) => self + .ecx + .unary_op(*op, &value) + .discard_err() + .map_or(FlatSet::Top, |val| self.wrap_immediate(*val)), + FlatSet::Bottom => FlatSet::Bottom, + FlatSet::Top => FlatSet::Top, + } + } Rvalue::NullaryOp(null_op, ty) => { let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else { return ValueOrPlace::Value(FlatSet::Top); }; let val = match null_op { NullOp::SizeOf if layout.is_sized() => layout.size.bytes(), - NullOp::AlignOf if layout.is_sized() => layout.align.abi.bytes(), + NullOp::AlignOf if layout.is_sized() => layout.align.bytes(), 
NullOp::OffsetOf(fields) => self .ecx .tcx diff --git a/compiler/rustc_mir_transform/src/dead_store_elimination.rs b/compiler/rustc_mir_transform/src/dead_store_elimination.rs index eea2b0990d7..732c3dcd44a 100644 --- a/compiler/rustc_mir_transform/src/dead_store_elimination.rs +++ b/compiler/rustc_mir_transform/src/dead_store_elimination.rs @@ -22,21 +22,22 @@ use rustc_mir_dataflow::impls::{ LivenessTransferFunction, MaybeTransitiveLiveLocals, borrowed_locals, }; +use crate::simplify::UsedInStmtLocals; use crate::util::is_within_packed; /// Performs the optimization on the body /// /// The `borrowed` set must be a `DenseBitSet` of all the locals that are ever borrowed in this /// body. It can be generated via the [`borrowed_locals`] function. -fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { +/// Returns true if any instruction is eliminated. +fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool { let borrowed_locals = borrowed_locals(body); // If the user requests complete debuginfo, mark the locals that appear in it as live, so // we don't remove assignments to them. - let mut always_live = debuginfo_locals(body); - always_live.union(&borrowed_locals); + let debuginfo_locals = debuginfo_locals(body); - let mut live = MaybeTransitiveLiveLocals::new(&always_live) + let mut live = MaybeTransitiveLiveLocals::new(&borrowed_locals, &debuginfo_locals) .iterate_to_fixpoint(tcx, body, None) .into_results_cursor(body); @@ -75,47 +76,36 @@ fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { } for (statement_index, statement) in bb_data.statements.iter().enumerate().rev() { - let loc = Location { block: bb, statement_index }; - if let StatementKind::Assign(assign) = &statement.kind { - if !assign.1.is_safe_to_remove() { - continue; - } - } - match &statement.kind { - StatementKind::Assign(box (place, _)) - | StatementKind::SetDiscriminant { place: box place, .. } - | StatementKind::Deinit(box place) => { - if !place.is_indirect() && !always_live.contains(place.local) { - live.seek_before_primary_effect(loc); - if !live.get().contains(place.local) { - patch.push(loc); - } - } - } - StatementKind::Retag(_, _) - | StatementKind::StorageLive(_) - | StatementKind::StorageDead(_) - | StatementKind::Coverage(_) - | StatementKind::Intrinsic(_) - | StatementKind::ConstEvalCounter - | StatementKind::PlaceMention(_) - | StatementKind::BackwardIncompatibleDropHint { .. } - | StatementKind::Nop => {} - - StatementKind::FakeRead(_) | StatementKind::AscribeUserType(_, _) => { - bug!("{:?} not found in this MIR phase!", statement.kind) + if let Some(destination) = MaybeTransitiveLiveLocals::can_be_removed_if_dead( + &statement.kind, + &borrowed_locals, + &debuginfo_locals, + ) { + let loc = Location { block: bb, statement_index }; + live.seek_before_primary_effect(loc); + if !live.get().contains(destination.local) { + let drop_debuginfo = !debuginfo_locals.contains(destination.local); + // When eliminating a dead statement, we need to address + // the debug information for that statement. 
+ assert!( + drop_debuginfo || statement.kind.as_debuginfo().is_some(), + "don't know how to retain the debug information for {:?}", + statement.kind + ); + patch.push((loc, drop_debuginfo)); } } } } if patch.is_empty() && call_operands_to_move.is_empty() { - return; + return false; } + let eliminated = !patch.is_empty(); let bbs = body.basic_blocks.as_mut_preserves_cfg(); - for Location { block, statement_index } in patch { - bbs[block].statements[statement_index].make_nop(); + for (Location { block, statement_index }, drop_debuginfo) in patch { + bbs[block].statements[statement_index].make_nop(drop_debuginfo); } for (block, argument_index) in call_operands_to_move { let TerminatorKind::Call { ref mut args, .. } = bbs[block].terminator_mut().kind else { @@ -125,6 +115,8 @@ fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let Operand::Copy(place) = *arg else { bug!() }; *arg = Operand::Move(place); } + + eliminated } pub(super) enum DeadStoreElimination { @@ -145,7 +137,12 @@ impl<'tcx> crate::MirPass<'tcx> for DeadStoreElimination { } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - eliminate(tcx, body); + if eliminate(tcx, body) { + UsedInStmtLocals::new(body).remove_unused_storage_annotations(body); + for data in body.basic_blocks.as_mut_preserves_cfg() { + data.strip_nops(); + } + } } fn is_required(&self) -> bool { diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs index 9ba2d274691..1f38433fa5a 100644 --- a/compiler/rustc_mir_transform/src/dest_prop.rs +++ b/compiler/rustc_mir_transform/src/dest_prop.rs @@ -141,7 +141,7 @@ use rustc_data_structures::union_find::UnionFind; use rustc_index::bit_set::DenseBitSet; use rustc_index::interval::SparseIntervalMatrix; use rustc_index::{IndexVec, newtype_index}; -use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; +use rustc_middle::mir::visit::{MutVisitor, PlaceContext, VisitPlacesWith, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; use rustc_mir_dataflow::impls::{DefUse, MaybeLiveLocals}; @@ -153,15 +153,7 @@ pub(super) struct DestinationPropagation; impl<'tcx> crate::MirPass<'tcx> for DestinationPropagation { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - // For now, only run at MIR opt level 3. Two things need to be changed before this can be - // turned on by default: - // 1. Because of the overeager removal of storage statements, this can cause stack space - // regressions. This opt is not the place to fix this though, it's a more general - // problem in MIR. - // 2. Despite being an overall perf improvement, this still causes a 30% regression in - // keccak. We can temporarily fix this by bounding function size, but in the long term - // we should fix this by being smarter about invalidating analysis results. - sess.mir_opt_level() >= 3 + sess.mir_opt_level() >= 2 } #[tracing::instrument(level = "trace", skip(self, tcx, body))] @@ -284,7 +276,7 @@ impl<'tcx> MutVisitor<'tcx> for Merger<'tcx> { StatementKind::StorageDead(local) | StatementKind::StorageLive(local) if self.merged_locals.contains(*local) => { - statement.make_nop(); + statement.make_nop(true); return; } _ => (), @@ -299,7 +291,7 @@ impl<'tcx> MutVisitor<'tcx> for Merger<'tcx> { // (this includes the original statement we wanted to eliminate). 
if dest == place { debug!("{:?} turned into self-assignment, deleting", location); - statement.make_nop(); + statement.make_nop(true); } } _ => {} @@ -511,22 +503,6 @@ impl TwoStepIndex { } } -struct VisitPlacesWith<F>(F); - -impl<'tcx, F> Visitor<'tcx> for VisitPlacesWith<F> -where - F: FnMut(Place<'tcx>, PlaceContext), -{ - fn visit_local(&mut self, local: Local, ctxt: PlaceContext, _: Location) { - (self.0)(local.into(), ctxt); - } - - fn visit_place(&mut self, place: &Place<'tcx>, ctxt: PlaceContext, location: Location) { - (self.0)(*place, ctxt); - self.visit_projection(place.as_ref(), ctxt, location); - } -} - /// Add points depending on the result of the given dataflow analysis. fn save_as_intervals<'tcx>( elements: &DenseLocationMap, @@ -567,13 +543,15 @@ fn save_as_intervals<'tcx>( // the written-to locals as live in the second half of the statement. // We also ensure that operands read by terminators conflict with writes by that terminator. // For instance a function call may read args after having written to the destination. - VisitPlacesWith(|place, ctxt| match DefUse::for_place(place, ctxt) { - DefUse::Def | DefUse::Use | DefUse::PartialWrite => { - if let Some(relevant) = relevant.shrink[place.local] { - values.insert(relevant, twostep); + VisitPlacesWith(|place: Place<'tcx>, ctxt| { + if let Some(relevant) = relevant.shrink[place.local] { + match DefUse::for_place(place, ctxt) { + DefUse::Def | DefUse::Use | DefUse::PartialWrite => { + values.insert(relevant, twostep); + } + DefUse::NonUse => {} } } - DefUse::NonUse => {} }) .visit_terminator(term, loc); @@ -588,15 +566,32 @@ fn save_as_intervals<'tcx>( twostep = TwoStepIndex::from_u32(twostep.as_u32() + 1); debug_assert_eq!(twostep, two_step_loc(loc, Effect::After)); append_at(&mut values, &state, twostep); - // Ensure we have a non-zero live range even for dead stores. This is done by marking - // all the written-to locals as live in the second half of the statement. - VisitPlacesWith(|place, ctxt| match DefUse::for_place(place, ctxt) { - DefUse::Def | DefUse::PartialWrite => { - if let Some(relevant) = relevant.shrink[place.local] { - values.insert(relevant, twostep); + // Like terminators, ensure we have a non-zero live range even for dead stores. + // Some rvalues interleave reads and writes, for instance `Rvalue::Aggregate`, see + // https://github.com/rust-lang/rust/issues/146383. By precaution, treat statements + // as behaving so by default. + // We make an exception for simple assignments `_a.stuff = {copy|move} _b.stuff`, + // as marking `_b` live here would prevent unification. 
+ let is_simple_assignment = match stmt.kind { + StatementKind::Assign(box ( + lhs, + Rvalue::CopyForDeref(rhs) + | Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)), + )) => lhs.projection == rhs.projection, + _ => false, + }; + VisitPlacesWith(|place: Place<'tcx>, ctxt| { + if let Some(relevant) = relevant.shrink[place.local] { + match DefUse::for_place(place, ctxt) { + DefUse::Def | DefUse::PartialWrite => { + values.insert(relevant, twostep); + } + DefUse::Use if !is_simple_assignment => { + values.insert(relevant, twostep); + } + DefUse::Use | DefUse::NonUse => {} } } - DefUse::Use | DefUse::NonUse => {} }) .visit_statement(stmt, loc); diff --git a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs index abbff1c48dd..7c66783548e 100644 --- a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs +++ b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs @@ -101,12 +101,15 @@ fn has_ffi_unwind_calls(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> bool { } fn required_panic_strategy(tcx: TyCtxt<'_>, _: LocalCrate) -> Option<PanicStrategy> { + let local_strategy = tcx.sess.panic_strategy(); + if tcx.is_panic_runtime(LOCAL_CRATE) { - return Some(tcx.sess.panic_strategy()); + return Some(local_strategy); } - if tcx.sess.panic_strategy() == PanicStrategy::Abort { - return Some(PanicStrategy::Abort); + match local_strategy { + PanicStrategy::Abort | PanicStrategy::ImmediateAbort => return Some(local_strategy), + _ => {} } for def_id in tcx.hir_body_owners() { diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs index bf6aa800d20..3ff8dc6dbb3 100644 --- a/compiler/rustc_mir_transform/src/gvn.rs +++ b/compiler/rustc_mir_transform/src/gvn.rs @@ -85,8 +85,10 @@ //! that contain `AllocId`s. use std::borrow::Cow; +use std::hash::{Hash, Hasher}; use either::Either; +use hashbrown::hash_table::{Entry, HashTable}; use itertools::Itertools as _; use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx}; use rustc_const_eval::const_eval::DummyMachine; @@ -94,7 +96,7 @@ use rustc_const_eval::interpret::{ ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar, intern_const_alloc_for_constprop, }; -use rustc_data_structures::fx::{FxIndexSet, MutableValues}; +use rustc_data_structures::fx::FxHasher; use rustc_data_structures::graph::dominators::Dominators; use rustc_hir::def::DefKind; use rustc_index::bit_set::DenseBitSet; @@ -152,9 +154,29 @@ impl<'tcx> crate::MirPass<'tcx> for GVN { } newtype_index! { + /// This represents a `Value` in the symbolic execution. + #[debug_format = "_v{}"] struct VnIndex {} } +/// Marker type to forbid hashing and comparing opaque values. +/// This struct should only be constructed by `ValueSet::insert_unique` to ensure we use that +/// method to create non-unifiable values. It will ICE if used in `ValueSet::insert`. +#[derive(Copy, Clone, Debug, Eq)] +struct VnOpaque; +impl PartialEq for VnOpaque { + fn eq(&self, _: &VnOpaque) -> bool { + // ICE if we try to compare unique values + unreachable!() + } +} +impl Hash for VnOpaque { + fn hash<T: Hasher>(&self, _: &mut T) { + // ICE if we try to hash unique values + unreachable!() + } +} + #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] enum AddressKind { Ref(BorrowKind), @@ -166,15 +188,17 @@ enum Value<'tcx> { // Root values. /// Used to represent values we know nothing about. /// The `usize` is a counter incremented by `new_opaque`. 
- Opaque(usize), + Opaque(VnOpaque), /// Evaluated or unevaluated constant value. Constant { value: Const<'tcx>, /// Some constants do not have a deterministic value. To avoid merging two instances of the /// same `Const`, we assign them an additional integer index. - // `disambiguator` is 0 iff the constant is deterministic. - disambiguator: usize, + // `disambiguator` is `None` iff the constant is deterministic. + disambiguator: Option<VnOpaque>, }, + + // Aggregates. /// An aggregate value, either tuple/closure/struct/enum. /// This does not contain unions, as we cannot reason with the value. Aggregate(VariantIdx, Vec<VnIndex>), @@ -192,7 +216,7 @@ enum Value<'tcx> { place: Place<'tcx>, kind: AddressKind, /// Give each borrow and pointer a different provenance, so we don't merge them. - provenance: usize, + provenance: VnOpaque, }, // Extractions. @@ -200,8 +224,6 @@ enum Value<'tcx> { Projection(VnIndex, ProjectionElem<VnIndex, ()>), /// Discriminant of the given value. Discriminant(VnIndex), - /// Length of an array or slice. - Len(VnIndex), // Operations. NullaryOp(NullOp<'tcx>, Ty<'tcx>), @@ -213,6 +235,107 @@ enum Value<'tcx> { }, } +/// Stores and deduplicates pairs of `(Value, Ty)` into in `VnIndex` numbered values. +/// +/// This data structure is mostly a partial reimplementation of `FxIndexMap<VnIndex, (Value, Ty)>`. +/// We do not use a regular `FxIndexMap` to skip hashing values that are unique by construction, +/// like opaque values, address with provenance and non-deterministic constants. +struct ValueSet<'tcx> { + indices: HashTable<VnIndex>, + hashes: IndexVec<VnIndex, u64>, + values: IndexVec<VnIndex, Value<'tcx>>, + types: IndexVec<VnIndex, Ty<'tcx>>, +} + +impl<'tcx> ValueSet<'tcx> { + fn new(num_values: usize) -> ValueSet<'tcx> { + ValueSet { + indices: HashTable::with_capacity(num_values), + hashes: IndexVec::with_capacity(num_values), + values: IndexVec::with_capacity(num_values), + types: IndexVec::with_capacity(num_values), + } + } + + /// Insert a `(Value, Ty)` pair without hashing or deduplication. + /// This always creates a new `VnIndex`. + #[inline] + fn insert_unique( + &mut self, + ty: Ty<'tcx>, + value: impl FnOnce(VnOpaque) -> Value<'tcx>, + ) -> VnIndex { + let value = value(VnOpaque); + + debug_assert!(match value { + Value::Opaque(_) | Value::Address { .. } => true, + Value::Constant { disambiguator, .. } => disambiguator.is_some(), + _ => false, + }); + + let index = self.hashes.push(0); + let _index = self.types.push(ty); + debug_assert_eq!(index, _index); + let _index = self.values.push(value); + debug_assert_eq!(index, _index); + index + } + + /// Insert a `(Value, Ty)` pair to be deduplicated. + /// Returns `true` as second tuple field if this value did not exist previously. + #[allow(rustc::pass_by_value)] // closures take `&VnIndex` + fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> (VnIndex, bool) { + debug_assert!(match value { + Value::Opaque(_) | Value::Address { .. } => false, + Value::Constant { disambiguator, .. 
} => disambiguator.is_none(), + _ => true, + }); + + let hash: u64 = { + let mut h = FxHasher::default(); + value.hash(&mut h); + ty.hash(&mut h); + h.finish() + }; + + let eq = |index: &VnIndex| self.values[*index] == value && self.types[*index] == ty; + let hasher = |index: &VnIndex| self.hashes[*index]; + match self.indices.entry(hash, eq, hasher) { + Entry::Occupied(entry) => { + let index = *entry.get(); + (index, false) + } + Entry::Vacant(entry) => { + let index = self.hashes.push(hash); + entry.insert(index); + let _index = self.values.push(value); + debug_assert_eq!(index, _index); + let _index = self.types.push(ty); + debug_assert_eq!(index, _index); + (index, true) + } + } + } + + /// Return the `Value` associated with the given `VnIndex`. + #[inline] + fn value(&self, index: VnIndex) -> &Value<'tcx> { + &self.values[index] + } + + /// Return the type associated with the given `VnIndex`. + #[inline] + fn ty(&self, index: VnIndex) -> Ty<'tcx> { + self.types[index] + } + + /// Replace the value associated with `index` with an opaque value. + #[inline] + fn forget(&mut self, index: VnIndex) { + self.values[index] = Value::Opaque(VnOpaque); + } +} + struct VnState<'body, 'tcx> { tcx: TyCtxt<'tcx>, ecx: InterpCx<'tcx, DummyMachine>, @@ -223,11 +346,9 @@ struct VnState<'body, 'tcx> { /// Locals that are assigned that value. // This vector does not hold all the values of `VnIndex` that we create. rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>, - values: FxIndexSet<(Value<'tcx>, Ty<'tcx>)>, + values: ValueSet<'tcx>, /// Values evaluated as constants if possible. evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>, - /// Counter to generate different values. - next_opaque: usize, /// Cache the deref values. derefs: Vec<VnIndex>, ssa: &'body SsaLocals, @@ -258,9 +379,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { is_coroutine: body.coroutine.is_some(), locals: IndexVec::from_elem(None, local_decls), rev_locals: IndexVec::with_capacity(num_values), - values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()), + values: ValueSet::new(num_values), evaluated: IndexVec::with_capacity(num_values), - next_opaque: 1, derefs: Vec::new(), ssa, dominators, @@ -274,8 +394,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { #[instrument(level = "trace", skip(self), ret)] fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex { - let (index, new) = self.values.insert_full((value, ty)); - let index = VnIndex::from_usize(index); + let (index, new) = self.values.insert(ty, value); if new { // Grow `evaluated` and `rev_locals` here to amortize the allocations. let evaluated = self.eval_to_const(index); @@ -287,18 +406,16 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { index } - fn next_opaque(&mut self) -> usize { - let next_opaque = self.next_opaque; - self.next_opaque += 1; - next_opaque - } - /// Create a new `Value` for which we have no information at all, except that it is distinct /// from all the others. #[instrument(level = "trace", skip(self), ret)] fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex { - let value = Value::Opaque(self.next_opaque()); - self.insert(ty, value) + let index = self.values.insert_unique(ty, Value::Opaque); + let _index = self.evaluated.push(None); + debug_assert_eq!(index, _index); + let _index = self.rev_locals.push(SmallVec::new()); + debug_assert_eq!(index, _index); + index } /// Create a new `Value::Address` distinct from all the others. 
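The `ValueSet` above keeps deduplicated values in a hash table keyed by precomputed hashes, while opaque values, addresses and non-deterministic constants bypass hashing entirely and always receive a fresh `VnIndex`. A minimal standalone sketch of that split, using plain `std` collections instead of hashbrown's raw `HashTable`; the names are illustrative, not the rustc API:

use std::collections::HashMap;

// Toy value domain: constants can be unified, opaque values never can.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Value {
    Const(i64),
    Opaque,
}

#[derive(Default)]
struct ValueSet {
    dedup: HashMap<Value, usize>, // only deterministic, hashable values live here
    values: Vec<Value>,           // every index, unified or unique, owns a slot here
}

impl ValueSet {
    /// Deduplicating insert: equal values share one index.
    fn insert(&mut self, v: Value) -> usize {
        assert!(!matches!(v, Value::Opaque));
        if let Some(&idx) = self.dedup.get(&v) {
            return idx;
        }
        let idx = self.values.len();
        self.values.push(v.clone());
        self.dedup.insert(v, idx);
        idx
    }

    /// Unique insert: never hashed, never unified with anything else.
    fn insert_unique(&mut self) -> usize {
        self.values.push(Value::Opaque);
        self.values.len() - 1
    }
}

fn main() {
    let mut set = ValueSet::default();
    assert_eq!(set.insert(Value::Const(1)), set.insert(Value::Const(1)));
    assert_ne!(set.insert_unique(), set.insert_unique());
}
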
@@ -311,18 +428,49 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()), }; - let value = Value::Address { place, kind, provenance: self.next_opaque() }; - self.insert(ty, value) + let index = + self.values.insert_unique(ty, |provenance| Value::Address { place, kind, provenance }); + let evaluated = self.eval_to_const(index); + let _index = self.evaluated.push(evaluated); + debug_assert_eq!(index, _index); + let _index = self.rev_locals.push(SmallVec::new()); + debug_assert_eq!(index, _index); + index + } + + #[instrument(level = "trace", skip(self), ret)] + fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex { + let (index, new) = if value.is_deterministic() { + // The constant is deterministic, no need to disambiguate. + let constant = Value::Constant { value, disambiguator: None }; + self.values.insert(value.ty(), constant) + } else { + // Multiple mentions of this constant will yield different values, + // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`. + let index = self.values.insert_unique(value.ty(), |disambiguator| Value::Constant { + value, + disambiguator: Some(disambiguator), + }); + (index, true) + }; + if new { + let evaluated = self.eval_to_const(index); + let _index = self.evaluated.push(evaluated); + debug_assert_eq!(index, _index); + let _index = self.rev_locals.push(SmallVec::new()); + debug_assert_eq!(index, _index); + } + index } #[inline] fn get(&self, index: VnIndex) -> &Value<'tcx> { - &self.values.get_index(index.as_usize()).unwrap().0 + self.values.value(index) } #[inline] fn ty(&self, index: VnIndex) -> Ty<'tcx> { - self.values.get_index(index.as_usize()).unwrap().1 + self.values.ty(index) } /// Record that `local` is assigned `value`. `local` must be SSA. @@ -333,33 +481,18 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { self.rev_locals[value].push(local); } - fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex { - let disambiguator = if value.is_deterministic() { - // The constant is deterministic, no need to disambiguate. - 0 - } else { - // Multiple mentions of this constant will yield different values, - // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`. - let disambiguator = self.next_opaque(); - // `disambiguator: 0` means deterministic. - debug_assert_ne!(disambiguator, 0); - disambiguator - }; - self.insert(value.ty(), Value::Constant { value, disambiguator }) - } - fn insert_bool(&mut self, flag: bool) -> VnIndex { // Booleans are deterministic. let value = Const::from_bool(self.tcx, flag); debug_assert!(value.is_deterministic()); - self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: 0 }) + self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: None }) } fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex { // Scalars are deterministic. 
let value = Const::from_scalar(self.tcx, scalar, ty); debug_assert!(value.is_deterministic()); - self.insert(ty, Value::Constant { value, disambiguator: 0 }) + self.insert(ty, Value::Constant { value, disambiguator: None }) } fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex { @@ -374,8 +507,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { fn invalidate_derefs(&mut self) { for deref in std::mem::take(&mut self.derefs) { - let opaque = self.next_opaque(); - self.values.get_index_mut2(deref.index()).unwrap().0 = Value::Opaque(opaque); + self.values.forget(deref); } } @@ -477,11 +609,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?; discr_value.into() } - Len(slice) => { - let slice = self.evaluated[slice].as_ref()?; - let len = slice.len(&self.ecx).discard_err()?; - ImmTy::from_uint(len, ty).into() - } NullaryOp(null_op, arg_ty) => { let arg_layout = self.ecx.layout_of(arg_ty).ok()?; if let NullOp::SizeOf | NullOp::AlignOf = null_op @@ -491,7 +618,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } let val = match null_op { NullOp::SizeOf => arg_layout.size.bytes(), - NullOp::AlignOf => arg_layout.align.abi.bytes(), + NullOp::AlignOf => arg_layout.align.bytes(), NullOp::OffsetOf(fields) => self .ecx .tcx @@ -529,7 +656,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?; res.into() } - CastKind::Transmute => { + CastKind::Transmute | CastKind::Subtype => { let value = self.evaluated[value].as_ref()?; // `offset` for immediates generally only supports projections that match the // type of the immediate. However, as a HACK, we exploit that it can also do @@ -661,7 +788,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { ProjectionElem::Subslice { from, to, from_end } } ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()), - ProjectionElem::Subtype(_) => ProjectionElem::Subtype(()), ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()), }; @@ -841,7 +967,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } // Operations. - Rvalue::Len(ref mut place) => return self.simplify_len(place, location), Rvalue::Cast(ref mut kind, ref mut value, to) => { return self.simplify_cast(kind, value, to, location); } @@ -1049,7 +1174,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { if op == UnOp::PtrMetadata { let mut was_updated = false; loop { - match self.get(arg_index) { + arg_index = match self.get(arg_index) { // Pointer casts that preserve metadata, such as // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`. // It's critical that this not eliminate cases like @@ -1061,9 +1186,19 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { Value::Cast { kind: CastKind::PtrToPtr, value: inner } if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) => { - arg_index = *inner; - was_updated = true; - continue; + *inner + } + + // We have an unsizing cast, which assigns the length to wide pointer metadata. + Value::Cast { + kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _), + value: from, + } if let Some(from) = self.ty(*from).builtin_deref(true) + && let ty::Array(_, len) = from.kind() + && let Some(to) = self.ty(arg_index).builtin_deref(true) + && let ty::Slice(..) = to.kind() => + { + return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len))); } // `&mut *p`, `&raw *p`, etc don't change metadata. 
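The new `UnOp::PtrMetadata` arm above folds the metadata of a pointer produced by an unsizing cast from an array into a constant length. At the source level that corresponds to patterns like the following (illustrative only; the fold itself happens on MIR, not on this surface code):

// The slice is produced by unsizing a fixed-size array, so its length is
// statically known; `slice.len()` is the `PtrMetadata` the GVN arm can fold.
fn len_of_unsized(buf: &[u8; 16]) -> usize {
    let slice: &[u8] = buf; // unsizing coercion stores `16` as wide-pointer metadata
    slice.len()
}

fn main() {
    assert_eq!(len_of_unsized(&[0u8; 16]), 16);
}
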
@@ -1072,18 +1207,16 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { place.as_ref() && let Some(local_index) = self.locals[local] => { - arg_index = local_index; - was_updated = true; - continue; + local_index } - _ => { - if was_updated && let Some(op) = self.try_as_operand(arg_index, location) { - *arg_op = op; - } - break; - } - } + _ => break, + }; + was_updated = true; + } + + if was_updated && let Some(op) = self.try_as_operand(arg_index, location) { + *arg_op = op; } } @@ -1407,39 +1540,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { Some(self.insert(to, Value::Cast { kind, value })) } - fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> { - // Trivial case: we are fetching a statically known length. - let place_ty = place.ty(self.local_decls, self.tcx).ty; - if let ty::Array(_, len) = place_ty.kind() { - return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len))); - } - - let mut inner = self.simplify_place_value(place, location)?; - - // The length information is stored in the wide pointer. - // Reborrowing copies length information from one pointer to the other. - while let Value::Address { place: borrowed, .. } = self.get(inner) - && let [PlaceElem::Deref] = borrowed.projection[..] - && let Some(borrowed) = self.locals[borrowed.local] - { - inner = borrowed; - } - - // We have an unsizing cast, which assigns the length to wide pointer metadata. - if let Value::Cast { kind, value: from } = self.get(inner) - && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind - && let Some(from) = self.ty(*from).builtin_deref(true) - && let ty::Array(_, len) = from.kind() - && let Some(to) = self.ty(inner).builtin_deref(true) - && let ty::Slice(..) = to.kind() - { - return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len))); - } - - // Fallback: a symbolic `Len`. - Some(self.insert(self.tcx.types.usize, Value::Len(inner))) - } - fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool { let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx); let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx); @@ -1605,7 +1705,7 @@ impl<'tcx> VnState<'_, 'tcx> { // This was already constant in MIR, do not change it. If the constant is not // deterministic, adding an additional mention of it in MIR will not give the same value as // the former mention. 
- if let Value::Constant { value, disambiguator: 0 } = *self.get(index) { + if let Value::Constant { value, disambiguator: None } = *self.get(index) { debug_assert!(value.is_deterministic()); return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value }); } @@ -1777,7 +1877,7 @@ impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> { StatementKind::StorageLive(l) | StatementKind::StorageDead(l) if self.reused_locals.contains(l) => { - stmt.make_nop() + stmt.make_nop(true) } _ => self.super_statement(stmt, loc), } diff --git a/compiler/rustc_mir_transform/src/impossible_predicates.rs b/compiler/rustc_mir_transform/src/impossible_predicates.rs index b03518de00a..883ee32bdec 100644 --- a/compiler/rustc_mir_transform/src/impossible_predicates.rs +++ b/compiler/rustc_mir_transform/src/impossible_predicates.rs @@ -28,6 +28,7 @@ use rustc_middle::mir::{Body, START_BLOCK, TerminatorKind}; use rustc_middle::ty::{TyCtxt, TypeFlags, TypeVisitableExt}; +use rustc_span::def_id::DefId; use rustc_trait_selection::traits; use tracing::trace; @@ -35,23 +36,29 @@ use crate::pass_manager::MirPass; pub(crate) struct ImpossiblePredicates; +fn has_impossible_predicates(tcx: TyCtxt<'_>, def_id: DefId) -> bool { + let predicates = tcx.predicates_of(def_id).instantiate_identity(tcx); + tracing::trace!(?predicates); + let predicates = predicates.predicates.into_iter().filter(|p| { + !p.has_type_flags( + // Only consider global clauses to simplify. + TypeFlags::HAS_FREE_LOCAL_NAMES + // Clauses that refer to unevaluated constants as they cause cycles. + | TypeFlags::HAS_CT_PROJECTION, + ) + }); + let predicates: Vec<_> = traits::elaborate(tcx, predicates).collect(); + tracing::trace!(?predicates); + predicates.references_error() || traits::impossible_predicates(tcx, predicates) +} + impl<'tcx> MirPass<'tcx> for ImpossiblePredicates { #[tracing::instrument(level = "trace", skip(self, tcx, body))] fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { tracing::trace!(def_id = ?body.source.def_id()); - let predicates = tcx.predicates_of(body.source.def_id()).instantiate_identity(tcx); - tracing::trace!(?predicates); - let predicates = predicates.predicates.into_iter().filter(|p| { - !p.has_type_flags( - // Only consider global clauses to simplify. - TypeFlags::HAS_FREE_LOCAL_NAMES - // Clauses that refer to unevaluated constants as they cause cycles. - | TypeFlags::HAS_CT_PROJECTION, - ) - }); - let predicates: Vec<_> = traits::elaborate(tcx, predicates).collect(); - tracing::trace!(?predicates); - if predicates.references_error() || traits::impossible_predicates(tcx, predicates) { + let impossible = body.tainted_by_errors.is_some() + || has_impossible_predicates(tcx, body.source.def_id()); + if impossible { trace!("found unsatisfiable predicates"); // Clear the body to only contain a single `unreachable` statement. 
let bbs = body.basic_blocks.as_mut(); diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 3d49eb4e8ef..8593e25d6aa 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -21,7 +21,7 @@ use tracing::{debug, instrument, trace, trace_span}; use crate::cost_checker::{CostChecker, is_call_like}; use crate::deref_separator::deref_finder; -use crate::simplify::simplify_cfg; +use crate::simplify::{UsedInStmtLocals, simplify_cfg}; use crate::validate::validate_types; use crate::{check_inline, util}; @@ -935,7 +935,7 @@ fn inline_call<'tcx, I: Inliner<'tcx>>( in_cleanup_block: false, return_block, tcx, - always_live_locals: DenseBitSet::new_filled(callee_body.local_decls.len()), + always_live_locals: UsedInStmtLocals::new(&callee_body).locals, }; // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones @@ -995,6 +995,10 @@ fn inline_call<'tcx, I: Inliner<'tcx>>( // people working on rust can build with or without debuginfo while // still getting consistent results from the mir-opt tests. caller_body.var_debug_info.append(&mut callee_body.var_debug_info); + } else { + for bb in callee_body.basic_blocks_mut() { + bb.drop_debuginfo(); + } } caller_body.basic_blocks_mut().append(callee_body.basic_blocks_mut()); diff --git a/compiler/rustc_mir_transform/src/jump_threading.rs b/compiler/rustc_mir_transform/src/jump_threading.rs index f9e642e28eb..68298767e7f 100644 --- a/compiler/rustc_mir_transform/src/jump_threading.rs +++ b/compiler/rustc_mir_transform/src/jump_threading.rs @@ -84,7 +84,7 @@ impl<'tcx> crate::MirPass<'tcx> for JumpThreading { body, arena, map: Map::new(tcx, body, Some(MAX_PLACES)), - loop_headers: loop_headers(body), + maybe_loop_headers: maybe_loop_headers(body), opportunities: Vec::new(), }; @@ -100,7 +100,7 @@ impl<'tcx> crate::MirPass<'tcx> for JumpThreading { // Verify that we do not thread through a loop header. for to in opportunities.iter() { - assert!(to.chain.iter().all(|&block| !finder.loop_headers.contains(block))); + assert!(to.chain.iter().all(|&block| !finder.maybe_loop_headers.contains(block))); } OpportunitySet::new(body, opportunities).apply(body); } @@ -124,7 +124,7 @@ struct TOFinder<'a, 'tcx> { ecx: InterpCx<'tcx, DummyMachine>, body: &'a Body<'tcx>, map: Map<'tcx>, - loop_headers: DenseBitSet<BasicBlock>, + maybe_loop_headers: DenseBitSet<BasicBlock>, /// We use an arena to avoid cloning the slices when cloning `state`. arena: &'a DroplessArena, opportunities: Vec<ThreadingOpportunity>, @@ -190,7 +190,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { #[instrument(level = "trace", skip(self))] fn start_from_switch(&mut self, bb: BasicBlock) { let bbdata = &self.body[bb]; - if bbdata.is_cleanup || self.loop_headers.contains(bb) { + if bbdata.is_cleanup || self.maybe_loop_headers.contains(bb) { return; } let Some((discr, targets)) = bbdata.terminator().kind.as_switch() else { return }; @@ -235,7 +235,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { depth: usize, ) { // Do not thread through loop headers. - if self.loop_headers.contains(bb) { + if self.maybe_loop_headers.contains(bb) { return; } @@ -833,20 +833,28 @@ enum Update { Decr, } -/// Compute the set of loop headers in the given body. We define a loop header as a block which has -/// at least a predecessor which it dominates. This definition is only correct for reducible CFGs. -/// But if the CFG is already irreducible, there is no point in trying much harder. -/// is already irreducible. 
-fn loop_headers(body: &Body<'_>) -> DenseBitSet<BasicBlock> { - let mut loop_headers = DenseBitSet::new_empty(body.basic_blocks.len()); - let dominators = body.basic_blocks.dominators(); - // Only visit reachable blocks. - for (bb, bbdata) in traversal::preorder(body) { +/// Compute the set of loop headers in the given body. A loop header is usually defined as a block +/// which dominates one of its predecessors. This definition is only correct for reducible CFGs. +/// However, computing dominators is expensive, so we approximate according to the post-order +/// traversal order. A loop header for us is a block which is visited after its predecessor in +/// post-order. This is ok as we mostly need a heuristic. +fn maybe_loop_headers(body: &Body<'_>) -> DenseBitSet<BasicBlock> { + let mut maybe_loop_headers = DenseBitSet::new_empty(body.basic_blocks.len()); + let mut visited = DenseBitSet::new_empty(body.basic_blocks.len()); + for (bb, bbdata) in traversal::postorder(body) { + // Post-order means we visit successors before the block for acyclic CFGs. + // If the successor is not visited yet, consider it a loop header. for succ in bbdata.terminator().successors() { - if dominators.dominates(succ, bb) { - loop_headers.insert(succ); + if !visited.contains(succ) { + maybe_loop_headers.insert(succ); } } + + // Only mark `bb` as visited after we checked the successors, in case we have a self-loop. + // bb1: goto -> bb1; + let _new = visited.insert(bb); + debug_assert!(_new); } - loop_headers + + maybe_loop_headers } diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs index 481c7941909..93abc0f8860 100644 --- a/compiler/rustc_mir_transform/src/known_panics_lint.rs +++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs @@ -441,7 +441,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { | Rvalue::Use(..) | Rvalue::CopyForDeref(..) | Rvalue::Repeat(..) - | Rvalue::Len(..) | Rvalue::Cast(..) | Rvalue::ShallowInitBox(..) | Rvalue::Discriminant(..) @@ -604,27 +603,13 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { return None; } - Len(place) => { - let len = if let ty::Array(_, n) = place.ty(self.local_decls(), self.tcx).ty.kind() - { - n.try_to_target_usize(self.tcx)? - } else { - match self.get_const(place)? { - Value::Immediate(src) => src.len(&self.ecx).discard_err()?, - Value::Aggregate { fields, .. } => fields.len() as u64, - Value::Uninit => return None, - } - }; - ImmTy::from_scalar(Scalar::from_target_usize(len, self), layout).into() - } - Ref(..) | RawPtr(..) 
=> return None, NullaryOp(ref null_op, ty) => { let op_layout = self.ecx.layout_of(ty).ok()?; let val = match null_op { NullOp::SizeOf => op_layout.size.bytes(), - NullOp::AlignOf => op_layout.align.abi.bytes(), + NullOp::AlignOf => op_layout.align.bytes(), NullOp::OffsetOf(fields) => self .tcx .offset_of_subfield(self.typing_env, op_layout, fields.iter()) @@ -652,7 +637,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { let res = self.ecx.float_to_float_or_int(&value, to).discard_err()?; res.into() } - CastKind::Transmute => { + CastKind::Transmute | CastKind::Subtype => { let value = self.eval_operand(value)?; let to = self.ecx.layout_of(to).ok()?; // `offset` for immediates only supports scalar/scalar-pair ABIs, diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs index 1a91d6bd7da..1b90e9158f6 100644 --- a/compiler/rustc_mir_transform/src/large_enums.rs +++ b/compiler/rustc_mir_transform/src/large_enums.rs @@ -156,7 +156,7 @@ impl<'tcx> crate::MirPass<'tcx> for EnumSizeOpt { patch.add_statement(location, stmt); } - st.make_nop(); + st.make_nop(true); } } diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 1663dfa744f..9ff7e0b5500 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -156,7 +156,6 @@ declare_passes! { mod match_branches : MatchBranchSimplification; mod mentioned_items : MentionedItems; mod multiple_return_terminators : MultipleReturnTerminators; - mod nrvo : RenameReturnPlace; mod post_drop_elaboration : CheckLiveDrops; mod prettify : ReorderBasicBlocks, ReorderLocals; mod promote_consts : PromoteTemps; @@ -715,7 +714,6 @@ pub(crate) fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<' &jump_threading::JumpThreading, &early_otherwise_branch::EarlyOtherwiseBranch, &simplify_comparison_integral::SimplifyComparisonIntegral, - &dest_prop::DestinationPropagation, &o1(simplify_branches::SimplifyConstCondition::Final), &o1(remove_noop_landing_pads::RemoveNoopLandingPads), &o1(simplify::SimplifyCfg::Final), @@ -723,7 +721,7 @@ pub(crate) fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<' &strip_debuginfo::StripDebugInfo, ©_prop::CopyProp, &dead_store_elimination::DeadStoreElimination::Final, - &nrvo::RenameReturnPlace, + &dest_prop::DestinationPropagation, &simplify::SimplifyLocals::Final, &multiple_return_terminators::MultipleReturnTerminators, &large_enums::EnumSizeOpt { discrepancy: 128 }, diff --git a/compiler/rustc_mir_transform/src/lint.rs b/compiler/rustc_mir_transform/src/lint.rs index f472c7cb493..2ab49645dc4 100644 --- a/compiler/rustc_mir_transform/src/lint.rs +++ b/compiler/rustc_mir_transform/src/lint.rs @@ -6,7 +6,7 @@ use std::borrow::Cow; use rustc_data_structures::fx::FxHashSet; use rustc_index::bit_set::DenseBitSet; -use rustc_middle::mir::visit::{PlaceContext, Visitor}; +use rustc_middle::mir::visit::{PlaceContext, VisitPlacesWith, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; use rustc_mir_dataflow::impls::{MaybeStorageDead, MaybeStorageLive, always_storage_live_locals}; @@ -79,15 +79,39 @@ impl<'a, 'tcx> Visitor<'tcx> for Lint<'a, 'tcx> { fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { match &statement.kind { StatementKind::Assign(box (dest, rvalue)) => { - if let Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) = rvalue { - // The sides of an assignment must not alias. 
Currently this just checks whether - // the places are identical. - if dest == src { - self.fail( - location, - "encountered `Assign` statement with overlapping memory", - ); - } + let forbid_aliasing = match rvalue { + Rvalue::Use(..) + | Rvalue::CopyForDeref(..) + | Rvalue::Repeat(..) + | Rvalue::Aggregate(..) + | Rvalue::Cast(..) + | Rvalue::ShallowInitBox(..) + | Rvalue::WrapUnsafeBinder(..) => true, + Rvalue::ThreadLocalRef(..) + | Rvalue::NullaryOp(..) + | Rvalue::UnaryOp(..) + | Rvalue::BinaryOp(..) + | Rvalue::Ref(..) + | Rvalue::RawPtr(..) + | Rvalue::Discriminant(..) => false, + }; + // The sides of an assignment must not alias. + if forbid_aliasing { + VisitPlacesWith(|src: Place<'tcx>, _| { + if *dest == src + || (dest.local == src.local + && !dest.is_indirect() + && !src.is_indirect()) + { + self.fail( + location, + format!( + "encountered `{statement:?}` statement with overlapping memory" + ), + ); + } + }) + .visit_rvalue(rvalue, location); } } StatementKind::StorageLive(local) => { diff --git a/compiler/rustc_mir_transform/src/nrvo.rs b/compiler/rustc_mir_transform/src/nrvo.rs deleted file mode 100644 index 965002aae04..00000000000 --- a/compiler/rustc_mir_transform/src/nrvo.rs +++ /dev/null @@ -1,234 +0,0 @@ -//! See the docs for [`RenameReturnPlace`]. - -use rustc_hir::Mutability; -use rustc_index::bit_set::DenseBitSet; -use rustc_middle::bug; -use rustc_middle::mir::visit::{MutVisitor, NonUseContext, PlaceContext, Visitor}; -use rustc_middle::mir::{self, BasicBlock, Local, Location}; -use rustc_middle::ty::TyCtxt; -use tracing::{debug, trace}; - -/// This pass looks for MIR that always copies the same local into the return place and eliminates -/// the copy by renaming all uses of that local to `_0`. -/// -/// This allows LLVM to perform an optimization similar to the named return value optimization -/// (NRVO) that is guaranteed in C++. This avoids a stack allocation and `memcpy` for the -/// relatively common pattern of allocating a buffer on the stack, mutating it, and returning it by -/// value like so: -/// -/// ```rust -/// fn foo(init: fn(&mut [u8; 1024])) -> [u8; 1024] { -/// let mut buf = [0; 1024]; -/// init(&mut buf); -/// buf -/// } -/// ``` -/// -/// For now, this pass is very simple and only capable of eliminating a single copy. A more general -/// version of copy propagation, such as the one based on non-overlapping live ranges in [#47954] and -/// [#71003], could yield even more benefits. -/// -/// [#47954]: https://github.com/rust-lang/rust/pull/47954 -/// [#71003]: https://github.com/rust-lang/rust/pull/71003 -pub(super) struct RenameReturnPlace; - -impl<'tcx> crate::MirPass<'tcx> for RenameReturnPlace { - fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - // unsound: #111005 - sess.mir_opt_level() > 0 && sess.opts.unstable_opts.unsound_mir_opts - } - - fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut mir::Body<'tcx>) { - let def_id = body.source.def_id(); - let Some(returned_local) = local_eligible_for_nrvo(body) else { - debug!("`{:?}` was ineligible for NRVO", def_id); - return; - }; - - debug!( - "`{:?}` was eligible for NRVO, making {:?} the return place", - def_id, returned_local - ); - - RenameToReturnPlace { tcx, to_rename: returned_local }.visit_body_preserves_cfg(body); - - // Clean up the `NOP`s we inserted for statements made useless by our renaming. 
- for block_data in body.basic_blocks.as_mut_preserves_cfg() { - block_data.statements.retain(|stmt| stmt.kind != mir::StatementKind::Nop); - } - - // Overwrite the debuginfo of `_0` with that of the renamed local. - let (renamed_decl, ret_decl) = - body.local_decls.pick2_mut(returned_local, mir::RETURN_PLACE); - - // Sometimes, the return place is assigned a local of a different but coercible type, for - // example `&mut T` instead of `&T`. Overwriting the `LocalInfo` for the return place means - // its type may no longer match the return type of its function. This doesn't cause a - // problem in codegen because these two types are layout-compatible, but may be unexpected. - debug!("_0: {:?} = {:?}: {:?}", ret_decl.ty, returned_local, renamed_decl.ty); - ret_decl.clone_from(renamed_decl); - - // The return place is always mutable. - ret_decl.mutability = Mutability::Mut; - } - - fn is_required(&self) -> bool { - false - } -} - -/// MIR that is eligible for the NRVO must fulfill two conditions: -/// 1. The return place must not be read prior to the `Return` terminator. -/// 2. A simple assignment of a whole local to the return place (e.g., `_0 = _1`) must be the -/// only definition of the return place reaching the `Return` terminator. -/// -/// If the MIR fulfills both these conditions, this function returns the `Local` that is assigned -/// to the return place along all possible paths through the control-flow graph. -fn local_eligible_for_nrvo(body: &mir::Body<'_>) -> Option<Local> { - if IsReturnPlaceRead::run(body) { - return None; - } - - let mut copied_to_return_place = None; - for block in body.basic_blocks.indices() { - // Look for blocks with a `Return` terminator. - if !matches!(body[block].terminator().kind, mir::TerminatorKind::Return) { - continue; - } - - // Look for an assignment of a single local to the return place prior to the `Return`. - let returned_local = find_local_assigned_to_return_place(block, body)?; - match body.local_kind(returned_local) { - // FIXME: Can we do this for arguments as well? - mir::LocalKind::Arg => return None, - - mir::LocalKind::ReturnPointer => bug!("Return place was assigned to itself?"), - mir::LocalKind::Temp => {} - } - - // If multiple different locals are copied to the return place. We can't pick a - // single one to rename. - if copied_to_return_place.is_some_and(|old| old != returned_local) { - return None; - } - - copied_to_return_place = Some(returned_local); - } - - copied_to_return_place -} - -fn find_local_assigned_to_return_place(start: BasicBlock, body: &mir::Body<'_>) -> Option<Local> { - let mut block = start; - let mut seen = DenseBitSet::new_empty(body.basic_blocks.len()); - - // Iterate as long as `block` has exactly one predecessor that we have not yet visited. - while seen.insert(block) { - trace!("Looking for assignments to `_0` in {:?}", block); - - let local = body[block].statements.iter().rev().find_map(as_local_assigned_to_return_place); - if local.is_some() { - return local; - } - - match body.basic_blocks.predecessors()[block].as_slice() { - &[pred] => block = pred, - _ => return None, - } - } - - None -} - -// If this statement is an assignment of an unprojected local to the return place, -// return that local. 
-fn as_local_assigned_to_return_place(stmt: &mir::Statement<'_>) -> Option<Local> { - if let mir::StatementKind::Assign(box (lhs, rhs)) = &stmt.kind { - if lhs.as_local() == Some(mir::RETURN_PLACE) { - if let mir::Rvalue::Use(mir::Operand::Copy(rhs) | mir::Operand::Move(rhs)) = rhs { - return rhs.as_local(); - } - } - } - - None -} - -struct RenameToReturnPlace<'tcx> { - to_rename: Local, - tcx: TyCtxt<'tcx>, -} - -/// Replaces all uses of `self.to_rename` with `_0`. -impl<'tcx> MutVisitor<'tcx> for RenameToReturnPlace<'tcx> { - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn visit_statement(&mut self, stmt: &mut mir::Statement<'tcx>, loc: Location) { - // Remove assignments of the local being replaced to the return place, since it is now the - // return place: - // _0 = _1 - if as_local_assigned_to_return_place(stmt) == Some(self.to_rename) { - stmt.kind = mir::StatementKind::Nop; - return; - } - - // Remove storage annotations for the local being replaced: - // StorageLive(_1) - if let mir::StatementKind::StorageLive(local) | mir::StatementKind::StorageDead(local) = - stmt.kind - { - if local == self.to_rename { - stmt.kind = mir::StatementKind::Nop; - return; - } - } - - self.super_statement(stmt, loc) - } - - fn visit_terminator(&mut self, terminator: &mut mir::Terminator<'tcx>, loc: Location) { - // Ignore the implicit "use" of the return place in a `Return` statement. - if let mir::TerminatorKind::Return = terminator.kind { - return; - } - - self.super_terminator(terminator, loc); - } - - fn visit_local(&mut self, l: &mut Local, ctxt: PlaceContext, _: Location) { - if *l == mir::RETURN_PLACE { - assert_eq!(ctxt, PlaceContext::NonUse(NonUseContext::VarDebugInfo)); - } else if *l == self.to_rename { - *l = mir::RETURN_PLACE; - } - } -} - -struct IsReturnPlaceRead(bool); - -impl IsReturnPlaceRead { - fn run(body: &mir::Body<'_>) -> bool { - let mut vis = IsReturnPlaceRead(false); - vis.visit_body(body); - vis.0 - } -} - -impl<'tcx> Visitor<'tcx> for IsReturnPlaceRead { - fn visit_local(&mut self, l: Local, ctxt: PlaceContext, _: Location) { - if l == mir::RETURN_PLACE && ctxt.is_use() && !ctxt.is_place_assignment() { - self.0 = true; - } - } - - fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, loc: Location) { - // Ignore the implicit "use" of the return place in a `Return` statement. - if let mir::TerminatorKind::Return = terminator.kind { - return; - } - - self.super_terminator(terminator, loc); - } -} diff --git a/compiler/rustc_mir_transform/src/patch.rs b/compiler/rustc_mir_transform/src/patch.rs index c781d1a5324..2c535d011a0 100644 --- a/compiler/rustc_mir_transform/src/patch.rs +++ b/compiler/rustc_mir_transform/src/patch.rs @@ -1,4 +1,5 @@ -use rustc_index::{Idx, IndexVec}; +use rustc_data_structures::fx::FxHashMap; +use rustc_index::Idx; use rustc_middle::mir::*; use rustc_middle::ty::Ty; use rustc_span::Span; @@ -9,7 +10,9 @@ use tracing::debug; /// and replacement of terminators, and then apply the queued changes all at /// once with `apply`. This is useful for MIR transformation passes. pub(crate) struct MirPatch<'tcx> { - term_patch_map: IndexVec<BasicBlock, Option<TerminatorKind<'tcx>>>, + term_patch_map: FxHashMap<BasicBlock, TerminatorKind<'tcx>>, + /// Set of statements that should be replaced by `Nop`. 
+ nop_statements: Vec<Location>, new_blocks: Vec<BasicBlockData<'tcx>>, new_statements: Vec<(Location, StatementKind<'tcx>)>, new_locals: Vec<LocalDecl<'tcx>>, @@ -22,17 +25,22 @@ pub(crate) struct MirPatch<'tcx> { terminate_block: Option<(BasicBlock, UnwindTerminateReason)>, body_span: Span, next_local: usize, + /// The number of blocks at the start of the transformation. New blocks + /// get appended at the end. + next_block: usize, } impl<'tcx> MirPatch<'tcx> { /// Creates a new, empty patch. pub(crate) fn new(body: &Body<'tcx>) -> Self { let mut result = MirPatch { - term_patch_map: IndexVec::from_elem(None, &body.basic_blocks), + term_patch_map: Default::default(), + nop_statements: vec![], new_blocks: vec![], new_statements: vec![], new_locals: vec![], next_local: body.local_decls.len(), + next_block: body.basic_blocks.len(), resume_block: None, unreachable_cleanup_block: None, unreachable_no_cleanup_block: None, @@ -141,7 +149,7 @@ impl<'tcx> MirPatch<'tcx> { /// Has a replacement of this block's terminator been queued in this patch? pub(crate) fn is_term_patched(&self, bb: BasicBlock) -> bool { - self.term_patch_map[bb].is_some() + self.term_patch_map.contains_key(&bb) } /// Universal getter for block data, either it is in 'old' blocks or in patched ones @@ -194,18 +202,26 @@ impl<'tcx> MirPatch<'tcx> { /// Queues the addition of a new basic block. pub(crate) fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock { - let block = self.term_patch_map.next_index(); + let block = BasicBlock::from_usize(self.next_block + self.new_blocks.len()); debug!("MirPatch: new_block: {:?}: {:?}", block, data); self.new_blocks.push(data); - self.term_patch_map.push(None); block } /// Queues the replacement of a block's terminator. pub(crate) fn patch_terminator(&mut self, block: BasicBlock, new: TerminatorKind<'tcx>) { - assert!(self.term_patch_map[block].is_none()); + assert!(!self.term_patch_map.contains_key(&block)); debug!("MirPatch: patch_terminator({:?}, {:?})", block, new); - self.term_patch_map[block] = Some(new); + self.term_patch_map.insert(block, new); + } + + /// Mark given statement to be replaced by a `Nop`. + /// + /// This method only works on statements from the initial body, and cannot be used to remove + /// statements from `add_statement` or `add_assign`. + #[tracing::instrument(level = "debug", skip(self))] + pub(crate) fn nop_statement(&mut self, loc: Location) { + self.nop_statements.push(loc); } /// Queues the insertion of a statement at a given location. 
The statement @@ -244,6 +260,7 @@ impl<'tcx> MirPatch<'tcx> { self.new_blocks.len(), body.basic_blocks.len() ); + debug_assert_eq!(self.next_block, body.basic_blocks.len()); let bbs = if self.term_patch_map.is_empty() && self.new_blocks.is_empty() { body.basic_blocks.as_mut_preserves_cfg() } else { @@ -251,11 +268,9 @@ impl<'tcx> MirPatch<'tcx> { }; bbs.extend(self.new_blocks); body.local_decls.extend(self.new_locals); - for (src, patch) in self.term_patch_map.into_iter_enumerated() { - if let Some(patch) = patch { - debug!("MirPatch: patching block {:?}", src); - bbs[src].terminator_mut().kind = patch; - } + + for loc in self.nop_statements { + bbs[loc.block].statements[loc.statement_index].make_nop(true); } let mut new_statements = self.new_statements; @@ -273,12 +288,23 @@ impl<'tcx> MirPatch<'tcx> { } debug!("MirPatch: adding statement {:?} at loc {:?}+{}", stmt, loc, delta); loc.statement_index += delta; - let source_info = Self::source_info_for_index(&body[loc.block], loc); - body[loc.block] + let source_info = Self::source_info_for_index(&bbs[loc.block], loc); + bbs[loc.block] .statements .insert(loc.statement_index, Statement::new(source_info, stmt)); delta += 1; } + + // The order in which we patch terminators does not change the result. + #[allow(rustc::potential_query_instability)] + for (src, patch) in self.term_patch_map { + debug!("MirPatch: patching block {:?}", src); + let bb = &mut bbs[src]; + if let TerminatorKind::Unreachable = patch { + bb.statements.clear(); + } + bb.terminator_mut().kind = patch; + } } fn source_info_for_index(data: &BasicBlockData<'_>, loc: Location) -> SourceInfo { diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index 9ea2eb4f25d..c7dc18a4a13 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -292,7 +292,6 @@ impl<'tcx> Validator<'_, 'tcx> { match elem { // Recurse directly. ProjectionElem::ConstantIndex { .. } - | ProjectionElem::Subtype(_) | ProjectionElem::Subslice { .. } | ProjectionElem::UnwrapUnsafeBinder(_) => {} @@ -437,9 +436,7 @@ impl<'tcx> Validator<'_, 'tcx> { self.validate_operand(op)? } - Rvalue::Discriminant(place) | Rvalue::Len(place) => { - self.validate_place(place.as_ref())? - } + Rvalue::Discriminant(place) => self.validate_place(place.as_ref())?, Rvalue::ThreadLocalRef(_) => return Err(Unpromotable), @@ -1052,7 +1049,7 @@ fn promote_candidates<'tcx>( // Eliminate assignments to, and drops of promoted temps. let promoted = |index: Local| temps[index] == TempState::PromotedOut; for block in body.basic_blocks_mut() { - block.statements.retain(|statement| match &statement.kind { + block.retain_statements(|statement| match &statement.kind { StatementKind::Assign(box (place, _)) => { if let Some(index) = place.as_local() { !promoted(index) diff --git a/compiler/rustc_mir_transform/src/ref_prop.rs b/compiler/rustc_mir_transform/src/ref_prop.rs index 6f61215cee2..deb0a146476 100644 --- a/compiler/rustc_mir_transform/src/ref_prop.rs +++ b/compiler/rustc_mir_transform/src/ref_prop.rs @@ -195,10 +195,10 @@ fn compute_replacement<'tcx>( // including DEF. This violates the DEF dominates USE condition, and so is impossible. let is_constant_place = |place: Place<'_>| { // We only allow `Deref` as the first projection, to avoid surprises. 
- if place.projection.first() == Some(&PlaceElem::Deref) { + if let Some((&PlaceElem::Deref, rest)) = place.projection.split_first() { // `place == (*some_local).xxx`, it is constant only if `some_local` is constant. // We approximate constness using SSAness. - ssa.is_ssa(place.local) && place.projection[1..].iter().all(PlaceElem::is_stable_offset) + ssa.is_ssa(place.local) && rest.iter().all(PlaceElem::is_stable_offset) } else { storage_live.has_single_storage(place.local) && place.projection[..].iter().all(PlaceElem::is_stable_offset) @@ -206,7 +206,7 @@ fn compute_replacement<'tcx>( }; let mut can_perform_opt = |target: Place<'tcx>, loc: Location| { - if target.projection.first() == Some(&PlaceElem::Deref) { + if target.is_indirect_first_projection() { // We are creating a reborrow. As `place.local` is a reference, removing the storage // statements should not make it much harder for LLVM to optimize. storage_to_remove.insert(target.local); @@ -266,7 +266,7 @@ fn compute_replacement<'tcx>( Rvalue::Ref(_, _, place) | Rvalue::RawPtr(_, place) => { let mut place = *place; // Try to see through `place` in order to collapse reborrow chains. - if place.projection.first() == Some(&PlaceElem::Deref) + if let Some((&PlaceElem::Deref, rest)) = place.projection.split_first() && let Value::Pointer(target, inner_needs_unique) = targets[place.local] // Only see through immutable reference and pointers, as we do not know yet if // mutable references are fully replaced. @@ -274,7 +274,7 @@ fn compute_replacement<'tcx>( // Only collapse chain if the pointee is definitely live. && can_perform_opt(target, location) { - place = target.project_deeper(&place.projection[1..], tcx); + place = target.project_deeper(rest, tcx); } assert_ne!(place.local, local); if is_constant_place(place) { @@ -323,7 +323,7 @@ fn compute_replacement<'tcx>( return; } - if place.projection.first() != Some(&PlaceElem::Deref) { + if !place.is_indirect_first_projection() { // This is not a dereference, nothing to do. return; } @@ -392,20 +392,15 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> { } fn visit_var_debug_info(&mut self, debuginfo: &mut VarDebugInfo<'tcx>) { - // If the debuginfo is a pointer to another place: - // - if it's a reborrow, see through it; - // - if it's a direct borrow, increase `debuginfo.references`. + // If the debuginfo is a pointer to another place + // and it's a reborrow: see through it while let VarDebugInfoContents::Place(ref mut place) = debuginfo.value && place.projection.is_empty() && let Value::Pointer(target, _) = self.targets[place.local] - && target.projection.iter().all(|p| p.can_use_in_debuginfo()) + && let &[PlaceElem::Deref] = &target.projection[..] { - if let Some((&PlaceElem::Deref, rest)) = target.projection.split_last() { - *place = Place::from(target.local).project_deeper(rest, self.tcx); - self.any_replacement = true; - } else { - break; - } + *place = Place::from(target.local); + self.any_replacement = true; } // Simplify eventual projections left inside `debuginfo`. 
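The ref_prop changes above replace the `projection.first() == Some(&PlaceElem::Deref)` check followed by slicing `&projection[1..]` with a single `split_first`, which produces the tail without a second bounds check. The same idiom on a hypothetical projection type:

```rust
/// Hypothetical stand-in for MIR projection elements.
#[derive(Debug, PartialEq)]
enum Elem {
    Deref,
    Field(u32),
}

/// Returns the projections behind a leading `Deref`, if there is one.
/// Equivalent to checking `first()` and then slicing `[1..]`, in one step.
fn strip_leading_deref(projection: &[Elem]) -> Option<&[Elem]> {
    match projection.split_first() {
        Some((Elem::Deref, rest)) => Some(rest),
        _ => None,
    }
}

fn main() {
    let proj = [Elem::Deref, Elem::Field(0)];
    assert_eq!(strip_leading_deref(&proj), Some(&proj[1..]));
    assert_eq!(strip_leading_deref(&proj[1..]), None);
}
```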
@@ -414,9 +409,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> { fn visit_place(&mut self, place: &mut Place<'tcx>, ctxt: PlaceContext, loc: Location) { loop { - if place.projection.first() != Some(&PlaceElem::Deref) { - return; - } + let Some((&PlaceElem::Deref, rest)) = place.projection.split_first() else { return }; let Value::Pointer(target, _) = self.targets[place.local] else { return }; @@ -432,7 +425,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> { return; } - *place = target.project_deeper(&place.projection[1..], self.tcx); + *place = target.project_deeper(rest, self.tcx); self.any_replacement = true; } } @@ -442,7 +435,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> { StatementKind::StorageLive(l) | StatementKind::StorageDead(l) if self.storage_to_remove.contains(l) => { - stmt.make_nop(); + stmt.make_nop(true); } // Do not remove assignments as they may still be useful for debuginfo. _ => self.super_statement(stmt, loc), diff --git a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs index 5b6d7ffb511..b53c1f6d202 100644 --- a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs +++ b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs @@ -1,7 +1,6 @@ use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; -use rustc_target::spec::PanicStrategy; use tracing::debug; use crate::patch::MirPatch; @@ -13,7 +12,7 @@ pub(super) struct RemoveNoopLandingPads; impl<'tcx> crate::MirPass<'tcx> for RemoveNoopLandingPads { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - sess.panic_strategy() != PanicStrategy::Abort + sess.panic_strategy().unwinds() } fn run_pass(&self, _tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { diff --git a/compiler/rustc_mir_transform/src/remove_place_mention.rs b/compiler/rustc_mir_transform/src/remove_place_mention.rs index cb598ceb4df..d56b51bb496 100644 --- a/compiler/rustc_mir_transform/src/remove_place_mention.rs +++ b/compiler/rustc_mir_transform/src/remove_place_mention.rs @@ -14,7 +14,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemovePlaceMention { fn run_pass(&self, _: TyCtxt<'tcx>, body: &mut Body<'tcx>) { trace!("Running RemovePlaceMention on {:?}", body.source); for data in body.basic_blocks.as_mut_preserves_cfg() { - data.statements.retain(|statement| match statement.kind { + data.retain_statements(|statement| match statement.kind { StatementKind::PlaceMention(..) | StatementKind::Nop => false, _ => true, }) diff --git a/compiler/rustc_mir_transform/src/remove_storage_markers.rs b/compiler/rustc_mir_transform/src/remove_storage_markers.rs index 1ae33c00968..cb97d2c865a 100644 --- a/compiler/rustc_mir_transform/src/remove_storage_markers.rs +++ b/compiler/rustc_mir_transform/src/remove_storage_markers.rs @@ -14,7 +14,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveStorageMarkers { fn run_pass(&self, _tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { trace!("Running RemoveStorageMarkers on {:?}", body.source); for data in body.basic_blocks.as_mut_preserves_cfg() { - data.statements.retain(|statement| match statement.kind { + data.retain_statements(|statement| match statement.kind { StatementKind::StorageLive(..) | StatementKind::StorageDead(..) 
| StatementKind::Nop => false, diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs index c4dc8638b26..90c1b3520b9 100644 --- a/compiler/rustc_mir_transform/src/remove_zsts.rs +++ b/compiler/rustc_mir_transform/src/remove_zsts.rs @@ -141,7 +141,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { && let ty = place_for_ty.ty(self.local_decls, self.tcx).ty && self.known_to_be_zst(ty) { - statement.make_nop(); + statement.make_nop(true); } else { self.super_statement(statement, loc); } diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs index 75917d23883..8b5efb74205 100644 --- a/compiler/rustc_mir_transform/src/simplify.rs +++ b/compiler/rustc_mir_transform/src/simplify.rs @@ -35,10 +35,12 @@ //! pre-"runtime" MIR! use itertools::Itertools as _; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; +use rustc_mir_dataflow::debuginfo::debuginfo_locals; use rustc_span::DUMMY_SP; use smallvec::SmallVec; use tracing::{debug, trace}; @@ -142,7 +144,7 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { // statements itself to avoid moving the (relatively) large statements twice. // We do not push the statements directly into the target block (`bb`) as that is slower // due to additional reallocations - let mut merged_blocks = Vec::new(); + let mut merged_blocks: Vec<BasicBlock> = Vec::new(); let mut outer_changed = false; loop { let mut changed = false; @@ -157,8 +159,9 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { let mut terminator = self.basic_blocks[bb].terminator.take().expect("invalid terminator state"); - terminator - .successors_mut(|successor| self.collapse_goto_chain(successor, &mut changed)); + terminator.successors_mut(|successor| { + self.collapse_goto_chain(successor, &mut changed); + }); let mut inner_changed = true; merged_blocks.clear(); @@ -175,10 +178,18 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { if statements_to_merge > 0 { let mut statements = std::mem::take(&mut self.basic_blocks[bb].statements); statements.reserve(statements_to_merge); + let mut parent_bb_last_debuginfos = + std::mem::take(&mut self.basic_blocks[bb].after_last_stmt_debuginfos); for &from in &merged_blocks { + if let Some(stmt) = self.basic_blocks[from].statements.first_mut() { + stmt.debuginfos.prepend(&mut parent_bb_last_debuginfos); + } statements.append(&mut self.basic_blocks[from].statements); + parent_bb_last_debuginfos = + std::mem::take(&mut self.basic_blocks[from].after_last_stmt_debuginfos); } self.basic_blocks[bb].statements = statements; + self.basic_blocks[bb].after_last_stmt_debuginfos = parent_bb_last_debuginfos; } self.basic_blocks[bb].terminator = Some(terminator); @@ -218,10 +229,14 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { // goto chains. We should probably benchmark different sizes. let mut terminators: SmallVec<[_; 1]> = Default::default(); let mut current = *start; + // If each successor has only one predecessor, it's a trivial goto chain. + // We can move all debuginfos to the last basic block. + let mut trivial_goto_chain = true; while let Some(terminator) = self.take_terminator_if_simple_goto(current) { let Terminator { kind: TerminatorKind::Goto { target }, .. 
} = terminator else { unreachable!(); }; + trivial_goto_chain &= self.pred_count[target] == 1; terminators.push((current, terminator)); current = target; } @@ -233,6 +248,17 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { else { unreachable!(); }; + if trivial_goto_chain { + let mut pred_debuginfos = + std::mem::take(&mut self.basic_blocks[current].after_last_stmt_debuginfos); + let debuginfos = if let Some(stmt) = self.basic_blocks[last].statements.first_mut() + { + &mut stmt.debuginfos + } else { + &mut self.basic_blocks[last].after_last_stmt_debuginfos + }; + debuginfos.prepend(&mut pred_debuginfos); + } *changed |= *target != last; *target = last; debug!("collapsing goto chain from {:?} to {:?}", current, target); @@ -303,7 +329,7 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { fn strip_nops(&mut self) { for blk in self.basic_blocks.iter_mut() { - blk.statements.retain(|stmt| !matches!(stmt.kind, StatementKind::Nop)) + blk.strip_nops(); } } } @@ -476,17 +502,22 @@ fn make_local_map<V>( /// Keeps track of used & unused locals. struct UsedLocals { increment: bool, - arg_count: u32, use_count: IndexVec<Local, u32>, + always_used: DenseBitSet<Local>, } impl UsedLocals { /// Determines which locals are used & unused in the given body. fn new(body: &Body<'_>) -> Self { + let mut always_used = debuginfo_locals(body); + always_used.insert(RETURN_PLACE); + for arg in body.args_iter() { + always_used.insert(arg); + } let mut this = Self { increment: true, - arg_count: body.arg_count.try_into().unwrap(), use_count: IndexVec::from_elem(0, &body.local_decls), + always_used, }; this.visit_body(body); this @@ -494,10 +525,16 @@ impl UsedLocals { /// Checks if local is used. /// - /// Return place and arguments are always considered used. + /// Return place, arguments, var debuginfo are always considered used. fn is_used(&self, local: Local) -> bool { - trace!("is_used({:?}): use_count: {:?}", local, self.use_count[local]); - local.as_u32() <= self.arg_count || self.use_count[local] != 0 + trace!( + "is_used({:?}): use_count: {:?}, always_used: {}", + local, + self.use_count[local], + self.always_used.contains(local) + ); + // To keep things simple, we don't handle debugging information here, these are in DSE. + self.always_used.contains(local) || self.use_count[local] != 0 } /// Updates the use counts to reflect the removal of given statement. @@ -539,10 +576,10 @@ impl<'tcx> Visitor<'tcx> for UsedLocals { self.super_statement(statement, location); } - StatementKind::ConstEvalCounter | StatementKind::Nop => {} - - StatementKind::StorageLive(_local) | StatementKind::StorageDead(_local) => {} - + StatementKind::ConstEvalCounter + | StatementKind::Nop + | StatementKind::StorageLive(..) + | StatementKind::StorageDead(..) => {} StatementKind::Assign(box (ref place, ref rvalue)) => { if rvalue.is_safe_to_remove() { self.visit_lhs(place, location); @@ -560,7 +597,10 @@ impl<'tcx> Visitor<'tcx> for UsedLocals { } } - fn visit_local(&mut self, local: Local, _ctx: PlaceContext, _location: Location) { + fn visit_local(&mut self, local: Local, ctx: PlaceContext, _location: Location) { + if matches!(ctx, PlaceContext::NonUse(_)) { + return; + } if self.increment { self.use_count[local] += 1; } else { @@ -583,28 +623,26 @@ fn remove_unused_definitions_helper(used_locals: &mut UsedLocals, body: &mut Bod for data in body.basic_blocks.as_mut_preserves_cfg() { // Remove unnecessary StorageLive and StorageDead annotations. 
- data.statements.retain(|statement| { - let keep = match &statement.kind { + for statement in data.statements.iter_mut() { + let keep_statement = match &statement.kind { StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => { used_locals.is_used(*local) } - StatementKind::Assign(box (place, _)) => used_locals.is_used(place.local), - - StatementKind::SetDiscriminant { place, .. } - | StatementKind::BackwardIncompatibleDropHint { place, reason: _ } - | StatementKind::Deinit(place) => used_locals.is_used(place.local), - StatementKind::Nop => false, - _ => true, + StatementKind::Assign(box (place, _)) + | StatementKind::SetDiscriminant { box place, .. } + | StatementKind::BackwardIncompatibleDropHint { box place, .. } + | StatementKind::Deinit(box place) => used_locals.is_used(place.local), + _ => continue, }; - - if !keep { - trace!("removing statement {:?}", statement); - modified = true; - used_locals.statement_removed(statement); + if keep_statement { + continue; } - - keep - }); + trace!("removing statement {:?}", statement); + modified = true; + used_locals.statement_removed(statement); + statement.make_nop(true); + } + data.strip_nops(); } } } @@ -619,7 +657,62 @@ impl<'tcx> MutVisitor<'tcx> for LocalUpdater<'tcx> { self.tcx } + fn visit_statement_debuginfo( + &mut self, + stmt_debuginfo: &mut StmtDebugInfo<'tcx>, + location: Location, + ) { + match stmt_debuginfo { + StmtDebugInfo::AssignRef(local, place) => { + if place.as_ref().accessed_locals().any(|local| self.map[local].is_none()) { + *stmt_debuginfo = StmtDebugInfo::InvalidAssign(*local); + } + } + StmtDebugInfo::InvalidAssign(_) => {} + } + self.super_statement_debuginfo(stmt_debuginfo, location); + } + fn visit_local(&mut self, l: &mut Local, _: PlaceContext, _: Location) { *l = self.map[*l].unwrap(); } } + +pub(crate) struct UsedInStmtLocals { + pub(crate) locals: DenseBitSet<Local>, +} + +impl UsedInStmtLocals { + pub(crate) fn new(body: &Body<'_>) -> Self { + let mut this = Self { locals: DenseBitSet::new_empty(body.local_decls.len()) }; + this.visit_body(body); + this + } + + pub(crate) fn remove_unused_storage_annotations<'tcx>(&self, body: &mut Body<'tcx>) { + for data in body.basic_blocks.as_mut_preserves_cfg() { + // Remove unnecessary StorageLive and StorageDead annotations. 
+ for statement in data.statements.iter_mut() { + let keep_statement = match &statement.kind { + StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => { + self.locals.contains(*local) + } + _ => continue, + }; + if keep_statement { + continue; + } + statement.make_nop(true); + } + } + } +} + +impl<'tcx> Visitor<'tcx> for UsedInStmtLocals { + fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) { + if matches!(context, PlaceContext::NonUse(_)) { + return; + } + self.locals.insert(local); + } +} diff --git a/compiler/rustc_mir_transform/src/simplify_branches.rs b/compiler/rustc_mir_transform/src/simplify_branches.rs index 886f4d6e509..ed94a058ec6 100644 --- a/compiler/rustc_mir_transform/src/simplify_branches.rs +++ b/compiler/rustc_mir_transform/src/simplify_branches.rs @@ -2,6 +2,8 @@ use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; use tracing::trace; +use crate::patch::MirPatch; + pub(super) enum SimplifyConstCondition { AfterConstProp, Final, @@ -19,8 +21,10 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyConstCondition { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { trace!("Running SimplifyConstCondition on {:?}", body.source); let typing_env = body.typing_env(tcx); - 'blocks: for block in body.basic_blocks_mut() { - for stmt in block.statements.iter_mut() { + let mut patch = MirPatch::new(body); + + 'blocks: for (bb, block) in body.basic_blocks.iter_enumerated() { + for (statement_index, stmt) in block.statements.iter().enumerate() { // Simplify `assume` of a known value: either a NOP or unreachable. if let StatementKind::Intrinsic(box ref intrinsic) = stmt.kind && let NonDivergingIntrinsic::Assume(discr) = intrinsic @@ -28,17 +32,16 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyConstCondition { && let Some(constant) = c.const_.try_eval_bool(tcx, typing_env) { if constant { - stmt.make_nop(); + patch.nop_statement(Location { block: bb, statement_index }); } else { - block.statements.clear(); - block.terminator_mut().kind = TerminatorKind::Unreachable; + patch.patch_terminator(bb, TerminatorKind::Unreachable); continue 'blocks; } } } - let terminator = block.terminator_mut(); - terminator.kind = match terminator.kind { + let terminator = block.terminator(); + let terminator = match terminator.kind { TerminatorKind::SwitchInt { discr: Operand::Constant(ref c), ref targets, .. 
} => { @@ -58,7 +61,9 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyConstCondition { }, _ => continue, }; + patch.patch_terminator(bb, terminator); } + patch.apply(body); } fn is_required(&self) -> bool { diff --git a/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs b/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs index c60eb566521..4597439e269 100644 --- a/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs +++ b/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs @@ -76,7 +76,7 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyComparisonIntegral { // delete comparison statement if it the value being switched on was moved, which means // it can not be user later on if opt.can_remove_bin_op_stmt { - bb.statements[opt.bin_op_stmt_idx].make_nop(); + bb.statements[opt.bin_op_stmt_idx].make_nop(true); } else { // if the integer being compared to a const integral is being moved into the // comparison, e.g `_2 = Eq(move _3, const 'x');` @@ -136,7 +136,7 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyComparisonIntegral { } for (idx, bb_idx) in storage_deads_to_remove { - body.basic_blocks_mut()[bb_idx].statements[idx].make_nop(); + body.basic_blocks_mut()[bb_idx].statements[idx].make_nop(true); } for (idx, stmt) in storage_deads_to_insert { diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs index 38769885f36..99f10b8d91d 100644 --- a/compiler/rustc_mir_transform/src/sroa.rs +++ b/compiler/rustc_mir_transform/src/sroa.rs @@ -318,7 +318,7 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> { for (_, _, fl) in final_locals { self.patch.add_statement(location, StatementKind::StorageLive(fl)); } - statement.make_nop(); + statement.make_nop(true); } return; } @@ -327,7 +327,7 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> { for (_, _, fl) in final_locals { self.patch.add_statement(location, StatementKind::StorageDead(fl)); } - statement.make_nop(); + statement.make_nop(true); } return; } @@ -337,7 +337,7 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> { self.patch .add_statement(location, StatementKind::Deinit(Box::new(fl.into()))); } - statement.make_nop(); + statement.make_nop(true); return; } } @@ -367,7 +367,7 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> { ); } } - statement.make_nop(); + statement.make_nop(true); return; } } @@ -429,7 +429,7 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> { StatementKind::Assign(Box::new((new_local.into(), rvalue))), ); } - statement.make_nop(); + statement.make_nop(true); return; } } diff --git a/compiler/rustc_mir_transform/src/strip_debuginfo.rs b/compiler/rustc_mir_transform/src/strip_debuginfo.rs index 9ede8aa79c4..7fec25ccb52 100644 --- a/compiler/rustc_mir_transform/src/strip_debuginfo.rs +++ b/compiler/rustc_mir_transform/src/strip_debuginfo.rs @@ -1,5 +1,6 @@ use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; +use rustc_mir_dataflow::debuginfo::debuginfo_locals; use rustc_session::config::MirStripDebugInfo; /// Conditionally remove some of the VarDebugInfo in MIR. 
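The SimplifyConstCondition rewrite above illustrates the intended `MirPatch` workflow: iterate the body immutably, queue `nop_statement` and `patch_terminator` calls, then mutate everything in a single `apply`. A minimal standalone analog of that workflow, using toy block and terminator types that are purely hypothetical (not the rustc ones):

```rust
use std::collections::HashMap;

/// Toy basic block: a list of statements plus a terminator, both strings here.
struct Block {
    statements: Vec<String>,
    terminator: String,
}

/// Queue-then-apply patcher in the spirit of `MirPatch`: nothing is mutated
/// until `apply`, so a pass can keep iterating over the body immutably.
#[derive(Default)]
struct Patch {
    new_terminators: HashMap<usize, String>,
    nop_statements: Vec<(usize, usize)>, // (block, statement index)
}

impl Patch {
    fn patch_terminator(&mut self, block: usize, new: String) {
        // Like the real patch, a block's terminator may only be replaced once.
        assert!(!self.new_terminators.contains_key(&block));
        self.new_terminators.insert(block, new);
    }

    fn nop_statement(&mut self, block: usize, statement: usize) {
        self.nop_statements.push((block, statement));
    }

    fn apply(self, blocks: &mut [Block]) {
        for (block, statement) in self.nop_statements {
            blocks[block].statements[statement] = "nop".to_string();
        }
        for (block, term) in self.new_terminators {
            // Mirrors the diff: a block made unreachable also drops its
            // statements, since they can never execute.
            if term == "unreachable" {
                blocks[block].statements.clear();
            }
            blocks[block].terminator = term;
        }
    }
}

fn main() {
    let mut body = vec![Block {
        statements: vec!["assume(true)".into(), "x = 1".into()],
        terminator: "goto bb1".into(),
    }];
    let mut patch = Patch::default();
    patch.nop_statement(0, 0); // a known-true `assume` becomes a nop
    patch.apply(&mut body);
    assert_eq!(body[0].statements[0], "nop");
}
```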
@@ -30,6 +31,22 @@ impl<'tcx> crate::MirPass<'tcx> for StripDebugInfo { if place.local.as_usize() <= body.arg_count && place.local != RETURN_PLACE, ) }); + + let debuginfo_locals = debuginfo_locals(body); + for data in body.basic_blocks.as_mut_preserves_cfg() { + for stmt in data.statements.iter_mut() { + stmt.debuginfos.retain(|debuginfo| match debuginfo { + StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => { + debuginfo_locals.contains(*local) + } + }); + } + data.after_last_stmt_debuginfos.retain(|debuginfo| match debuginfo { + StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => { + debuginfo_locals.contains(*local) + } + }); + } } fn is_required(&self) -> bool { diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index 99e4782e470..cbabb982df8 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -814,22 +814,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } } } - ProjectionElem::Subtype(ty) => { - if !util::sub_types( - self.tcx, - self.typing_env, - ty, - place_ref.ty(&self.body.local_decls, self.tcx).ty, - ) { - self.fail( - location, - format!( - "Failed subtyping {ty} and {}", - place_ref.ty(&self.body.local_decls, self.tcx).ty - ), - ) - } - } ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => { let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx); let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else { @@ -1064,14 +1048,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } } Rvalue::Ref(..) => {} - Rvalue::Len(p) => { - let pty = p.ty(&self.body.local_decls, self.tcx).ty; - check_kinds!( - pty, - "Cannot compute length of non-array type {:?}", - ty::Array(..) | ty::Slice(..) - ); - } Rvalue::BinaryOp(op, vals) => { use BinOp::*; let a = vals.0.ty(&self.body.local_decls, self.tcx); @@ -1339,6 +1315,14 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { ); } } + CastKind::Subtype => { + if !util::sub_types(self.tcx, self.typing_env, op_ty, *target_type) { + self.fail( + location, + format!("Failed subtyping {op_ty} and {target_type}"), + ) + } + } } } Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => { |
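The validator change above moves the subtyping check from a place projection to `CastKind::Subtype`, but the source-level situations it guards are unchanged: assignments where the type changes only by subtyping. An illustrative example of such a coercion using higher-ranked function pointers; this is ordinary Rust, and whether a given build records an explicit subtype node in MIR is an implementation detail:

```rust
fn call_with_static(f: fn(&'static str)) {
    f("hello");
}

fn main() {
    // `for<'a> fn(&'a str)` is a subtype of `fn(&'static str)`: a function
    // that accepts any lifetime in particular accepts `'static`. Passing
    // `general` below changes only the type, which MIR records explicitly so
    // that later passes and the validator can see the subtyping step.
    let general: for<'a> fn(&'a str) = |s| {
        let _ = s.len();
    };
    call_with_static(general);
}
```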
