| author | Laurențiu Nicola <lnicola@users.noreply.github.com> | 2024-11-01 12:26:18 +0000 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2024-11-01 12:26:18 +0000 |
| commit | cf6bc48353ba46fceb43f5efc37858c471d05bb7 (patch) | |
| tree | 0cff0055f2ed9e9d18df92e2d98f10d7b5359075 /compiler/rustc_mir_transform/src | |
| parent | a5b5b466b0517124e8c5d8610d708aa04a1c4fc7 (diff) | |
| parent | 9acf57c48e0d55e7b2d07dae2bb4fbfc3fc010dd (diff) | |
| download | rust-cf6bc48353ba46fceb43f5efc37858c471d05bb7.tar.gz rust-cf6bc48353ba46fceb43f5efc37858c471d05bb7.zip | |
Merge pull request #18457 from lnicola/sync-from-rust
minor: Sync from downstream
Diffstat (limited to 'compiler/rustc_mir_transform/src')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_mir_transform/src/coroutine.rs | 23 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/dataflow_const_prop.rs | 351 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/dead_store_elimination.rs | 3 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/dest_prop.rs | 5 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/elaborate_drops.rs | 8 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/gvn.rs | 23 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/inline.rs | 27 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/known_panics_lint.rs | 10 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/lint.rs | 6 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/ref_prop.rs | 3 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/remove_uninit_drops.rs | 4 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/unreachable_enum_branching.rs | 4 |
| -rw-r--r-- | compiler/rustc_mir_transform/src/validate.rs | 25 |
13 files changed, 358 insertions, 134 deletions
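
Most of the churn below is mechanical: the `rustc_mir_dataflow` entry point changes from the builder-style `into_engine(tcx, body).pass_name(..).iterate_to_fixpoint()` chain to a single `iterate_to_fixpoint(tcx, body, pass_name)` call that takes the optional pass name directly. A condensed before/after, lifted from the coroutine.rs hunk below (rustc-internal types, so this is an illustrative sketch rather than a standalone-compilable snippet):

```rust
// Before: the analysis was configured through the engine builder.
let mut liveness = MaybeLiveLocals
    .into_engine(tcx, body)
    .pass_name("coroutine")
    .iterate_to_fixpoint()
    .into_results_cursor(body);

// After: `iterate_to_fixpoint` is called on the analysis itself; the pass name
// becomes an `Option<&str>` argument (`None` where no name was set before).
let mut liveness = MaybeLiveLocals
    .iterate_to_fixpoint(tcx, body, Some("coroutine"))
    .into_results_cursor(body);
```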
```diff
diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs
index cd291058977..6d5665b4331 100644
--- a/compiler/rustc_mir_transform/src/coroutine.rs
+++ b/compiler/rustc_mir_transform/src/coroutine.rs
@@ -64,7 +64,7 @@ use rustc_index::{Idx, IndexVec};
 use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
 use rustc_middle::mir::*;
 use rustc_middle::ty::{
-    self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt,
+    self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode,
 };
 use rustc_middle::{bug, span_bug};
 use rustc_mir_dataflow::Analysis;
@@ -666,14 +666,13 @@ fn locals_live_across_suspend_points<'tcx>(
     // Calculate when MIR locals have live storage. This gives us an upper bound of their
     // lifetimes.
     let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals))
-        .into_engine(tcx, body)
-        .iterate_to_fixpoint()
+        .iterate_to_fixpoint(tcx, body, None)
         .into_results_cursor(body);

     // Calculate the MIR locals which have been previously
     // borrowed (even if they are still active).
     let borrowed_locals_results =
-        MaybeBorrowedLocals.into_engine(tcx, body).pass_name("coroutine").iterate_to_fixpoint();
+        MaybeBorrowedLocals.iterate_to_fixpoint(tcx, body, Some("coroutine"));

     let mut borrowed_locals_cursor = borrowed_locals_results.clone().into_results_cursor(body);

@@ -681,16 +680,12 @@ fn locals_live_across_suspend_points<'tcx>(
     // for.
     let mut requires_storage_cursor =
         MaybeRequiresStorage::new(borrowed_locals_results.into_results_cursor(body))
-            .into_engine(tcx, body)
-            .iterate_to_fixpoint()
+            .iterate_to_fixpoint(tcx, body, None)
             .into_results_cursor(body);

     // Calculate the liveness of MIR locals ignoring borrows.
-    let mut liveness = MaybeLiveLocals
-        .into_engine(tcx, body)
-        .pass_name("coroutine")
-        .iterate_to_fixpoint()
-        .into_results_cursor(body);
+    let mut liveness =
+        MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("coroutine")).into_results_cursor(body);

     let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
     let mut live_locals_at_suspension_points = Vec::new();
@@ -1501,7 +1496,11 @@ fn check_field_tys_sized<'tcx>(
         return;
     }

-    let infcx = tcx.infer_ctxt().ignoring_regions().build();
+    // FIXME(#132279): @lcnr believes that we may want to support coroutines
+    // whose `Sized`-ness relies on the hidden types of opaques defined by the
+    // parent function. In this case we'd have to be able to reveal only these
+    // opaques here.
+    let infcx = tcx.infer_ctxt().ignoring_regions().build(TypingMode::non_body_analysis());
     let param_env = tcx.param_env(def_id);

     let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
index 002216f50f2..dd85d06540d 100644
--- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
+++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
@@ -2,6 +2,10 @@
 //!
 //! Currently, this pass only propagates scalar values.

+use std::assert_matches::assert_matches;
+use std::fmt::Formatter;
+
+use rustc_abi::{BackendRepr, FIRST_VARIANT, FieldIdx, Size, VariantIdx};
 use rustc_const_eval::const_eval::{DummyMachine, throw_machine_stop_str};
 use rustc_const_eval::interpret::{
     ImmTy, Immediate, InterpCx, OpTy, PlaceTy, Projectable, interp_ok,
@@ -14,13 +18,13 @@ use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
 use rustc_middle::mir::*;
 use rustc_middle::ty::layout::{HasParamEnv, LayoutOf};
 use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_mir_dataflow::lattice::FlatSet;
+use rustc_mir_dataflow::fmt::DebugWithContext;
+use rustc_mir_dataflow::lattice::{FlatSet, HasBottom};
 use rustc_mir_dataflow::value_analysis::{
-    Map, PlaceIndex, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace,
+    Map, PlaceIndex, State, TrackElem, ValueOrPlace, debug_with_context,
 };
 use rustc_mir_dataflow::{Analysis, Results, ResultsVisitor};
 use rustc_span::DUMMY_SP;
-use rustc_target::abi::{Abi, FIRST_VARIANT, FieldIdx, Size, VariantIdx};
 use tracing::{debug, debug_span, instrument};

 // These constants are somewhat random guesses and have not been optimized.
@@ -58,8 +62,8 @@ impl<'tcx> crate::MirPass<'tcx> for DataflowConstProp {

         // Perform the actual dataflow analysis.
         let analysis = ConstAnalysis::new(tcx, body, map);
-        let mut results = debug_span!("analyze")
-            .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
+        let mut results =
+            debug_span!("analyze").in_scope(|| analysis.iterate_to_fixpoint(tcx, body, None));

         // Collect results and patch the body afterwards.
         let mut visitor = Collector::new(tcx, &body.local_decls);
@@ -69,6 +73,10 @@ impl<'tcx> crate::MirPass<'tcx> for DataflowConstProp {
     }
 }

+// Note: Currently, places that have their reference taken cannot be tracked. Although this would
+// be possible, it has to rely on some aliasing model, which we are not ready to commit to yet.
+// Because of that, we can assume that the only way to change the value behind a tracked place is
+// by direct assignment.
 struct ConstAnalysis<'a, 'tcx> {
     map: Map<'tcx>,
     tcx: TyCtxt<'tcx>,
@@ -77,20 +85,198 @@ struct ConstAnalysis<'a, 'tcx> {
     param_env: ty::ParamEnv<'tcx>,
 }

-impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
-    type Value = FlatSet<Scalar>;
+impl<'tcx> Analysis<'tcx> for ConstAnalysis<'_, 'tcx> {
+    type Domain = State<FlatSet<Scalar>>;

     const NAME: &'static str = "ConstAnalysis";

-    fn map(&self) -> &Map<'tcx> {
-        &self.map
+    // The bottom state denotes uninitialized memory. Because we are only doing a sound
+    // approximation of the actual execution, we can also use this state for places where access
+    // would be UB.
+    fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
+        State::Unreachable
+    }
+
+    fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
+        // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥.
+        assert_matches!(state, State::Unreachable);
+        *state = State::new_reachable();
+        for arg in body.args_iter() {
+            state.flood(PlaceRef { local: arg, projection: &[] }, &self.map);
+        }
+    }
+
+    fn apply_statement_effect(
+        &mut self,
+        state: &mut Self::Domain,
+        statement: &Statement<'tcx>,
+        _location: Location,
+    ) {
+        if state.is_reachable() {
+            self.handle_statement(statement, state);
+        }
+    }
+
+    fn apply_terminator_effect<'mir>(
+        &mut self,
+        state: &mut Self::Domain,
+        terminator: &'mir Terminator<'tcx>,
+        _location: Location,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        if state.is_reachable() {
+            self.handle_terminator(terminator, state)
+        } else {
+            TerminatorEdges::None
+        }
+    }
+
+    fn apply_call_return_effect(
+        &mut self,
+        state: &mut Self::Domain,
+        _block: BasicBlock,
+        return_places: CallReturnPlaces<'_, 'tcx>,
+    ) {
+        if state.is_reachable() {
+            self.handle_call_return(return_places, state)
+        }
+    }
+}
+
+impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
+    fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map<'tcx>) -> Self {
+        let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());
+        Self {
+            map,
+            tcx,
+            local_decls: &body.local_decls,
+            ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
+            param_env,
+        }
+    }
+
+    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<FlatSet<Scalar>>) {
+        match &statement.kind {
+            StatementKind::Assign(box (place, rvalue)) => {
+                self.handle_assign(*place, rvalue, state);
+            }
+            StatementKind::SetDiscriminant { box place, variant_index } => {
+                self.handle_set_discriminant(*place, *variant_index, state);
+            }
+            StatementKind::Intrinsic(box intrinsic) => {
+                self.handle_intrinsic(intrinsic);
+            }
+            StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
+                // StorageLive leaves the local in an uninitialized state.
+                // StorageDead makes it UB to access the local afterwards.
+                state.flood_with(
+                    Place::from(*local).as_ref(),
+                    &self.map,
+                    FlatSet::<Scalar>::BOTTOM,
+                );
+            }
+            StatementKind::Deinit(box place) => {
+                // Deinit makes the place uninitialized.
+                state.flood_with(place.as_ref(), &self.map, FlatSet::<Scalar>::BOTTOM);
+            }
+            StatementKind::Retag(..) => {
+                // We don't track references.
+            }
+            StatementKind::ConstEvalCounter
+            | StatementKind::Nop
+            | StatementKind::FakeRead(..)
+            | StatementKind::PlaceMention(..)
+            | StatementKind::Coverage(..)
+            | StatementKind::AscribeUserType(..) => (),
+        }
+    }
+
+    fn handle_intrinsic(&self, intrinsic: &NonDivergingIntrinsic<'tcx>) {
+        match intrinsic {
+            NonDivergingIntrinsic::Assume(..) => {
+                // Could use this, but ignoring it is sound.
+            }
+            NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping {
+                dst: _,
+                src: _,
+                count: _,
+            }) => {
+                // This statement represents `*dst = *src`, `count` times.
+            }
+        }
+    }
+
+    fn handle_operand(
+        &self,
+        operand: &Operand<'tcx>,
+        state: &mut State<FlatSet<Scalar>>,
+    ) -> ValueOrPlace<FlatSet<Scalar>> {
+        match operand {
+            Operand::Constant(box constant) => {
+                ValueOrPlace::Value(self.handle_constant(constant, state))
+            }
+            Operand::Copy(place) | Operand::Move(place) => {
+                // On move, we would ideally flood the place with bottom. But with the current
+                // framework this is not possible (similar to `InterpCx::eval_operand`).
+                self.map.find(place.as_ref()).map(ValueOrPlace::Place).unwrap_or(ValueOrPlace::TOP)
+            }
+        }
+    }
+
+    /// The effect of a successful function call return should not be
+    /// applied here, see [`Analysis::apply_terminator_effect`].
+    fn handle_terminator<'mir>(
+        &self,
+        terminator: &'mir Terminator<'tcx>,
+        state: &mut State<FlatSet<Scalar>>,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        match &terminator.kind {
+            TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
+                // Effect is applied by `handle_call_return`.
+            }
+            TerminatorKind::Drop { place, .. } => {
+                state.flood_with(place.as_ref(), &self.map, FlatSet::<Scalar>::BOTTOM);
+            }
+            TerminatorKind::Yield { .. } => {
+                // They would have an effect, but are not allowed in this phase.
+                bug!("encountered disallowed terminator");
+            }
+            TerminatorKind::SwitchInt { discr, targets } => {
+                return self.handle_switch_int(discr, targets, state);
+            }
+            TerminatorKind::TailCall { .. } => {
+                // FIXME(explicit_tail_calls): determine if we need to do something here (probably
+                // not)
+            }
+            TerminatorKind::Goto { .. }
+            | TerminatorKind::UnwindResume
+            | TerminatorKind::UnwindTerminate(_)
+            | TerminatorKind::Return
+            | TerminatorKind::Unreachable
+            | TerminatorKind::Assert { .. }
+            | TerminatorKind::CoroutineDrop
+            | TerminatorKind::FalseEdge { .. }
+            | TerminatorKind::FalseUnwind { .. } => {
+                // These terminators have no effect on the analysis.
+            }
+        }
+        terminator.edges()
+    }
+
+    fn handle_call_return(
+        &self,
+        return_places: CallReturnPlaces<'_, 'tcx>,
+        state: &mut State<FlatSet<Scalar>>,
+    ) {
+        return_places.for_each(|place| {
+            state.flood(place.as_ref(), &self.map);
+        })
     }

     fn handle_set_discriminant(
         &self,
         place: Place<'tcx>,
         variant_index: VariantIdx,
-        state: &mut State<Self::Value>,
+        state: &mut State<FlatSet<Scalar>>,
     ) {
         state.flood_discr(place.as_ref(), &self.map);
         if self.map.find_discr(place.as_ref()).is_some() {
@@ -109,17 +295,17 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
         &self,
         target: Place<'tcx>,
         rvalue: &Rvalue<'tcx>,
-        state: &mut State<Self::Value>,
+        state: &mut State<FlatSet<Scalar>>,
     ) {
         match rvalue {
             Rvalue::Use(operand) => {
-                state.flood(target.as_ref(), self.map());
+                state.flood(target.as_ref(), &self.map);
                 if let Some(target) = self.map.find(target.as_ref()) {
                     self.assign_operand(state, target, operand);
                 }
             }
             Rvalue::CopyForDeref(rhs) => {
-                state.flood(target.as_ref(), self.map());
+                state.flood(target.as_ref(), &self.map);
                 if let Some(target) = self.map.find(target.as_ref()) {
                     self.assign_operand(state, target, &Operand::Copy(*rhs));
                 }
@@ -127,9 +313,9 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
             Rvalue::Aggregate(kind, operands) => {
                 // If we assign `target = Enum::Variant#0(operand)`,
                 // we must make sure that all `target as Variant#i` are `Top`.
-                state.flood(target.as_ref(), self.map());
+                state.flood(target.as_ref(), &self.map);

-                let Some(target_idx) = self.map().find(target.as_ref()) else { return };
+                let Some(target_idx) = self.map.find(target.as_ref()) else { return };

                 let (variant_target, variant_index) = match **kind {
                     AggregateKind::Tuple | AggregateKind::Closure(..) => (Some(target_idx), None),
@@ -148,14 +334,14 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
                 if let Some(variant_target_idx) = variant_target {
                     for (field_index, operand) in operands.iter_enumerated() {
                         if let Some(field) =
-                            self.map().apply(variant_target_idx, TrackElem::Field(field_index))
+                            self.map.apply(variant_target_idx, TrackElem::Field(field_index))
                         {
                             self.assign_operand(state, field, operand);
                         }
                     }
                 }
                 if let Some(variant_index) = variant_index
-                    && let Some(discr_idx) = self.map().apply(target_idx, TrackElem::Discriminant)
+                    && let Some(discr_idx) = self.map.apply(target_idx, TrackElem::Discriminant)
                 {
                     // We are assigning the discriminant as part of an aggregate.
                     // This discriminant can only alias a variant field's value if the operand
@@ -170,23 +356,23 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
             }
             Rvalue::BinaryOp(op, box (left, right)) if op.is_overflowing() => {
                 // Flood everything now, so we can use `insert_value_idx` directly later.
-                state.flood(target.as_ref(), self.map());
+                state.flood(target.as_ref(), &self.map);

-                let Some(target) = self.map().find(target.as_ref()) else { return };
+                let Some(target) = self.map.find(target.as_ref()) else { return };

-                let value_target = self.map().apply(target, TrackElem::Field(0_u32.into()));
-                let overflow_target = self.map().apply(target, TrackElem::Field(1_u32.into()));
+                let value_target = self.map.apply(target, TrackElem::Field(0_u32.into()));
+                let overflow_target = self.map.apply(target, TrackElem::Field(1_u32.into()));

                 if value_target.is_some() || overflow_target.is_some() {
                     let (val, overflow) = self.binary_op(state, *op, left, right);

                     if let Some(value_target) = value_target {
                         // We have flooded `target` earlier.
-                        state.insert_value_idx(value_target, val, self.map());
+                        state.insert_value_idx(value_target, val, &self.map);
                     }
                     if let Some(overflow_target) = overflow_target {
                         // We have flooded `target` earlier.
-                        state.insert_value_idx(overflow_target, overflow, self.map());
+                        state.insert_value_idx(overflow_target, overflow, &self.map);
                     }
                 }
             }
@@ -196,27 +382,30 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
                 _,
             ) => {
                 let pointer = self.handle_operand(operand, state);
-                state.assign(target.as_ref(), pointer, self.map());
+                state.assign(target.as_ref(), pointer, &self.map);

-                if let Some(target_len) = self.map().find_len(target.as_ref())
+                if let Some(target_len) = self.map.find_len(target.as_ref())
                     && let operand_ty = operand.ty(self.local_decls, self.tcx)
                     && let Some(operand_ty) = operand_ty.builtin_deref(true)
                     && let ty::Array(_, len) = operand_ty.kind()
                     && let Some(len) = Const::Ty(self.tcx.types.usize, *len)
                         .try_eval_scalar_int(self.tcx, self.param_env)
                 {
-                    state.insert_value_idx(target_len, FlatSet::Elem(len.into()), self.map());
+                    state.insert_value_idx(target_len, FlatSet::Elem(len.into()), &self.map);
                 }
             }
-            _ => self.super_assign(target, rvalue, state),
+            _ => {
+                let result = self.handle_rvalue(rvalue, state);
+                state.assign(target.as_ref(), result, &self.map);
+            }
         }
     }

     fn handle_rvalue(
         &self,
         rvalue: &Rvalue<'tcx>,
-        state: &mut State<Self::Value>,
-    ) -> ValueOrPlace<Self::Value> {
+        state: &mut State<FlatSet<Scalar>>,
+    ) -> ValueOrPlace<FlatSet<Scalar>> {
         let val = match rvalue {
             Rvalue::Len(place) => {
                 let place_ty = place.ty(self.local_decls, self.tcx);
@@ -225,7 +414,7 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
                         .try_eval_scalar(self.tcx, self.param_env)
                         .map_or(FlatSet::Top, FlatSet::Elem)
                 } else if let [ProjectionElem::Deref] = place.projection[..] {
-                    state.get_len(place.local.into(), self.map())
+                    state.get_len(place.local.into(), &self.map)
                 } else {
                     FlatSet::Top
                 }
@@ -296,8 +485,24 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
                 };
                 FlatSet::Elem(Scalar::from_target_usize(val, &self.tcx))
             }
-            Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), self.map()),
-            _ => return self.super_rvalue(rvalue, state),
+            Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), &self.map),
+            Rvalue::Use(operand) => return self.handle_operand(operand, state),
+            Rvalue::CopyForDeref(place) => {
+                return self.handle_operand(&Operand::Copy(*place), state);
+            }
+            Rvalue::Ref(..) | Rvalue::RawPtr(..) => {
+                // We don't track such places.
+                return ValueOrPlace::TOP;
+            }
+            Rvalue::Repeat(..)
+            | Rvalue::ThreadLocalRef(..)
+            | Rvalue::Cast(..)
+            | Rvalue::BinaryOp(..)
+            | Rvalue::Aggregate(..)
+            | Rvalue::ShallowInitBox(..) => {
+                // No modification is possible through these r-values.
+                return ValueOrPlace::TOP;
+            }
         };
         ValueOrPlace::Value(val)
     }
@@ -305,8 +510,8 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
     fn handle_constant(
         &self,
         constant: &ConstOperand<'tcx>,
-        _state: &mut State<Self::Value>,
-    ) -> Self::Value {
+        _state: &mut State<FlatSet<Scalar>>,
+    ) -> FlatSet<Scalar> {
         constant
             .const_
             .try_eval_scalar(self.tcx, self.param_env)
@@ -317,11 +522,11 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
         &self,
         discr: &'mir Operand<'tcx>,
         targets: &'mir SwitchTargets,
-        state: &mut State<Self::Value>,
+        state: &mut State<FlatSet<Scalar>>,
     ) -> TerminatorEdges<'mir, 'tcx> {
         let value = match self.handle_operand(discr, state) {
             ValueOrPlace::Value(value) => value,
-            ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
+            ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
         };
         match value {
             // We are branching on uninitialized data, this is UB, treat it as unreachable.
@@ -334,19 +539,6 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
             FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
         }
     }
-}
-
-impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
-    fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map<'tcx>) -> Self {
-        let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());
-        Self {
-            map,
-            tcx,
-            local_decls: &body.local_decls,
-            ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
-            param_env,
-        }
-    }

     /// The caller must have flooded `place`.
     fn assign_operand(
@@ -457,7 +649,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
                 // a pair and sometimes not. But as a hack we always return a pair
                 // and just make the 2nd component `Bottom` when it does not exist.
                 Some(val) => {
-                    if matches!(val.layout.abi, Abi::ScalarPair(..)) {
+                    if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                         let (val, overflow) = val.to_scalar_pair();
                         (FlatSet::Elem(val), FlatSet::Elem(overflow))
                     } else {
@@ -470,7 +662,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
             // Exactly one side is known, attempt some algebraic simplifications.
             (FlatSet::Elem(const_arg), _) | (_, FlatSet::Elem(const_arg)) => {
                 let layout = const_arg.layout;
-                if !matches!(layout.abi, rustc_target::abi::Abi::Scalar(..)) {
+                if !matches!(layout.backend_repr, rustc_target::abi::BackendRepr::Scalar(..)) {
                     return (FlatSet::Top, FlatSet::Top);
                 }

@@ -537,16 +729,40 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
     }
 }

-pub(crate) struct Patch<'tcx> {
+/// This is used to visualize the dataflow analysis.
+impl<'tcx> DebugWithContext<ConstAnalysis<'_, 'tcx>> for State<FlatSet<Scalar>> {
+    fn fmt_with(&self, ctxt: &ConstAnalysis<'_, 'tcx>, f: &mut Formatter<'_>) -> std::fmt::Result {
+        match self {
+            State::Reachable(values) => debug_with_context(values, None, &ctxt.map, f),
+            State::Unreachable => write!(f, "unreachable"),
+        }
+    }
+
+    fn fmt_diff_with(
+        &self,
+        old: &Self,
+        ctxt: &ConstAnalysis<'_, 'tcx>,
+        f: &mut Formatter<'_>,
+    ) -> std::fmt::Result {
+        match (self, old) {
+            (State::Reachable(this), State::Reachable(old)) => {
+                debug_with_context(this, Some(old), &ctxt.map, f)
+            }
+            _ => Ok(()), // Consider printing something here.
+        }
+    }
+}
+
+struct Patch<'tcx> {
     tcx: TyCtxt<'tcx>,

     /// For a given MIR location, this stores the values of the operands used by that location. In
     /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
     /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
-    pub(crate) before_effect: FxHashMap<(Location, Place<'tcx>), Const<'tcx>>,
+    before_effect: FxHashMap<(Location, Place<'tcx>), Const<'tcx>>,

     /// Stores the assigned values for assignments where the Rvalue is constant.
-    pub(crate) assignments: FxHashMap<Location, Const<'tcx>>,
+    assignments: FxHashMap<Location, Const<'tcx>>,
 }

 impl<'tcx> Patch<'tcx> {
@@ -589,13 +805,13 @@ impl<'a, 'tcx> Collector<'a, 'tcx> {
         }

         let place = map.find(place.as_ref())?;
-        if layout.abi.is_scalar()
+        if layout.backend_repr.is_scalar()
             && let Some(value) = propagatable_scalar(place, state, map)
         {
             return Some(Const::Val(ConstValue::Scalar(value), ty));
         }

-        if matches!(layout.abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
+        if matches!(layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
             let alloc_id = ecx
                 .intern_with_temp_alloc(layout, |ecx, dest| {
                     try_write_constant(ecx, dest, place, ty, state, map)
@@ -641,7 +857,7 @@ fn try_write_constant<'tcx>(
     }

     // Fast path for scalars.
-    if layout.abi.is_scalar()
+    if layout.backend_repr.is_scalar()
         && let Some(value) = propagatable_scalar(place, state, map)
     {
         return ecx.write_immediate(Immediate::Scalar(value), dest);
@@ -725,8 +941,7 @@ fn try_write_constant<'tcx>(
     interp_ok(())
 }

-impl<'mir, 'tcx>
-    ResultsVisitor<'mir, 'tcx, Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>>
+impl<'mir, 'tcx> ResultsVisitor<'mir, 'tcx, Results<'tcx, ConstAnalysis<'_, 'tcx>>>
     for Collector<'_, 'tcx>
 {
     type Domain = State<FlatSet<Scalar>>;
@@ -734,7 +949,7 @@ impl<'mir, 'tcx>
     #[instrument(level = "trace", skip(self, results, statement))]
     fn visit_statement_before_primary_effect(
         &mut self,
-        results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
+        results: &mut Results<'tcx, ConstAnalysis<'_, 'tcx>>,
         state: &Self::Domain,
         statement: &'mir Statement<'tcx>,
         location: Location,
@@ -744,8 +959,8 @@ impl<'mir, 'tcx>
                 OperandCollector {
                     state,
                     visitor: self,
-                    ecx: &mut results.analysis.0.ecx,
-                    map: &results.analysis.0.map,
+                    ecx: &mut results.analysis.ecx,
+                    map: &results.analysis.map,
                 }
                 .visit_rvalue(rvalue, location);
             }
@@ -756,7 +971,7 @@ impl<'mir, 'tcx>
     #[instrument(level = "trace", skip(self, results, statement))]
     fn visit_statement_after_primary_effect(
         &mut self,
-        results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
+        results: &mut Results<'tcx, ConstAnalysis<'_, 'tcx>>,
         state: &Self::Domain,
         statement: &'mir Statement<'tcx>,
         location: Location,
@@ -767,10 +982,10 @@ impl<'mir, 'tcx>
             }
             StatementKind::Assign(box (place, _)) => {
                 if let Some(value) = self.try_make_constant(
-                    &mut results.analysis.0.ecx,
+                    &mut results.analysis.ecx,
                     place,
                     state,
-                    &results.analysis.0.map,
+                    &results.analysis.map,
                 ) {
                     self.patch.assignments.insert(location, value);
                 }
@@ -781,7 +996,7 @@ impl<'mir, 'tcx>

     fn visit_terminator_before_primary_effect(
         &mut self,
-        results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
+        results: &mut Results<'tcx, ConstAnalysis<'_, 'tcx>>,
         state: &Self::Domain,
         terminator: &'mir Terminator<'tcx>,
         location: Location,
@@ -789,8 +1004,8 @@ impl<'mir, 'tcx>
         OperandCollector {
             state,
             visitor: self,
-            ecx: &mut results.analysis.0.ecx,
-            map: &results.analysis.0.map,
+            ecx: &mut results.analysis.ecx,
+            map: &results.analysis.map,
         }
         .visit_terminator(terminator, location);
     }
diff --git a/compiler/rustc_mir_transform/src/dead_store_elimination.rs b/compiler/rustc_mir_transform/src/dead_store_elimination.rs
index edffe6ce78f..2898f82e25c 100644
--- a/compiler/rustc_mir_transform/src/dead_store_elimination.rs
+++ b/compiler/rustc_mir_transform/src/dead_store_elimination.rs
@@ -37,8 +37,7 @@ fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
     always_live.union(&borrowed_locals);

     let mut live = MaybeTransitiveLiveLocals::new(&always_live)
-        .into_engine(tcx, body)
-        .iterate_to_fixpoint()
+        .iterate_to_fixpoint(tcx, body, None)
         .into_results_cursor(body);

     // For blocks with a call terminator, if an argument copy can be turned into a move,
diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs
index ad83c0295ba..beeab0d4a66 100644
--- a/compiler/rustc_mir_transform/src/dest_prop.rs
+++ b/compiler/rustc_mir_transform/src/dest_prop.rs
@@ -169,10 +169,7 @@ impl<'tcx> crate::MirPass<'tcx> for DestinationPropagation {

         let borrowed = rustc_mir_dataflow::impls::borrowed_locals(body);

-        let live = MaybeLiveLocals
-            .into_engine(tcx, body)
-            .pass_name("MaybeLiveLocals-DestinationPropagation")
-            .iterate_to_fixpoint();
+        let live = MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("MaybeLiveLocals-DestProp"));

         let points = DenseLocationMap::new(body);
         let mut live = save_as_intervals(&points, body, live);
diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs
index 30e1ac05e03..58e1db19438 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -64,18 +64,14 @@ impl<'tcx> crate::MirPass<'tcx> for ElaborateDrops {

         let mut inits = MaybeInitializedPlaces::new(tcx, body, &env.move_data)
             .skipping_unreachable_unwind()
-            .into_engine(tcx, body)
-            .pass_name("elaborate_drops")
-            .iterate_to_fixpoint()
+            .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
             .into_results_cursor(body);
         let dead_unwinds = compute_dead_unwinds(body, &mut inits);

         let uninits = MaybeUninitializedPlaces::new(tcx, body, &env.move_data)
             .mark_inactive_variants_as_uninit()
             .skipping_unreachable_unwind(dead_unwinds)
-            .into_engine(tcx, body)
-            .pass_name("elaborate_drops")
-            .iterate_to_fixpoint()
+            .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
             .into_results_cursor(body);

         let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs
index 79c62372df0..274eea9563f 100644
--- a/compiler/rustc_mir_transform/src/gvn.rs
+++ b/compiler/rustc_mir_transform/src/gvn.rs
@@ -85,6 +85,7 @@ use std::borrow::Cow;

 use either::Either;
+use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
 use rustc_const_eval::const_eval::DummyMachine;
 use rustc_const_eval::interpret::{
     ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
@@ -103,7 +104,6 @@ use rustc_middle::ty::layout::{HasParamEnv, LayoutOf};
 use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_span::DUMMY_SP;
 use rustc_span::def_id::DefId;
-use rustc_target::abi::{self, Abi, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
 use smallvec::SmallVec;
 use tracing::{debug, instrument, trace};

@@ -427,7 +427,10 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             };
                 let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                 ImmTy::from_immediate(ptr_imm, ty).into()
-            } else if matches!(ty.abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
+            } else if matches!(
+                ty.backend_repr,
+                BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
+            ) {
                 let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                 let variant_dest = if let Some(variant) = variant {
                     self.ecx.project_downcast(&dest, variant).discard_err()?
@@ -573,12 +576,12 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         // limited transmutes: it only works between types with the same layout, and
         // cannot transmute pointers to integers.
         if value.as_mplace_or_imm().is_right() {
-            let can_transmute = match (value.layout.abi, to.abi) {
-                (Abi::Scalar(s1), Abi::Scalar(s2)) => {
+            let can_transmute = match (value.layout.backend_repr, to.backend_repr) {
+                (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                     s1.size(&self.ecx) == s2.size(&self.ecx)
                         && !matches!(s1.primitive(), Primitive::Pointer(..))
                 }
-                (Abi::ScalarPair(a1, b1), Abi::ScalarPair(a2, b2)) => {
+                (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                     a1.size(&self.ecx) == a2.size(&self.ecx) &&
                         b1.size(&self.ecx) == b2.size(&self.ecx) &&
                         // The alignment of the second component determines its offset, so that also needs to match.
@@ -1079,7 +1082,9 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             }
         }

-        if let AggregateTy::Def(_, _) = ty
+        // unsound: https://github.com/rust-lang/rust/issues/132353
+        if tcx.sess.opts.unstable_opts.unsound_mir_opts
+            && let AggregateTy::Def(_, _) = ty
             && let Some(value) =
                 self.simplify_aggregate_to_copy(rvalue, location, &fields, variant_index)
         {
@@ -1241,7 +1246,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {

         let as_bits = |value| {
             let constant = self.evaluated[value].as_ref()?;
-            if layout.abi.is_scalar() {
+            if layout.backend_repr.is_scalar() {
                 let scalar = self.ecx.read_scalar(constant).discard_err()?;
                 scalar.to_bits(constant.layout.size).discard_err()
             } else {
@@ -1497,12 +1502,12 @@ fn op_to_prop_const<'tcx>(
     // Do not synthetize too large constants. Codegen will just memcpy them, which we'd like to
     // avoid.
-    if !matches!(op.layout.abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
+    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
         return None;
     }

     // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
-    if let Abi::Scalar(abi::Scalar::Initialized { .. }) = op.layout.abi
+    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
         && let Some(scalar) = ecx.read_scalar(op).discard_err()
     {
         if !scalar.try_to_scalar_int().is_ok() {
diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs
index 42d6bdf6cee..404470db5c5 100644
--- a/compiler/rustc_mir_transform/src/inline.rs
+++ b/compiler/rustc_mir_transform/src/inline.rs
@@ -244,8 +244,13 @@ impl<'tcx> Inliner<'tcx> {
         // Normally, this shouldn't be required, but trait normalization failure can create a
         // validation ICE.
         let output_type = callee_body.return_ty();
-        if !util::relate_types(self.tcx, self.param_env, ty::Covariant, output_type, destination_ty)
-        {
+        if !util::sub_types(
+            self.tcx,
+            caller_body.typing_mode(self.tcx),
+            self.param_env,
+            output_type,
+            destination_ty,
+        ) {
             trace!(?output_type, ?destination_ty);
             return Err("failed to normalize return type");
         }
@@ -275,8 +280,13 @@ impl<'tcx> Inliner<'tcx> {
                 self_arg_ty.into_iter().chain(arg_tuple_tys).zip(callee_body.args_iter())
             {
                 let input_type = callee_body.local_decls[input].ty;
-                if !util::relate_types(self.tcx, self.param_env, ty::Covariant, input_type, arg_ty)
-                {
+                if !util::sub_types(
+                    self.tcx,
+                    caller_body.typing_mode(self.tcx),
+                    self.param_env,
+                    input_type,
+                    arg_ty,
+                ) {
                     trace!(?arg_ty, ?input_type);
                     return Err("failed to normalize tuple argument type");
                 }
@@ -285,8 +295,13 @@ impl<'tcx> Inliner<'tcx> {
             for (arg, input) in args.iter().zip(callee_body.args_iter()) {
                 let input_type = callee_body.local_decls[input].ty;
                 let arg_ty = arg.node.ty(&caller_body.local_decls, self.tcx);
-                if !util::relate_types(self.tcx, self.param_env, ty::Covariant, input_type, arg_ty)
-                {
+                if !util::sub_types(
+                    self.tcx,
+                    caller_body.typing_mode(self.tcx),
+                    self.param_env,
+                    input_type,
+                    arg_ty,
+                ) {
                     trace!(?arg_ty, ?input_type);
                     return Err("failed to normalize argument type");
                 }
diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs
index 08923748eb2..0604665642a 100644
--- a/compiler/rustc_mir_transform/src/known_panics_lint.rs
+++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs
@@ -4,6 +4,7 @@

 use std::fmt::Debug;

+use rustc_abi::{BackendRepr, FieldIdx, HasDataLayout, Size, TargetDataLayout, VariantIdx};
 use rustc_const_eval::const_eval::DummyMachine;
 use rustc_const_eval::interpret::{
     ImmTy, InterpCx, InterpResult, Projectable, Scalar, format_interp_error, interp_ok,
@@ -19,7 +20,6 @@ use rustc_middle::mir::*;
 use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
 use rustc_middle::ty::{self, ConstInt, ParamEnv, ScalarInt, Ty, TyCtxt, TypeVisitableExt};
 use rustc_span::Span;
-use rustc_target::abi::{Abi, FieldIdx, HasDataLayout, Size, TargetDataLayout, VariantIdx};
 use tracing::{debug, instrument, trace};

 use crate::errors::{AssertLint, AssertLintKind};
@@ -557,7 +557,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
         let right = self.use_ecx(|this| this.ecx.read_immediate(&right))?;

         let val = self.use_ecx(|this| this.ecx.binary_op(bin_op, &left, &right))?;
-        if matches!(val.layout.abi, Abi::ScalarPair(..)) {
+        if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
             // FIXME `Value` should properly support pairs in `Immediate`... but currently
             // it does not.
             let (val, overflow) = val.to_pair(&self.ecx);
@@ -651,9 +651,9 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
         let to = self.ecx.layout_of(to).ok()?;
         // `offset` for immediates only supports scalar/scalar-pair ABIs,
         // so bail out if the target is not one.
-        match (value.layout.abi, to.abi) {
-            (Abi::Scalar(..), Abi::Scalar(..)) => {}
-            (Abi::ScalarPair(..), Abi::ScalarPair(..)) => {}
+        match (value.layout.backend_repr, to.backend_repr) {
+            (BackendRepr::Scalar(..), BackendRepr::Scalar(..)) => {}
+            (BackendRepr::ScalarPair(..), BackendRepr::ScalarPair(..)) => {}
             _ => return None,
         }
diff --git a/compiler/rustc_mir_transform/src/lint.rs b/compiler/rustc_mir_transform/src/lint.rs
index 23733994a8b..d8ff1cfc90b 100644
--- a/compiler/rustc_mir_transform/src/lint.rs
+++ b/compiler/rustc_mir_transform/src/lint.rs
@@ -17,13 +17,11 @@ pub(super) fn lint_body<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, when: String
     let always_live_locals = &always_storage_live_locals(body);

     let maybe_storage_live = MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
-        .into_engine(tcx, body)
-        .iterate_to_fixpoint()
+        .iterate_to_fixpoint(tcx, body, None)
         .into_results_cursor(body);

     let maybe_storage_dead = MaybeStorageDead::new(Cow::Borrowed(always_live_locals))
-        .into_engine(tcx, body)
-        .iterate_to_fixpoint()
+        .iterate_to_fixpoint(tcx, body, None)
         .into_results_cursor(body);

     let mut lint = Lint {
diff --git a/compiler/rustc_mir_transform/src/ref_prop.rs b/compiler/rustc_mir_transform/src/ref_prop.rs
index 53e53d9d5ba..b11b503e8d4 100644
--- a/compiler/rustc_mir_transform/src/ref_prop.rs
+++ b/compiler/rustc_mir_transform/src/ref_prop.rs
@@ -126,8 +126,7 @@ fn compute_replacement<'tcx>(
     // Compute `MaybeStorageDead` dataflow to check that we only replace when the pointee is
     // definitely live.
     let mut maybe_dead = MaybeStorageDead::new(Cow::Owned(always_live_locals))
-        .into_engine(tcx, body)
-        .iterate_to_fixpoint()
+        .iterate_to_fixpoint(tcx, body, None)
         .into_results_cursor(body);

     // Map for each local to the pointee.
diff --git a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
index 09969a4c7cc..55dd96100b0 100644
--- a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
+++ b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
@@ -22,9 +22,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveUninitDrops {
         let move_data = MoveData::gather_moves(body, tcx, |ty| ty.needs_drop(tcx, param_env));

         let mut maybe_inits = MaybeInitializedPlaces::new(tcx, body, &move_data)
-            .into_engine(tcx, body)
-            .pass_name("remove_uninit_drops")
-            .iterate_to_fixpoint()
+            .iterate_to_fixpoint(tcx, body, Some("remove_uninit_drops"))
             .into_results_cursor(body);

         let mut to_remove = vec![];
diff --git a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
index 5612e779d6b..3011af4d9d7 100644
--- a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
+++ b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
@@ -1,5 +1,6 @@
 //! A pass that eliminates branches on uninhabited or unreachable enum variants.

+use rustc_abi::Variants;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::bug;
 use rustc_middle::mir::patch::MirPatch;
@@ -9,7 +10,6 @@ use rustc_middle::mir::{
 };
 use rustc_middle::ty::layout::TyAndLayout;
 use rustc_middle::ty::{Ty, TyCtxt};
-use rustc_target::abi::{Abi, Variants};
 use tracing::trace;

 pub(super) struct UnreachableEnumBranching;
@@ -65,7 +65,7 @@ fn variant_discriminants<'tcx>(
         Variants::Multiple { variants, .. } => variants
             .iter_enumerated()
             .filter_map(|(idx, layout)| {
-                (layout.abi != Abi::Uninhabited)
+                (!layout.is_uninhabited())
                     .then(|| ty.discriminant_for_variant(tcx, idx).unwrap().val)
             })
             .collect(),
diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs
index 25e68f44456..8109a9b8ba0 100644
--- a/compiler/rustc_mir_transform/src/validate.rs
+++ b/compiler/rustc_mir_transform/src/validate.rs
@@ -5,7 +5,7 @@ use rustc_hir::LangItem;
 use rustc_index::IndexVec;
 use rustc_index::bit_set::BitSet;
 use rustc_infer::infer::TyCtxtInferExt;
-use rustc_infer::traits::{Obligation, ObligationCause, Reveal};
+use rustc_infer::traits::{Obligation, ObligationCause};
 use rustc_middle::mir::coverage::CoverageKind;
 use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::*;
@@ -20,7 +20,7 @@ use rustc_target::spec::abi::Abi;
 use rustc_trait_selection::traits::ObligationCtxt;
 use rustc_type_ir::Upcast;

-use crate::util::{is_within_packed, relate_types};
+use crate::util::{self, is_within_packed};

 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 enum EdgeKind {
@@ -50,11 +50,7 @@ impl<'tcx> crate::MirPass<'tcx> for Validator {
         }
         let def_id = body.source.def_id();
         let mir_phase = self.mir_phase;
-        let param_env = match mir_phase.reveal() {
-            Reveal::UserFacing => tcx.param_env(def_id),
-            Reveal::All => tcx.param_env_reveal_all_normalized(def_id),
-        };
-
+        let param_env = mir_phase.param_env(tcx, def_id);
         let can_unwind = if mir_phase <= MirPhase::Runtime(RuntimePhase::Initial) {
             // In this case `AbortUnwindingCalls` haven't yet been executed.
             true
@@ -587,7 +583,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             Variance::Covariant
         };

-        crate::util::relate_types(self.tcx, self.param_env, variance, src, dest)
+        crate::util::relate_types(
+            self.tcx,
+            self.body.typing_mode(self.tcx),
+            self.param_env,
+            variance,
+            src,
+            dest,
+        )
     }

     /// Check that the given predicate definitely holds in the param-env of this MIR body.
@@ -606,7 +609,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             return true;
         }

-        let infcx = self.tcx.infer_ctxt().build();
+        let infcx = self.tcx.infer_ctxt().build(self.body.typing_mode(self.tcx));
         let ocx = ObligationCtxt::new(&infcx);
         ocx.register_obligation(Obligation::new(
             self.tcx,
@@ -798,10 +801,10 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
                 }
             }
             ProjectionElem::Subtype(ty) => {
-                if !relate_types(
+                if !util::sub_types(
                     self.tcx,
+                    self.body.typing_mode(self.tcx),
                     self.param_env,
-                    Variance::Covariant,
                     ty,
                     place_ref.ty(&self.body.local_decls, self.tcx).ty,
                 ) {
```
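The other substitution that recurs across these files is the layout-ABI rename: `rustc_target::abi::Abi` becomes `rustc_abi::BackendRepr`, and the `layout.abi` field is read as `layout.backend_repr`. Condensed from the hunks above (again rustc-internal types, shown only as a sketch):

```rust
// Before: imported from rustc_target::abi, field named `abi`.
use rustc_target::abi::Abi;
if matches!(val.layout.abi, Abi::ScalarPair(..)) { /* ... */ }

// After: imported from rustc_abi, type and field renamed.
use rustc_abi::BackendRepr;
if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) { /* ... */ }
```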
