Diffstat (limited to 'compiler/rustc_mir_transform/src')
29 files changed, 544 insertions, 399 deletions
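Much of this diff is mechanical cleanup: `match` expressions whose only purpose was to diverge on one arm are rewritten with `let ... else` (see `#![feature(let_else)]` in `lib.rs`), and nested `if let` blocks are collapsed into `let`-chains (`#![feature(let_chains)]`). The standalone sketch below is not part of the patch; the function name and data are invented purely to illustrate the `let ... else` shape that these hunks adopt, using stable Rust.

// Illustrative sketch only; not part of the patch. The diverging arm of a
// former `match` moves into the `else` block, and the happy-path binding
// stays at the outer indentation level.

fn first_even(ids: &[Option<u32>]) -> Option<u32> {
    for &candidate in ids {
        // Before: `let id = match candidate { Some(id) => id, None => continue };`
        let Some(id) = candidate else {
            continue;
        };
        if id % 2 == 0 {
            return Some(id);
        }
    }
    None
}

fn main() {
    // None is skipped, 3 is odd, 8 is the first even value.
    assert_eq!(first_even(&[None, Some(3), Some(8)]), Some(8));
}

The same motivation applies to the `let`-chain rewrites in `const_prop.rs`, `dest_prop.rs`, and `inline.rs`: a chain of `if let ... && let ... && cond` avoids one level of nesting per condition without changing behavior.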
diff --git a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs index 1abb64219f6..757dc093755 100644 --- a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs +++ b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs @@ -66,10 +66,7 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls { if block.is_cleanup { continue; } - let terminator = match &block.terminator { - Some(terminator) => terminator, - None => continue, - }; + let Some(terminator) = &block.terminator else { continue }; let span = terminator.source_info.span; let call_can_unwind = match &terminator.kind { diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs index 9eaf2b6a211..8de0aad041c 100644 --- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs +++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs @@ -84,9 +84,8 @@ fn add_move_for_packed_drop<'tcx>( is_cleanup: bool, ) { debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc); - let (place, target, unwind) = match terminator.kind { - TerminatorKind::Drop { ref place, target, unwind } => (place, target, unwind), - _ => unreachable!(), + let TerminatorKind::Drop { ref place, target, unwind } = terminator.kind else { + unreachable!(); }; let source_info = terminator.source_info; diff --git a/compiler/rustc_mir_transform/src/check_const_item_mutation.rs b/compiler/rustc_mir_transform/src/check_const_item_mutation.rs index a19a3c8b1d5..097a6186cd5 100644 --- a/compiler/rustc_mir_transform/src/check_const_item_mutation.rs +++ b/compiler/rustc_mir_transform/src/check_const_item_mutation.rs @@ -64,7 +64,7 @@ impl<'tcx> ConstMutationChecker<'_, 'tcx> { place: &Place<'tcx>, const_item: DefId, location: Location, - decorate: impl for<'b> FnOnce(LintDiagnosticBuilder<'b>) -> DiagnosticBuilder<'b>, + decorate: impl for<'b> FnOnce(LintDiagnosticBuilder<'b, ()>) -> DiagnosticBuilder<'b, ()>, ) { // Don't lint on borrowing/assigning when a dereference is involved. 
// If we 'leave' the temporary via a dereference, we must @@ -88,7 +88,7 @@ impl<'tcx> ConstMutationChecker<'_, 'tcx> { |lint| { decorate(lint) .span_note(self.tcx.def_span(const_item), "`const` item defined here") - .emit() + .emit(); }, ); } diff --git a/compiler/rustc_mir_transform/src/check_packed_ref.rs b/compiler/rustc_mir_transform/src/check_packed_ref.rs index 23d59c80071..f0367958ef8 100644 --- a/compiler/rustc_mir_transform/src/check_packed_ref.rs +++ b/compiler/rustc_mir_transform/src/check_packed_ref.rs @@ -46,7 +46,7 @@ fn unsafe_derive_on_repr_packed(tcx: TyCtxt<'_>, def_id: LocalDefId) { does not derive Copy (error E0133)" .to_string() }; - lint.build(&message).emit() + lint.build(&message).emit(); }); } @@ -110,7 +110,7 @@ impl<'tcx> Visitor<'tcx> for PackedRefChecker<'_, 'tcx> { reference with a raw pointer and use `read_unaligned`/`write_unaligned` \ (loads and stores via `*p` must be properly aligned even when using raw pointers)" ) - .emit() + .emit(); }, ); } diff --git a/compiler/rustc_mir_transform/src/check_unsafety.rs b/compiler/rustc_mir_transform/src/check_unsafety.rs index fd93744d400..f8d0e448ce7 100644 --- a/compiler/rustc_mir_transform/src/check_unsafety.rs +++ b/compiler/rustc_mir_transform/src/check_unsafety.rs @@ -1,17 +1,17 @@ -use rustc_data_structures::fx::FxHashSet; +use rustc_data_structures::fx::FxHashMap; use rustc_errors::struct_span_err; use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::hir_id::HirId; use rustc_hir::intravisit; -use rustc_hir::Node; use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor}; -use rustc_middle::mir::*; use rustc_middle::ty::query::Providers; use rustc_middle::ty::{self, TyCtxt}; +use rustc_middle::{lint, mir::*}; use rustc_session::lint::builtin::{UNSAFE_OP_IN_UNSAFE_FN, UNUSED_UNSAFE}; use rustc_session::lint::Level; +use std::collections::hash_map; use std::ops::Bound; pub struct UnsafetyChecker<'a, 'tcx> { @@ -21,9 +21,12 @@ pub struct UnsafetyChecker<'a, 'tcx> { source_info: SourceInfo, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - /// Mark an `unsafe` block as used, so we don't lint it. - used_unsafe: FxHashSet<hir::HirId>, - inherited_blocks: Vec<(hir::HirId, bool)>, + + /// Used `unsafe` blocks in this function. This is used for the "unused_unsafe" lint. + /// + /// The keys are the used `unsafe` blocks, the UnusedUnsafeKind indicates whether + /// or not any of the usages happen at a place that doesn't allow `unsafe_op_in_unsafe_fn`. + used_unsafe_blocks: FxHashMap<HirId, UsedUnsafeBlockData>, } impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> { @@ -40,8 +43,7 @@ impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> { source_info: SourceInfo::outermost(body.span), tcx, param_env, - used_unsafe: Default::default(), - inherited_blocks: vec![], + used_unsafe_blocks: Default::default(), } } } @@ -123,9 +125,12 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { } } &AggregateKind::Closure(def_id, _) | &AggregateKind::Generator(def_id, _, _) => { - let UnsafetyCheckResult { violations, unsafe_blocks } = + let UnsafetyCheckResult { violations, used_unsafe_blocks, .. 
} = self.tcx.unsafety_check_result(def_id.expect_local()); - self.register_violations(&violations, &unsafe_blocks); + self.register_violations( + violations, + used_unsafe_blocks.iter().map(|(&h, &d)| (h, d)), + ); } }, _ => {} @@ -151,7 +156,7 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { // temporary holding the static pointer to avoid duplicate errors // <https://github.com/rust-lang/rust/pull/78068#issuecomment-731753506>. if decl.internal && place.projection.first() == Some(&ProjectionElem::Deref) { - // If the projection root is an artifical local that we introduced when + // If the projection root is an artificial local that we introduced when // desugaring `static`, give a more specific error message // (avoid the general "raw pointer" clause below, that would only be confusing). if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info { @@ -251,61 +256,72 @@ impl<'tcx> UnsafetyChecker<'_, 'tcx> { .assert_crate_local() .lint_root; self.register_violations( - &[UnsafetyViolation { source_info, lint_root, kind, details }], - &[], + [&UnsafetyViolation { source_info, lint_root, kind, details }], + [], ); } - fn register_violations( + fn register_violations<'a>( &mut self, - violations: &[UnsafetyViolation], - unsafe_blocks: &[(hir::HirId, bool)], + violations: impl IntoIterator<Item = &'a UnsafetyViolation>, + new_used_unsafe_blocks: impl IntoIterator<Item = (HirId, UsedUnsafeBlockData)>, ) { + use UsedUnsafeBlockData::{AllAllowedInUnsafeFn, SomeDisallowedInUnsafeFn}; + + let update_entry = |this: &mut Self, hir_id, new_usage| { + match this.used_unsafe_blocks.entry(hir_id) { + hash_map::Entry::Occupied(mut entry) => { + if new_usage == SomeDisallowedInUnsafeFn { + *entry.get_mut() = SomeDisallowedInUnsafeFn; + } + } + hash_map::Entry::Vacant(entry) => { + entry.insert(new_usage); + } + }; + }; let safety = self.body.source_scopes[self.source_info.scope] .local_data .as_ref() .assert_crate_local() .safety; - let within_unsafe = match safety { + match safety { // `unsafe` blocks are required in safe code - Safety::Safe => { - for violation in violations { - match violation.kind { - UnsafetyViolationKind::General => {} - UnsafetyViolationKind::UnsafeFn => { - bug!("`UnsafetyViolationKind::UnsafeFn` in an `Safe` context") - } - } - if !self.violations.contains(violation) { - self.violations.push(*violation) + Safety::Safe => violations.into_iter().for_each(|&violation| { + match violation.kind { + UnsafetyViolationKind::General => {} + UnsafetyViolationKind::UnsafeFn => { + bug!("`UnsafetyViolationKind::UnsafeFn` in an `Safe` context") } } - false - } - // With the RFC 2585, no longer allow `unsafe` operations in `unsafe fn`s - Safety::FnUnsafe => { - for violation in violations { - let mut violation = *violation; - - violation.kind = UnsafetyViolationKind::UnsafeFn; - if !self.violations.contains(&violation) { - self.violations.push(violation) - } + if !self.violations.contains(&violation) { + self.violations.push(violation) } - false - } - Safety::BuiltinUnsafe => true, - Safety::ExplicitUnsafe(hir_id) => { - // mark unsafe block as used if there are any unsafe operations inside - if !violations.is_empty() { - self.used_unsafe.insert(hir_id); + }), + // With the RFC 2585, no longer allow `unsafe` operations in `unsafe fn`s + Safety::FnUnsafe => violations.into_iter().for_each(|&(mut violation)| { + violation.kind = UnsafetyViolationKind::UnsafeFn; + if !self.violations.contains(&violation) { + self.violations.push(violation) } - true - } + }), + 
Safety::BuiltinUnsafe => {} + Safety::ExplicitUnsafe(hir_id) => violations.into_iter().for_each(|violation| { + update_entry( + self, + hir_id, + match self.tcx.lint_level_at_node(UNSAFE_OP_IN_UNSAFE_FN, violation.lint_root).0 + { + Level::Allow => AllAllowedInUnsafeFn(violation.lint_root), + _ => SomeDisallowedInUnsafeFn, + }, + ) + }), }; - self.inherited_blocks.extend( - unsafe_blocks.iter().map(|&(hir_id, is_used)| (hir_id, is_used && !within_unsafe)), - ); + + new_used_unsafe_blocks + .into_iter() + .for_each(|(hir_id, usage_data)| update_entry(self, hir_id, usage_data)); } fn check_mut_borrowing_layout_constrained_field( &mut self, @@ -320,7 +336,7 @@ impl<'tcx> UnsafetyChecker<'_, 'tcx> { ProjectionElem::Field(..) => { let ty = place_base.ty(&self.body.local_decls, self.tcx).ty; if let ty::Adt(def, _) = ty.kind() { - if self.tcx.layout_scalar_valid_range(def.did) + if self.tcx.layout_scalar_valid_range(def.did()) != (Bound::Unbounded, Bound::Unbounded) { let details = if is_mut_use { @@ -387,17 +403,64 @@ pub(crate) fn provide(providers: &mut Providers) { }; } -struct UnusedUnsafeVisitor<'a> { - used_unsafe: &'a FxHashSet<hir::HirId>, - unsafe_blocks: &'a mut Vec<(hir::HirId, bool)>, +/// Context information for [`UnusedUnsafeVisitor`] traversal, +/// saves (innermost) relevant context +#[derive(Copy, Clone, Debug)] +enum Context { + Safe, + /// in an `unsafe fn` + UnsafeFn(HirId), + /// in a *used* `unsafe` block + /// (i.e. a block without unused-unsafe warning) + UnsafeBlock(HirId), +} + +struct UnusedUnsafeVisitor<'a, 'tcx> { + tcx: TyCtxt<'tcx>, + used_unsafe_blocks: &'a FxHashMap<HirId, UsedUnsafeBlockData>, + context: Context, + unused_unsafes: &'a mut Vec<(HirId, UnusedUnsafe)>, } -impl<'tcx> intravisit::Visitor<'tcx> for UnusedUnsafeVisitor<'_> { +impl<'tcx> intravisit::Visitor<'tcx> for UnusedUnsafeVisitor<'_, 'tcx> { fn visit_block(&mut self, block: &'tcx hir::Block<'tcx>) { - intravisit::walk_block(self, block); + use UsedUnsafeBlockData::{AllAllowedInUnsafeFn, SomeDisallowedInUnsafeFn}; if let hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::UserProvided) = block.rules { - self.unsafe_blocks.push((block.hir_id, self.used_unsafe.contains(&block.hir_id))); + let used = match self.tcx.lint_level_at_node(UNUSED_UNSAFE, block.hir_id) { + (Level::Allow, _) => Some(SomeDisallowedInUnsafeFn), + _ => self.used_unsafe_blocks.get(&block.hir_id).copied(), + }; + let unused_unsafe = match (self.context, used) { + (_, None) => UnusedUnsafe::Unused, + (Context::Safe, Some(_)) + | (Context::UnsafeFn(_), Some(SomeDisallowedInUnsafeFn)) => { + let previous_context = self.context; + self.context = Context::UnsafeBlock(block.hir_id); + intravisit::walk_block(self, block); + self.context = previous_context; + return; + } + (Context::UnsafeFn(hir_id), Some(AllAllowedInUnsafeFn(lint_root))) => { + UnusedUnsafe::InUnsafeFn(hir_id, lint_root) + } + (Context::UnsafeBlock(hir_id), Some(_)) => UnusedUnsafe::InUnsafeBlock(hir_id), + }; + self.unused_unsafes.push((block.hir_id, unused_unsafe)); + } + intravisit::walk_block(self, block); + } + + fn visit_fn( + &mut self, + fk: intravisit::FnKind<'tcx>, + _fd: &'tcx hir::FnDecl<'tcx>, + b: hir::BodyId, + _s: rustc_span::Span, + _id: HirId, + ) { + if matches!(fk, intravisit::FnKind::Closure) { + self.visit_body(self.tcx.hir().body(b)) } } } @@ -405,23 +468,38 @@ impl<'tcx> intravisit::Visitor<'tcx> for UnusedUnsafeVisitor<'_> { fn check_unused_unsafe( tcx: TyCtxt<'_>, def_id: LocalDefId, - used_unsafe: &FxHashSet<hir::HirId>, - unsafe_blocks: &mut 
Vec<(hir::HirId, bool)>, -) { - let body_id = tcx.hir().maybe_body_owned_by(tcx.hir().local_def_id_to_hir_id(def_id)); - - let body_id = match body_id { - Some(body) => body, - None => { - debug!("check_unused_unsafe({:?}) - no body found", def_id); - return; - } + used_unsafe_blocks: &FxHashMap<HirId, UsedUnsafeBlockData>, +) -> Vec<(HirId, UnusedUnsafe)> { + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); + let body_id = tcx.hir().maybe_body_owned_by(hir_id); + + let Some(body_id) = body_id else { + debug!("check_unused_unsafe({:?}) - no body found", def_id); + return vec![]; }; let body = tcx.hir().body(body_id); - debug!("check_unused_unsafe({:?}, body={:?}, used_unsafe={:?})", def_id, body, used_unsafe); - let mut visitor = UnusedUnsafeVisitor { used_unsafe, unsafe_blocks }; + let context = match tcx.hir().fn_sig_by_hir_id(hir_id) { + Some(sig) if sig.header.unsafety == hir::Unsafety::Unsafe => Context::UnsafeFn(hir_id), + _ => Context::Safe, + }; + + debug!( + "check_unused_unsafe({:?}, context={:?}, body={:?}, used_unsafe_blocks={:?})", + def_id, body, context, used_unsafe_blocks + ); + + let mut unused_unsafes = vec![]; + + let mut visitor = UnusedUnsafeVisitor { + tcx, + used_unsafe_blocks, + context, + unused_unsafes: &mut unused_unsafes, + }; intravisit::Visitor::visit_body(&mut visitor, body); + + unused_unsafes } fn unsafety_check_result<'tcx>( @@ -439,56 +517,52 @@ fn unsafety_check_result<'tcx>( let mut checker = UnsafetyChecker::new(body, def.did, tcx, param_env); checker.visit_body(&body); - check_unused_unsafe(tcx, def.did, &checker.used_unsafe, &mut checker.inherited_blocks); + let unused_unsafes = (!tcx.is_closure(def.did.to_def_id())) + .then(|| check_unused_unsafe(tcx, def.did, &checker.used_unsafe_blocks)); tcx.arena.alloc(UnsafetyCheckResult { - violations: checker.violations.into(), - unsafe_blocks: checker.inherited_blocks.into(), + violations: checker.violations, + used_unsafe_blocks: checker.used_unsafe_blocks, + unused_unsafes, }) } -/// Returns the `HirId` for an enclosing scope that is also `unsafe`. -fn is_enclosed( - tcx: TyCtxt<'_>, - used_unsafe: &FxHashSet<hir::HirId>, - id: hir::HirId, - unsafe_op_in_unsafe_fn_allowed: bool, -) -> Option<(&'static str, hir::HirId)> { - let parent_id = tcx.hir().get_parent_node(id); - if parent_id != id { - if used_unsafe.contains(&parent_id) { - Some(("block", parent_id)) - } else if let Some(Node::Item(&hir::Item { - kind: hir::ItemKind::Fn(ref sig, _, _), .. 
- })) = tcx.hir().find(parent_id) - { - if sig.header.unsafety == hir::Unsafety::Unsafe && unsafe_op_in_unsafe_fn_allowed { - Some(("fn", parent_id)) - } else { - None - } - } else { - is_enclosed(tcx, used_unsafe, parent_id, unsafe_op_in_unsafe_fn_allowed) - } - } else { - None - } -} - -fn report_unused_unsafe(tcx: TyCtxt<'_>, used_unsafe: &FxHashSet<hir::HirId>, id: hir::HirId) { +fn report_unused_unsafe(tcx: TyCtxt<'_>, kind: UnusedUnsafe, id: HirId) { let span = tcx.sess.source_map().guess_head_span(tcx.hir().span(id)); tcx.struct_span_lint_hir(UNUSED_UNSAFE, id, span, |lint| { let msg = "unnecessary `unsafe` block"; let mut db = lint.build(msg); db.span_label(span, msg); - if let Some((kind, id)) = - is_enclosed(tcx, used_unsafe, id, unsafe_op_in_unsafe_fn_allowed(tcx, id)) - { - db.span_label( - tcx.sess.source_map().guess_head_span(tcx.hir().span(id)), - format!("because it's nested under this `unsafe` {}", kind), - ); + match kind { + UnusedUnsafe::Unused => {} + UnusedUnsafe::InUnsafeBlock(id) => { + db.span_label( + tcx.sess.source_map().guess_head_span(tcx.hir().span(id)), + format!("because it's nested under this `unsafe` block"), + ); + } + UnusedUnsafe::InUnsafeFn(id, usage_lint_root) => { + db.span_label( + tcx.sess.source_map().guess_head_span(tcx.hir().span(id)), + format!("because it's nested under this `unsafe` fn"), + ) + .note( + "this `unsafe` block does contain unsafe operations, \ + but those are already allowed in an `unsafe fn`", + ); + let (level, source) = + tcx.lint_level_at_node(UNSAFE_OP_IN_UNSAFE_FN, usage_lint_root); + assert_eq!(level, Level::Allow); + lint::explain_lint_level_source( + tcx.sess, + UNSAFE_OP_IN_UNSAFE_FN, + Level::Allow, + source, + &mut db, + ); + } } + db.emit(); }); } @@ -501,7 +575,7 @@ pub fn check_unsafety(tcx: TyCtxt<'_>, def_id: LocalDefId) { return; } - let UnsafetyCheckResult { violations, unsafe_blocks } = tcx.unsafety_check_result(def_id); + let UnsafetyCheckResult { violations, unused_unsafes, .. } = tcx.unsafety_check_result(def_id); for &UnsafetyViolation { source_info, lint_root, kind, details } in violations.iter() { let (description, note) = details.description_and_note(); @@ -542,20 +616,8 @@ pub fn check_unsafety(tcx: TyCtxt<'_>, def_id: LocalDefId) { } } - let (mut unsafe_used, mut unsafe_unused): (FxHashSet<_>, Vec<_>) = Default::default(); - for &(block_id, is_used) in unsafe_blocks.iter() { - if is_used { - unsafe_used.insert(block_id); - } else { - unsafe_unused.push(block_id); - } - } - // The unused unsafe blocks might not be in source order; sort them so that the unused unsafe - // error messages are properly aligned and the issue-45107 and lint-unused-unsafe tests pass. 
- unsafe_unused.sort_by_cached_key(|hir_id| tcx.hir().span(*hir_id)); - - for &block_id in &unsafe_unused { - report_unused_unsafe(tcx, &unsafe_used, block_id); + for &(block_id, kind) in unused_unsafes.as_ref().unwrap() { + report_unused_unsafe(tcx, kind, block_id); } } diff --git a/compiler/rustc_mir_transform/src/const_debuginfo.rs b/compiler/rustc_mir_transform/src/const_debuginfo.rs index 839d94167fe..3577b3d2d80 100644 --- a/compiler/rustc_mir_transform/src/const_debuginfo.rs +++ b/compiler/rustc_mir_transform/src/const_debuginfo.rs @@ -55,14 +55,12 @@ fn find_optimization_oportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Consta let mut locals_to_debuginfo = BitSet::new_empty(body.local_decls.len()); for debuginfo in &body.var_debug_info { - if let VarDebugInfoContents::Place(p) = debuginfo.value { - if let Some(l) = p.as_local() { - locals_to_debuginfo.insert(l); - } + if let VarDebugInfoContents::Place(p) = debuginfo.value && let Some(l) = p.as_local() { + locals_to_debuginfo.insert(l); } } - let mut eligable_locals = Vec::new(); + let mut eligible_locals = Vec::new(); for (local, mutating_uses) in visitor.local_mutating_uses.drain_enumerated(..) { if mutating_uses != 1 || !locals_to_debuginfo.contains(local) { continue; @@ -80,13 +78,13 @@ fn find_optimization_oportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Consta &bb.statements[location.statement_index].kind { if let Some(local) = p.as_local() { - eligable_locals.push((local, *c)); + eligible_locals.push((local, *c)); } } } } - eligable_locals + eligible_locals } impl Visitor<'_> for LocalUseVisitor { diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs index 5810ce6edc9..5ed33ab9fec 100644 --- a/compiler/rustc_mir_transform/src/const_prop.rs +++ b/compiler/rustc_mir_transform/src/const_prop.rs @@ -31,8 +31,8 @@ use rustc_trait_selection::traits; use crate::MirPass; use rustc_const_eval::const_eval::ConstEvalErr; use rustc_const_eval::interpret::{ - self, compile_time_machine, AllocId, Allocation, ConstValue, CtfeValidationMode, Frame, ImmTy, - Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, MemoryKind, OpTy, + self, compile_time_machine, AllocId, ConstAllocation, ConstValue, CtfeValidationMode, Frame, + ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, MemoryKind, OpTy, Operand as InterpOperand, PlaceTy, Scalar, ScalarMaybeUninit, StackPopCleanup, StackPopUnwind, }; @@ -274,7 +274,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> fn before_access_global( _memory_extra: &(), _alloc_id: AllocId, - allocation: &Allocation<Self::PointerTag, Self::AllocExtra>, + alloc: ConstAllocation<'tcx, Self::PointerTag, Self::AllocExtra>, _static_def_id: Option<DefId>, is_write: bool, ) -> InterpResult<'tcx> { @@ -283,7 +283,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> } // If the static allocation is mutable, then we can't const prop it as its content // might be different at runtime. 
- if allocation.mutability == Mutability::Mut { + if alloc.inner().mutability == Mutability::Mut { throw_machine_stop_str!("can't access mutable globals in ConstProp"); } @@ -538,7 +538,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { self.tcx.struct_span_lint_hir(lint, lint_root, source_info.span, |lint| { let mut err = lint.build(message); err.span_label(source_info.span, format!("{:?}", panic)); - err.emit() + err.emit(); }); } } @@ -633,24 +633,22 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { fn propagate_operand(&mut self, operand: &mut Operand<'tcx>) { match *operand { Operand::Copy(l) | Operand::Move(l) => { - if let Some(value) = self.get_const(l) { - if self.should_const_prop(&value) { - // FIXME(felix91gr): this code only handles `Scalar` cases. - // For now, we're not handling `ScalarPair` cases because - // doing so here would require a lot of code duplication. - // We should hopefully generalize `Operand` handling into a fn, - // and use it to do const-prop here and everywhere else - // where it makes sense. - if let interpret::Operand::Immediate(interpret::Immediate::Scalar( - ScalarMaybeUninit::Scalar(scalar), - )) = *value - { - *operand = self.operand_from_scalar( - scalar, - value.layout.ty, - self.source_info.unwrap().span, - ); - } + if let Some(value) = self.get_const(l) && self.should_const_prop(&value) { + // FIXME(felix91gr): this code only handles `Scalar` cases. + // For now, we're not handling `ScalarPair` cases because + // doing so here would require a lot of code duplication. + // We should hopefully generalize `Operand` handling into a fn, + // and use it to do const-prop here and everywhere else + // where it makes sense. + if let interpret::Operand::Immediate(interpret::Immediate::Scalar( + ScalarMaybeUninit::Scalar(scalar), + )) = *value + { + *operand = self.operand_from_scalar( + scalar, + value.layout.ty, + self.source_info.unwrap().span, + ); } } } @@ -843,12 +841,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { // FIXME: enable the general case stated above ^. let ty = value.layout.ty; // Only do it for tuples - if let ty::Tuple(substs) = ty.kind() { + if let ty::Tuple(types) = ty.kind() { // Only do it if tuple is also a pair with two scalars - if substs.len() == 2 { + if let [ty1, ty2] = types[..] { let alloc = self.use_ecx(|this| { - let ty1 = substs[0].expect_ty(); - let ty2 = substs[1].expect_ty(); let ty_is_scalar = |ty| { this.ecx.layout_of(ty).ok().map(|layout| layout.abi.is_scalar()) == Some(true) @@ -1088,15 +1084,13 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { // This will return None if the above `const_prop` invocation only "wrote" a // type whose creation requires no write. E.g. a generator whose initial state // consists solely of uninitialized memory (so it doesn't capture any locals). 
- if let Some(ref value) = self.get_const(place) { - if self.should_const_prop(value) { - trace!("replacing {:?} with {:?}", rval, value); - self.replace_with_const(rval, value, source_info); - if can_const_prop == ConstPropMode::FullConstProp - || can_const_prop == ConstPropMode::OnlyInsideOwnBlock - { - trace!("propagated into {:?}", place); - } + if let Some(ref value) = self.get_const(place) && self.should_const_prop(value) { + trace!("replacing {:?} with {:?}", rval, value); + self.replace_with_const(rval, value, source_info); + if can_const_prop == ConstPropMode::FullConstProp + || can_const_prop == ConstPropMode::OnlyInsideOwnBlock + { + trace!("propagated into {:?}", place); } } match can_const_prop { @@ -1202,12 +1196,21 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { AssertKind::RemainderByZero(op) => { Some(AssertKind::RemainderByZero(eval_to_int(op))) } + AssertKind::Overflow(bin_op @ (BinOp::Div | BinOp::Rem), op1, op2) => { + // Division overflow is *UB* in the MIR, and different than the + // other overflow checks. + Some(AssertKind::Overflow( + *bin_op, + eval_to_int(op1), + eval_to_int(op2), + )) + } AssertKind::BoundsCheck { ref len, ref index } => { let len = eval_to_int(len); let index = eval_to_int(index); Some(AssertKind::BoundsCheck { len, index }) } - // Overflow is are already covered by checks on the binary operators. + // Remaining overflow errors are already covered by checks on the binary operators. AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => None, // Need proper const propagator for these. _ => None, diff --git a/compiler/rustc_mir_transform/src/coverage/debug.rs b/compiler/rustc_mir_transform/src/coverage/debug.rs index 62e060c8e0c..8e28ed2426b 100644 --- a/compiler/rustc_mir_transform/src/coverage/debug.rs +++ b/compiler/rustc_mir_transform/src/coverage/debug.rs @@ -357,14 +357,12 @@ impl DebugCounters { if let Some(counters) = &self.some_counters { if let Some(DebugCounter { counter_kind, some_block_label }) = counters.get(&operand) { if let CoverageKind::Expression { .. } = counter_kind { - if let Some(block_label) = some_block_label { - if debug_options().counter_format.block { - return format!( - "{}:({})", - block_label, - self.format_counter_kind(counter_kind) - ); - } + if let Some(label) = some_block_label && debug_options().counter_format.block { + return format!( + "{}:({})", + label, + self.format_counter_kind(counter_kind) + ); } return format!("({})", self.format_counter_kind(counter_kind)); } diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs index 57862b6628d..6bb7e676e85 100644 --- a/compiler/rustc_mir_transform/src/coverage/graph.rs +++ b/compiler/rustc_mir_transform/src/coverage/graph.rs @@ -48,7 +48,7 @@ impl CoverageGraph { let mut bcb_successors = Vec::new(); for successor in bcb_filtered_successors(&mir_body, &bcb_data.terminator(mir_body).kind) - .filter_map(|&successor_bb| bb_to_bcb[successor_bb]) + .filter_map(|successor_bb| bb_to_bcb[successor_bb]) { if !seen[successor] { seen[successor] = true; @@ -281,7 +281,7 @@ impl graph::WithPredecessors for CoverageGraph { } rustc_index::newtype_index! { - /// A node in the [control-flow graph][CFG] of CoverageGraph. + /// A node in the control-flow graph of CoverageGraph. 
pub(super) struct BasicCoverageBlock { DEBUG_FORMAT = "bcb{}", const START_BCB = 0, @@ -483,7 +483,7 @@ impl std::fmt::Debug for BcbBranch { fn bcb_filtered_successors<'a, 'tcx>( body: &'tcx &'a mir::Body<'tcx>, term_kind: &'tcx TerminatorKind<'tcx>, -) -> Box<dyn Iterator<Item = &'a BasicBlock> + 'a> { +) -> Box<dyn Iterator<Item = BasicBlock> + 'a> { let mut successors = term_kind.successors(); Box::new( match &term_kind { @@ -494,9 +494,8 @@ fn bcb_filtered_successors<'a, 'tcx>( // `next().into_iter()`) into the `mir::Successors` aliased type. _ => successors.next().into_iter().chain(&[]), } - .filter(move |&&successor| { - body[successor].terminator().kind != TerminatorKind::Unreachable - }), + .copied() + .filter(move |&successor| body[successor].terminator().kind != TerminatorKind::Unreachable), ) } @@ -695,7 +694,7 @@ pub struct ShortCircuitPreorder< F: Fn( &'tcx &'a mir::Body<'tcx>, &'tcx TerminatorKind<'tcx>, - ) -> Box<dyn Iterator<Item = &'a BasicBlock> + 'a>, + ) -> Box<dyn Iterator<Item = BasicBlock> + 'a>, > { body: &'tcx &'a mir::Body<'tcx>, visited: BitSet<BasicBlock>, @@ -709,7 +708,7 @@ impl< F: Fn( &'tcx &'a mir::Body<'tcx>, &'tcx TerminatorKind<'tcx>, - ) -> Box<dyn Iterator<Item = &'a BasicBlock> + 'a>, + ) -> Box<dyn Iterator<Item = BasicBlock> + 'a>, > ShortCircuitPreorder<'a, 'tcx, F> { pub fn new( @@ -733,7 +732,7 @@ impl< F: Fn( &'tcx &'a mir::Body<'tcx>, &'tcx TerminatorKind<'tcx>, - ) -> Box<dyn Iterator<Item = &'a BasicBlock> + 'a>, + ) -> Box<dyn Iterator<Item = BasicBlock> + 'a>, > Iterator for ShortCircuitPreorder<'a, 'tcx, F> { type Item = (BasicBlock, &'a BasicBlockData<'tcx>); diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index d1cb2826ded..a36ba9300e4 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -47,9 +47,9 @@ impl CoverageStatement { } } - pub fn span(&self) -> &Span { + pub fn span(&self) -> Span { match self { - Self::Statement(_, span, _) | Self::Terminator(_, span) => span, + Self::Statement(_, span, _) | Self::Terminator(_, span) => *span, } } } @@ -191,16 +191,13 @@ impl CoverageSpan { /// If the span is part of a macro, and the macro is visible (expands directly to the given /// body_span), returns the macro name symbol. pub fn visible_macro(&self, body_span: Span) -> Option<Symbol> { - if let Some(current_macro) = self.current_macro() { - if self - .expn_span - .parent_callsite() - .unwrap_or_else(|| bug!("macro must have a parent")) - .ctxt() - == body_span.ctxt() - { - return Some(current_macro); - } + if let Some(current_macro) = self.current_macro() && self + .expn_span + .parent_callsite() + .unwrap_or_else(|| bug!("macro must have a parent")) + .ctxt() == body_span.ctxt() + { + return Some(current_macro); } None } @@ -584,21 +581,19 @@ impl<'a, 'tcx> CoverageSpans<'a, 'tcx> { /// In either case, no more spans will match the span of `pending_dups`, so /// add the `pending_dups` if they don't overlap `curr`, and clear the list. 
fn check_pending_dups(&mut self) { - if let Some(dup) = self.pending_dups.last() { - if dup.span != self.prev().span { - debug!( - " SAME spans, but pending_dups are NOT THE SAME, so BCBs matched on \ - previous iteration, or prev started a new disjoint span" - ); - if dup.span.hi() <= self.curr().span.lo() { - let pending_dups = self.pending_dups.split_off(0); - for dup in pending_dups.into_iter() { - debug!(" ...adding at least one pending={:?}", dup); - self.push_refined_span(dup); - } - } else { - self.pending_dups.clear(); + if let Some(dup) = self.pending_dups.last() && dup.span != self.prev().span { + debug!( + " SAME spans, but pending_dups are NOT THE SAME, so BCBs matched on \ + previous iteration, or prev started a new disjoint span" + ); + if dup.span.hi() <= self.curr().span.lo() { + let pending_dups = self.pending_dups.split_off(0); + for dup in pending_dups.into_iter() { + debug!(" ...adding at least one pending={:?}", dup); + self.push_refined_span(dup); } + } else { + self.pending_dups.clear(); } } } diff --git a/compiler/rustc_mir_transform/src/deaggregator.rs b/compiler/rustc_mir_transform/src/deaggregator.rs index a5491f0ef4e..44753c5f631 100644 --- a/compiler/rustc_mir_transform/src/deaggregator.rs +++ b/compiler/rustc_mir_transform/src/deaggregator.rs @@ -26,11 +26,8 @@ impl<'tcx> MirPass<'tcx> for Deaggregator { let stmt = stmt.replace_nop(); let source_info = stmt.source_info; - let (lhs, kind, operands) = match stmt.kind { - StatementKind::Assign(box (lhs, Rvalue::Aggregate(kind, operands))) => { - (lhs, kind, operands) - } - _ => bug!(), + let StatementKind::Assign(box (lhs, Rvalue::Aggregate(kind, operands))) = stmt.kind else { + bug!(); }; Some(expand_aggregate( diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs index 237ead591a5..5d0b58e9c53 100644 --- a/compiler/rustc_mir_transform/src/dest_prop.rs +++ b/compiler/rustc_mir_transform/src/dest_prop.rs @@ -38,12 +38,6 @@ //! It must also not contain any indexing projections, since those take an arbitrary `Local` as //! the index, and that local might only be initialized shortly before `dest` is used. //! -//! Subtle case: If `dest` is a, or projects through a union, then we have to make sure that there -//! remains an assignment to it, since that sets the "active field" of the union. But if `src` is -//! a ZST, it might not be initialized, so there might not be any use of it before the assignment, -//! and performing the optimization would simply delete the assignment, leaving `dest` -//! uninitialized. -//! //! * `src` must be a bare `Local` without any indirections or field projections (FIXME: Is this a //! fundamental restriction or just current impl state?). It can be copied or moved by the //! assignment. 
@@ -103,7 +97,6 @@ use rustc_index::{ bit_set::{BitMatrix, BitSet}, vec::IndexVec, }; -use rustc_middle::mir::tcx::PlaceTy; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::{dump_mir, PassWhere}; use rustc_middle::mir::{ @@ -135,7 +128,7 @@ impl<'tcx> MirPass<'tcx> for DestinationPropagation { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let def_id = body.source.def_id(); - let candidates = find_candidates(tcx, body); + let candidates = find_candidates(body); if candidates.is_empty() { debug!("{:?}: no dest prop candidates, done", def_id); return; @@ -556,14 +549,15 @@ impl<'a> Conflicts<'a> { target: _, unwind: _, } => { - if let Some(place) = value.place() { - if !place.is_indirect() && !dropped_place.is_indirect() { - self.record_local_conflict( - place.local, - dropped_place.local, - "DropAndReplace operand overlap", - ); - } + if let Some(place) = value.place() + && !place.is_indirect() + && !dropped_place.is_indirect() + { + self.record_local_conflict( + place.local, + dropped_place.local, + "DropAndReplace operand overlap", + ); } } TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => { @@ -621,14 +615,15 @@ impl<'a> Conflicts<'a> { for op in operands { match op { InlineAsmOperand::In { reg: _, value } => { - if let Some(p) = value.place() { - if !p.is_indirect() && !dest_place.is_indirect() { - self.record_local_conflict( - p.local, - dest_place.local, - "asm! operand overlap", - ); - } + if let Some(p) = value.place() + && !p.is_indirect() + && !dest_place.is_indirect() + { + self.record_local_conflict( + p.local, + dest_place.local, + "asm! operand overlap", + ); } } InlineAsmOperand::Out { @@ -650,24 +645,26 @@ impl<'a> Conflicts<'a> { in_value, out_place, } => { - if let Some(place) = in_value.place() { - if !place.is_indirect() && !dest_place.is_indirect() { - self.record_local_conflict( - place.local, - dest_place.local, - "asm! operand overlap", - ); - } + if let Some(place) = in_value.place() + && !place.is_indirect() + && !dest_place.is_indirect() + { + self.record_local_conflict( + place.local, + dest_place.local, + "asm! operand overlap", + ); } - if let Some(place) = out_place { - if !place.is_indirect() && !dest_place.is_indirect() { - self.record_local_conflict( - place.local, - dest_place.local, - "asm! operand overlap", - ); - } + if let Some(place) = out_place + && !place.is_indirect() + && !dest_place.is_indirect() + { + self.record_local_conflict( + place.local, + dest_place.local, + "asm! operand overlap", + ); } } InlineAsmOperand::Out { reg: _, late: _, place: None } @@ -803,9 +800,8 @@ struct CandidateAssignment<'tcx> { /// comment) and also throw out assignments that involve a local that has its address taken or is /// otherwise ineligible (eg. locals used as array indices are ignored because we cannot propagate /// arbitrary places into array indices). 
-fn find_candidates<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> Vec<CandidateAssignment<'tcx>> { +fn find_candidates<'tcx>(body: &Body<'tcx>) -> Vec<CandidateAssignment<'tcx>> { let mut visitor = FindAssignments { - tcx, body, candidates: Vec::new(), ever_borrowed_locals: ever_borrowed_locals(body), @@ -816,7 +812,6 @@ fn find_candidates<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> Vec<CandidateA } struct FindAssignments<'a, 'tcx> { - tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, candidates: Vec<CandidateAssignment<'tcx>>, ever_borrowed_locals: BitSet<Local>, @@ -845,10 +840,11 @@ impl<'tcx> Visitor<'tcx> for FindAssignments<'_, 'tcx> { return; } - // Can't optimize if both locals ever have their address taken (can introduce - // aliasing). - // FIXME: This can be smarter and take `StorageDead` into account (which - // invalidates borrows). + // Can't optimize if either local ever has their address taken. This optimization does + // liveness analysis only based on assignments, and a local can be live even if its + // never assigned to again, because a reference to it might be live. + // FIXME: This can be smarter and take `StorageDead` into account (which invalidates + // borrows). if self.ever_borrowed_locals.contains(dest.local) || self.ever_borrowed_locals.contains(src.local) { @@ -862,22 +858,11 @@ impl<'tcx> Visitor<'tcx> for FindAssignments<'_, 'tcx> { return; } - // Handle the "subtle case" described above by rejecting any `dest` that is or - // projects through a union. - let mut place_ty = PlaceTy::from_ty(self.body.local_decls[dest.local].ty); - if place_ty.ty.is_union() { - return; - } for elem in dest.projection { if let PlaceElem::Index(_) = elem { // `dest` contains an indexing projection. return; } - - place_ty = place_ty.projection_ty(self.tcx, elem); - if place_ty.ty.is_union() { - return; - } } self.candidates.push(CandidateAssignment { diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs index ba234dccaa6..d72e8d16105 100644 --- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs +++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs @@ -95,7 +95,7 @@ pub struct EarlyOtherwiseBranch; impl<'tcx> MirPass<'tcx> for EarlyOtherwiseBranch { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - sess.mir_opt_level() >= 2 + sess.mir_opt_level() >= 3 && sess.opts.debugging_opts.unsound_mir_opts } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { @@ -226,6 +226,37 @@ impl<'tcx> MirPass<'tcx> for EarlyOtherwiseBranch { /// Returns true if computing the discriminant of `place` may be hoisted out of the branch fn may_hoist<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, place: Place<'tcx>) -> bool { + // FIXME(JakobDegen): This is unsound. Someone could write code like this: + // ```rust + // let Q = val; + // if discriminant(P) == otherwise { + // let ptr = &mut Q as *mut _ as *mut u8; + // unsafe { *ptr = 10; } // Any invalid value for the type + // } + // + // match P { + // A => match Q { + // A => { + // // code + // } + // _ => { + // // don't use Q + // } + // } + // _ => { + // // don't use Q + // } + // }; + // ``` + // + // Hoisting the `discriminant(Q)` out of the `A` arm causes us to compute the discriminant of an + // invalid value, which is UB. 
+ // + // In order to fix this, we would either need to show that the discriminant computation of + // `place` is computed in all branches, including the `otherwise` branch, or we would need + // another analysis pass to determine that the place is fully initialized. It might even be best + // to have the hoisting be performed in a different pass and just do the CFG changing in this + // pass. for (place, proj) in place.iter_projections() { match proj { // Dereferencing in the computation of `place` might cause issues from one of two @@ -359,7 +390,7 @@ fn verify_candidate_branch<'tcx>( if branch.statements.len() != 1 { return false; } - // ...assign the descriminant of `place` in that statement + // ...assign the discriminant of `place` in that statement let StatementKind::Assign(boxed) = &branch.statements[0].kind else { return false }; diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs index 7320b2738a7..a4b1d86ff61 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drops.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs @@ -98,12 +98,9 @@ fn find_dead_unwinds<'tcx>( debug!("find_dead_unwinds @ {:?}: {:?}", bb, bb_data); - let path = match env.move_data.rev_lookup.find(place.as_ref()) { - LookupResult::Exact(e) => e, - LookupResult::Parent(..) => { - debug!("find_dead_unwinds: has parent; skipping"); - continue; - } + let LookupResult::Exact(path) = env.move_data.rev_lookup.find(place.as_ref()) else { + debug!("find_dead_unwinds: has parent; skipping"); + continue; }; flow_inits.seek_before_primary_effect(body.terminator_loc(bb)); diff --git a/compiler/rustc_mir_transform/src/generator.rs b/compiler/rustc_mir_transform/src/generator.rs index 05de52458ad..8bc7f5e9ce2 100644 --- a/compiler/rustc_mir_transform/src/generator.rs +++ b/compiler/rustc_mir_transform/src/generator.rs @@ -210,7 +210,7 @@ struct SuspensionPoint<'tcx> { struct TransformVisitor<'tcx> { tcx: TyCtxt<'tcx>, - state_adt_ref: &'tcx AdtDef, + state_adt_ref: AdtDef<'tcx>, state_substs: SubstsRef<'tcx>, // The type of the discriminant in the generator struct @@ -243,11 +243,11 @@ impl<'tcx> TransformVisitor<'tcx> { val: Operand<'tcx>, source_info: SourceInfo, ) -> impl Iterator<Item = Statement<'tcx>> { - let kind = AggregateKind::Adt(self.state_adt_ref.did, idx, self.state_substs, None, None); - assert_eq!(self.state_adt_ref.variants[idx].fields.len(), 1); + let kind = AggregateKind::Adt(self.state_adt_ref.did(), idx, self.state_substs, None, None); + assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 1); let ty = self .tcx - .type_of(self.state_adt_ref.variants[idx].fields[0].did) + .type_of(self.state_adt_ref.variant(idx).fields[0].did) .subst(self.tcx, self.state_substs); expand_aggregate( Place::return_place(), @@ -463,10 +463,8 @@ fn locals_live_across_suspend_points<'tcx>( // Calculate the MIR locals which have been previously // borrowed (even if they are still active). 
- let borrowed_locals_results = MaybeBorrowedLocals::all_borrows() - .into_engine(tcx, body_ref) - .pass_name("generator") - .iterate_to_fixpoint(); + let borrowed_locals_results = + MaybeBorrowedLocals.into_engine(tcx, body_ref).pass_name("generator").iterate_to_fixpoint(); let mut borrowed_locals_cursor = rustc_mir_dataflow::ResultsCursor::new(body_ref, &borrowed_locals_results); @@ -1413,22 +1411,16 @@ impl EnsureGeneratorFieldAssignmentsNeverAlias<'_> { impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> { fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { - let lhs = match self.assigned_local { - Some(l) => l, - None => { - // This visitor only invokes `visit_place` for the right-hand side of an assignment - // and only after setting `self.assigned_local`. However, the default impl of - // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places - // with debuginfo. Ignore them here. - assert!(!context.is_use()); - return; - } + let Some(lhs) = self.assigned_local else { + // This visitor only invokes `visit_place` for the right-hand side of an assignment + // and only after setting `self.assigned_local`. However, the default impl of + // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places + // with debuginfo. Ignore them here. + assert!(!context.is_use()); + return; }; - let rhs = match self.saved_local_for_direct_place(*place) { - Some(l) => l, - None => return, - }; + let Some(rhs) = self.saved_local_for_direct_place(*place) else { return }; if !self.storage_conflicts.contains(lhs, rhs) { bug!( diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 55ce5910c81..23e5f0b4f30 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -10,7 +10,7 @@ use rustc_middle::mir::*; use rustc_middle::traits::ObligationCause; use rustc_middle::ty::subst::Subst; use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt}; -use rustc_span::{hygiene::ExpnKind, ExpnData, Span}; +use rustc_span::{hygiene::ExpnKind, ExpnData, LocalExpnId, Span}; use rustc_target::spec::abi::Abi; use super::simplify::{remove_dead_blocks, CfgSimplifier}; @@ -118,9 +118,8 @@ impl<'tcx> Inliner<'tcx> { continue; } - let callsite = match self.resolve_callsite(caller_body, bb, bb_data) { - None => continue, - Some(it) => it, + let Some(callsite) = self.resolve_callsite(caller_body, bb, bb_data) else { + continue; }; let span = trace_span!("process_blocks", %callsite.callee, ?bb); @@ -544,6 +543,16 @@ impl<'tcx> Inliner<'tcx> { // Copy the arguments if needed. let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body); + let mut expn_data = ExpnData::default( + ExpnKind::Inlined, + callsite.source_info.span, + self.tcx.sess.edition(), + None, + None, + ); + expn_data.def_site = callee_body.span; + let expn_data = + LocalExpnId::fresh(expn_data, self.tcx.create_stable_hashing_context()); let mut integrator = Integrator { args: &args, new_locals: Local::new(caller_body.local_decls.len()).., @@ -554,8 +563,7 @@ impl<'tcx> Inliner<'tcx> { cleanup_block: cleanup, in_cleanup_block: false, tcx: self.tcx, - callsite_span: callsite.source_info.span, - body_span: callee_body.span, + expn_data, always_live_locals: BitSet::new_filled(callee_body.local_decls.len()), }; @@ -693,8 +701,7 @@ impl<'tcx> Inliner<'tcx> { // The `tmp0`, `tmp1`, and `tmp2` in our example abonve. 
let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| { // This is e.g., `tuple_tmp.0` in our example above. - let tuple_field = - Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty())); + let tuple_field = Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty)); // Spill to a local to make e.g., `tmp0`. self.create_temp_if_necessary(tuple_field, callsite, caller_body) @@ -717,12 +724,11 @@ impl<'tcx> Inliner<'tcx> { caller_body: &mut Body<'tcx>, ) -> Local { // Reuse the operand if it is a moved temporary. - if let Operand::Move(place) = &arg { - if let Some(local) = place.as_local() { - if caller_body.local_kind(local) == LocalKind::Temp { - return local; - } - } + if let Operand::Move(place) = &arg + && let Some(local) = place.as_local() + && caller_body.local_kind(local) == LocalKind::Temp + { + return local; } // Otherwise, create a temporary for the argument. @@ -789,8 +795,7 @@ struct Integrator<'a, 'tcx> { cleanup_block: Option<BasicBlock>, in_cleanup_block: bool, tcx: TyCtxt<'tcx>, - callsite_span: Span, - body_span: Span, + expn_data: LocalExpnId, always_live_locals: BitSet<Local>, } @@ -837,12 +842,8 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> { } fn visit_span(&mut self, span: &mut Span) { - let mut expn_data = - ExpnData::default(ExpnKind::Inlined, *span, self.tcx.sess.edition(), None, None); - expn_data.def_site = self.body_span; // Make sure that all spans track the fact that they were inlined. - *span = - self.callsite_span.fresh_expansion(expn_data, self.tcx.create_stable_hashing_context()); + *span = span.fresh_expansion(self.expn_data); } fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { diff --git a/compiler/rustc_mir_transform/src/inline/cycle.rs b/compiler/rustc_mir_transform/src/inline/cycle.rs index 44ded1647fc..de93ab7059f 100644 --- a/compiler/rustc_mir_transform/src/inline/cycle.rs +++ b/compiler/rustc_mir_transform/src/inline/cycle.rs @@ -46,12 +46,9 @@ crate fn mir_callgraph_reachable<'tcx>( trace!(%caller); for &(callee, substs) in tcx.mir_inliner_callees(caller.def) { let substs = caller.subst_mir_and_normalize_erasing_regions(tcx, param_env, substs); - let callee = match ty::Instance::resolve(tcx, param_env, callee, substs).unwrap() { - Some(callee) => callee, - None => { - trace!(?callee, "cannot resolve, skipping"); - continue; - } + let Some(callee) = ty::Instance::resolve(tcx, param_env, callee, substs).unwrap() else { + trace!(?callee, "cannot resolve, skipping"); + continue; }; // Found a path. 
diff --git a/compiler/rustc_mir_transform/src/instcombine.rs b/compiler/rustc_mir_transform/src/instcombine.rs index 792ac68671e..d1c4a4b21d0 100644 --- a/compiler/rustc_mir_transform/src/instcombine.rs +++ b/compiler/rustc_mir_transform/src/instcombine.rs @@ -4,7 +4,7 @@ use crate::MirPass; use rustc_hir::Mutability; use rustc_middle::mir::{ BinOp, Body, Constant, LocalDecls, Operand, Place, ProjectionElem, Rvalue, SourceInfo, - StatementKind, UnOp, + Statement, StatementKind, Terminator, TerminatorKind, UnOp, }; use rustc_middle::ty::{self, TyCtxt}; @@ -29,6 +29,11 @@ impl<'tcx> MirPass<'tcx> for InstCombine { _ => {} } } + + ctx.combine_primitive_clone( + &mut block.terminator.as_mut().unwrap(), + &mut block.statements, + ); } } } @@ -77,10 +82,8 @@ impl<'tcx> InstCombineContext<'tcx, '_> { _ => None, }; - if let Some(new) = new { - if self.should_combine(source_info, rvalue) { - *rvalue = new; - } + if let Some(new) = new && self.should_combine(source_info, rvalue) { + *rvalue = new; } } @@ -132,4 +135,70 @@ impl<'tcx> InstCombineContext<'tcx, '_> { } } } + + fn combine_primitive_clone( + &self, + terminator: &mut Terminator<'tcx>, + statements: &mut Vec<Statement<'tcx>>, + ) { + let TerminatorKind::Call { func, args, destination, .. } = &mut terminator.kind + else { return }; + + // It's definitely not a clone if there are multiple arguments + if args.len() != 1 { + return; + } + + let Some((destination_place, destination_block)) = *destination + else { return }; + + // Only bother looking more if it's easy to know what we're calling + let Some((fn_def_id, fn_substs)) = func.const_fn_def() + else { return }; + + // Clone needs one subst, so we can cheaply rule out other stuff + if fn_substs.len() != 1 { + return; + } + + // These types are easily available from locals, so check that before + // doing DefId lookups to figure out what we're actually calling. 
+ let arg_ty = args[0].ty(self.local_decls, self.tcx); + + let ty::Ref(_region, inner_ty, Mutability::Not) = *arg_ty.kind() + else { return }; + + if !inner_ty.is_trivially_pure_clone_copy() { + return; + } + + let trait_def_id = self.tcx.trait_of_item(fn_def_id); + if trait_def_id.is_none() || trait_def_id != self.tcx.lang_items().clone_trait() { + return; + } + + if !self.tcx.consider_optimizing(|| { + format!( + "InstCombine - Call: {:?} SourceInfo: {:?}", + (fn_def_id, fn_substs), + terminator.source_info + ) + }) { + return; + } + + let Some(arg_place) = args.pop().unwrap().place() + else { return }; + + statements.push(Statement { + source_info: terminator.source_info, + kind: StatementKind::Assign(box ( + destination_place, + Rvalue::Use(Operand::Copy( + arg_place.project_deeper(&[ProjectionElem::Deref], self.tcx), + )), + )), + }); + terminator.kind = TerminatorKind::Goto { target: destination_block }; + } } diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index e7d5bab8fd9..3b2332a6e31 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -1,16 +1,17 @@ +#![allow(rustc::potential_query_instability)] #![feature(box_patterns)] #![feature(box_syntax)] #![feature(crate_visibility_modifier)] +#![feature(let_chains)] #![feature(let_else)] #![feature(map_try_insert)] #![feature(min_specialization)] -#![feature(option_get_or_insert_default)] -#![feature(once_cell)] #![feature(never_type)] +#![feature(once_cell)] +#![feature(option_get_or_insert_default)] #![feature(trusted_step)] #![feature(try_blocks)] #![recursion_limit = "256"] -#![cfg_attr(not(bootstrap), allow(rustc::potential_query_instability))] #[macro_use] extern crate tracing; diff --git a/compiler/rustc_mir_transform/src/lower_intrinsics.rs b/compiler/rustc_mir_transform/src/lower_intrinsics.rs index 4c4497ad629..684d988ee9e 100644 --- a/compiler/rustc_mir_transform/src/lower_intrinsics.rs +++ b/compiler/rustc_mir_transform/src/lower_intrinsics.rs @@ -17,9 +17,8 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics { let terminator = block.terminator.as_mut().unwrap(); if let TerminatorKind::Call { func, args, destination, .. 
 } = &mut terminator.kind
 {
     let func_ty = func.ty(local_decls, tcx);
-    let (intrinsic_name, substs) = match resolve_rust_intrinsic(tcx, func_ty) {
-        None => continue,
-        Some(it) => it,
+    let Some((intrinsic_name, substs)) = resolve_rust_intrinsic(tcx, func_ty) else {
+        continue;
     };
     match intrinsic_name {
         sym::unreachable => {
diff --git a/compiler/rustc_mir_transform/src/lower_slice_len.rs b/compiler/rustc_mir_transform/src/lower_slice_len.rs
index c8297744873..43d1d62a21e 100644
--- a/compiler/rustc_mir_transform/src/lower_slice_len.rs
+++ b/compiler/rustc_mir_transform/src/lower_slice_len.rs
@@ -61,10 +61,7 @@ fn lower_slice_len_call<'tcx>(
     if args.len() != 1 {
         return;
     }
-    let arg = match args[0].place() {
-        Some(arg) => arg,
-        None => return,
-    };
+    let Some(arg) = args[0].place() else { return };
     let func_ty = func.ty(local_decls, tcx);
     match func_ty.kind() {
         ty::FnDef(fn_def_id, _) if fn_def_id == &slice_len_fn_item_def_id => {
diff --git a/compiler/rustc_mir_transform/src/nrvo.rs b/compiler/rustc_mir_transform/src/nrvo.rs
index 797f7ee2685..ec25f298d48 100644
--- a/compiler/rustc_mir_transform/src/nrvo.rs
+++ b/compiler/rustc_mir_transform/src/nrvo.rs
@@ -39,12 +39,9 @@ impl<'tcx> MirPass<'tcx> for RenameReturnPlace {
     fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut mir::Body<'tcx>) {
         let def_id = body.source.def_id();
-        let returned_local = match local_eligible_for_nrvo(body) {
-            Some(l) => l,
-            None => {
-                debug!("`{:?}` was ineligible for NRVO", def_id);
-                return;
-            }
+        let Some(returned_local) = local_eligible_for_nrvo(body) else {
+            debug!("`{:?}` was ineligible for NRVO", def_id);
+            return;
         };
 
         if !tcx.consider_optimizing(|| format!("RenameReturnPlace {:?}", def_id)) {
diff --git a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
index fc5ac97e3e1..d7fb7063114 100644
--- a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
+++ b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
@@ -1,4 +1,4 @@
-use rustc_index::bit_set::BitSet;
+use rustc_index::bit_set::ChunkedBitSet;
 use rustc_middle::mir::{Body, Field, Rvalue, Statement, StatementKind, TerminatorKind};
 use rustc_middle::ty::subst::SubstsRef;
 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, VariantDef};
@@ -89,7 +89,7 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops {
 fn is_needs_drop_and_init<'tcx>(
     tcx: TyCtxt<'tcx>,
     param_env: ParamEnv<'tcx>,
-    maybe_inits: &BitSet<MovePathIndex>,
+    maybe_inits: &ChunkedBitSet<MovePathIndex>,
     move_data: &MoveData<'tcx>,
     ty: Ty<'tcx>,
     mpi: MovePathIndex,
@@ -124,7 +124,7 @@ fn is_needs_drop_and_init<'tcx>(
             //
             // If its projection *is* present in `MoveData`, then the field may have been moved
             // from separate from its parent. Recurse.
-            adt.variants.iter_enumerated().any(|(vid, variant)| {
+            adt.variants().iter_enumerated().any(|(vid, variant)| {
                 // Enums have multiple variants, which are discriminated with a `Downcast` projection.
                 // Structs have a single variant, and don't use a `Downcast` projection.
                 let mpi = if adt.is_enum() {
@@ -148,8 +148,8 @@ fn is_needs_drop_and_init<'tcx>(
             })
         }
 
-        ty::Tuple(_) => ty
-            .tuple_fields()
+        ty::Tuple(fields) => fields
+            .iter()
             .enumerate()
             .map(|(f, f_ty)| (Field::from_usize(f), f_ty, mpi))
             .any(field_needs_drop_and_init),
diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs
index 1d912e61409..785716ebecc 100644
--- a/compiler/rustc_mir_transform/src/remove_zsts.rs
+++ b/compiler/rustc_mir_transform/src/remove_zsts.rs
@@ -26,9 +26,8 @@ impl<'tcx> MirPass<'tcx> for RemoveZsts {
                     if !maybe_zst(place_ty) {
                         continue;
                     }
-                    let layout = match tcx.layout_of(param_env.and(place_ty)) {
-                        Ok(layout) => layout,
-                        Err(_) => continue,
+                    let Ok(layout) = tcx.layout_of(param_env.and(place_ty)) else {
+                        continue;
                     };
                     if !layout.is_zst() {
                         continue;
diff --git a/compiler/rustc_mir_transform/src/required_consts.rs b/compiler/rustc_mir_transform/src/required_consts.rs
index 1c48efd8b42..b87220a3aa4 100644
--- a/compiler/rustc_mir_transform/src/required_consts.rs
+++ b/compiler/rustc_mir_transform/src/required_consts.rs
@@ -14,10 +14,9 @@ impl<'a, 'tcx> RequiredConstsVisitor<'a, 'tcx> {
 
 impl<'tcx> Visitor<'tcx> for RequiredConstsVisitor<'_, 'tcx> {
     fn visit_constant(&mut self, constant: &Constant<'tcx>, _: Location) {
-        if let Some(ct) = constant.literal.const_for_ty() {
-            if let ConstKind::Unevaluated(_) = ct.val() {
-                self.required_consts.push(*constant);
-            }
+        let literal = constant.literal;
+        if let Some(ct) = literal.const_for_ty() && let ConstKind::Unevaluated(_) = ct.val() {
+            self.required_consts.push(*constant);
         }
     }
 }
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index b8feeb993e7..bf031b423c2 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -461,10 +461,10 @@ impl<'tcx> CloneShimBuilder<'tcx> {
     fn tuple_like_shim<I>(&mut self, dest: Place<'tcx>, src: Place<'tcx>, tys: I)
     where
-        I: Iterator<Item = Ty<'tcx>>,
+        I: IntoIterator<Item = Ty<'tcx>>,
     {
         let mut previous_field = None;
-        for (i, ity) in tys.enumerate() {
+        for (i, ity) in tys.into_iter().enumerate() {
             let field = Field::new(i);
 
             let src_field = self.tcx.mk_place_field(src, field, ity);
@@ -734,9 +734,8 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> {
     let sig = tcx.fn_sig(ctor_id).no_bound_vars().expect("LBR in ADT constructor signature");
     let sig = tcx.normalize_erasing_regions(param_env, sig);
-    let (adt_def, substs) = match sig.output().kind() {
-        ty::Adt(adt_def, substs) => (adt_def, substs),
-        _ => bug!("unexpected type for ADT ctor {:?}", sig.output()),
+    let ty::Adt(adt_def, substs) = sig.output().kind() else {
+        bug!("unexpected type for ADT ctor {:?}", sig.output());
     };
 
     debug!("build_ctor: ctor_id={:?} sig={:?}", ctor_id, sig);
@@ -761,10 +760,10 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> {
     let statements = expand_aggregate(
         Place::return_place(),
-        adt_def.variants[variant_index].fields.iter().enumerate().map(|(idx, field_def)| {
+        adt_def.variant(variant_index).fields.iter().enumerate().map(|(idx, field_def)| {
             (Operand::Move(Place::from(Local::new(idx + 1))), field_def.ty(tcx, substs))
         }),
-        AggregateKind::Adt(adt_def.did, variant_index, substs, None, None),
+        AggregateKind::Adt(adt_def.did(), variant_index, substs, None, None),
         source_info,
         tcx,
     )
diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs
index 4651e1f4ed0..d8b58ce53f8 100644
--- a/compiler/rustc_mir_transform/src/simplify.rs
+++ b/compiler/rustc_mir_transform/src/simplify.rs
@@ -172,9 +172,8 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
         let mut terminators: SmallVec<[_; 1]> = Default::default();
         let mut current = *start;
         while let Some(terminator) = self.take_terminator_if_simple_goto(current) {
-            let target = match terminator {
-                Terminator { kind: TerminatorKind::Goto { target }, .. } => target,
-                _ => unreachable!(),
+            let Terminator { kind: TerminatorKind::Goto { target }, .. } = terminator else {
+                unreachable!();
             };
             terminators.push((current, terminator));
             current = target;
@@ -182,9 +181,8 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
         let last = current;
         *start = last;
         while let Some((current, mut terminator)) = terminators.pop() {
-            let target = match terminator {
-                Terminator { kind: TerminatorKind::Goto { ref mut target }, .. } => target,
-                _ => unreachable!(),
+            let Terminator { kind: TerminatorKind::Goto { ref mut target }, .. } = terminator else {
+                unreachable!();
             };
             *changed |= *target != last;
             *target = last;
diff --git a/compiler/rustc_mir_transform/src/simplify_try.rs b/compiler/rustc_mir_transform/src/simplify_try.rs
index d5507fcc78c..ce4b45062e8 100644
--- a/compiler/rustc_mir_transform/src/simplify_try.rs
+++ b/compiler/rustc_mir_transform/src/simplify_try.rs
@@ -362,7 +362,7 @@ fn optimization_applies<'tcx>(
         return false;
     } else if last_assigned_to != opt_info.local_tmp_s1 {
         trace!(
-            "NO: end of assignemnt chain does not match written enum temp: {:?} != {:?}",
+            "NO: end of assignment chain does not match written enum temp: {:?} != {:?}",
             last_assigned_to,
             opt_info.local_tmp_s1
         );
@@ -707,7 +707,7 @@ impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
     ) -> StatementEquality {
         let helper = |rhs: &Rvalue<'tcx>,
                       place: &Place<'tcx>,
-                      variant_index: &VariantIdx,
+                      variant_index: VariantIdx,
                       switch_value: u128,
                       side_to_choose| {
             let place_type = place.ty(self.body, self.tcx).ty;
@@ -717,7 +717,7 @@ impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
             };
             // We need to make sure that the switch value that targets the bb with
            // SetDiscriminant is the same as the variant discriminant.
-            let variant_discr = adt.discriminant_for_variant(self.tcx, *variant_index).val;
+            let variant_discr = adt.discriminant_for_variant(self.tcx, variant_index).val;
             if variant_discr != switch_value {
                 trace!(
                     "NO: variant discriminant {} does not equal switch value {}",
@@ -726,7 +726,7 @@ impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
                 );
                 return StatementEquality::NotEqual;
             }
-            let variant_is_fieldless = adt.variants[*variant_index].fields.is_empty();
+            let variant_is_fieldless = adt.variant(variant_index).fields.is_empty();
             if !variant_is_fieldless {
                 trace!("NO: variant {:?} was not fieldless", variant_index);
                 return StatementEquality::NotEqual;
@@ -753,7 +753,7 @@ impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
             // check for case A
             (
                 StatementKind::Assign(box (_, rhs)),
-                StatementKind::SetDiscriminant { place, variant_index },
+                &StatementKind::SetDiscriminant { ref place, variant_index },
             ) if y_target_and_value.value.is_some() => {
                 // choose basic block of x, as that has the assign
                 helper(
@@ -765,8 +765,8 @@ impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
                 )
             }
             (
-                StatementKind::SetDiscriminant { place, variant_index },
-                StatementKind::Assign(box (_, rhs)),
+                &StatementKind::SetDiscriminant { ref place, variant_index },
+                &StatementKind::Assign(box (_, ref rhs)),
             ) if x_target_and_value.value.is_some() => {
                 // choose basic block of y, as that has the assign
                 helper(
diff --git a/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs b/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs
index cda9ba9dcc8..bd196f11879 100644
--- a/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs
+++ b/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs
@@ -3,7 +3,8 @@ use crate::MirPass;
 use rustc_data_structures::stable_set::FxHashSet;
 use rustc_middle::mir::{
-    BasicBlockData, Body, Local, Operand, Rvalue, StatementKind, SwitchTargets, TerminatorKind,
+    BasicBlockData, Body, Local, Operand, Rvalue, StatementKind, SwitchTargets, Terminator,
+    TerminatorKind,
 };
 use rustc_middle::ty::layout::TyAndLayout;
 use rustc_middle::ty::{Ty, TyCtxt};
@@ -64,13 +65,35 @@ fn variant_discriminants<'tcx>(
         Variants::Multiple { variants, .. } => variants
             .iter_enumerated()
             .filter_map(|(idx, layout)| {
-                (layout.abi != Abi::Uninhabited)
+                (layout.abi() != Abi::Uninhabited)
                     .then(|| ty.discriminant_for_variant(tcx, idx).unwrap().val)
             })
             .collect(),
     }
 }
 
+/// Ensures that the `otherwise` branch leads to an unreachable bb, returning `None` if so and a new
+/// bb to use as the new target if not.
+fn ensure_otherwise_unreachable<'tcx>(
+    body: &Body<'tcx>,
+    targets: &SwitchTargets,
+) -> Option<BasicBlockData<'tcx>> {
+    let otherwise = targets.otherwise();
+    let bb = &body.basic_blocks()[otherwise];
+    if bb.terminator().kind == TerminatorKind::Unreachable
+        && bb.statements.iter().all(|s| matches!(&s.kind, StatementKind::StorageDead(_)))
+    {
+        return None;
+    }
+
+    let mut new_block = BasicBlockData::new(Some(Terminator {
+        source_info: bb.terminator().source_info,
+        kind: TerminatorKind::Unreachable,
+    }));
+    new_block.is_cleanup = bb.is_cleanup;
+    Some(new_block)
+}
+
 impl<'tcx> MirPass<'tcx> for UninhabitedEnumBranching {
     fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
         sess.mir_opt_level() > 0
@@ -99,12 +122,25 @@ impl<'tcx> MirPass<'tcx> for UninhabitedEnumBranching {
             if let TerminatorKind::SwitchInt { targets, .. } =
                 &mut body.basic_blocks_mut()[bb].terminator_mut().kind
             {
-                let new_targets = SwitchTargets::new(
+                let mut new_targets = SwitchTargets::new(
                     targets.iter().filter(|(val, _)| allowed_variants.contains(val)),
                     targets.otherwise(),
                 );
-                *targets = new_targets;
+
+                if new_targets.iter().count() == allowed_variants.len() {
+                    if let Some(updated) = ensure_otherwise_unreachable(body, &new_targets) {
+                        let new_otherwise = body.basic_blocks_mut().push(updated);
+                        *new_targets.all_targets_mut().last_mut().unwrap() = new_otherwise;
+                    }
+                }
+
+                if let TerminatorKind::SwitchInt { targets, .. } =
+                    &mut body.basic_blocks_mut()[bb].terminator_mut().kind
+                {
+                    *targets = new_targets;
+                } else {
+                    unreachable!()
+                }
             } else {
                 unreachable!()
             }
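Most of the hunks above convert `match` expressions whose non-binding arm diverges into `let`-`else` bindings. A minimal standalone sketch of the pattern, outside of rustc (the `parse_port` helper is invented purely for illustration; `let`-`else` was an unstable feature the compiler opted into at the time of this diff and is stable since Rust 1.65):

// Illustration of the `match`-to-`let`-`else` rewrite; not rustc code.
fn parse_port(s: &str) -> Option<u16> {
    // Before: bind the success value with a `match` whose other arm diverges.
    let rest = match s.strip_prefix(':') {
        Some(rest) => rest,
        None => return None,
    };

    // After: `let`-`else` binds the pattern directly; the `else` block must
    // diverge (`return`, `continue`, `break`, or a panic such as `unreachable!()`).
    let Ok(port) = rest.parse::<u16>() else {
        return None;
    };

    Some(port)
}

fn main() {
    assert_eq!(parse_port(":8080"), Some(8080));
    assert_eq!(parse_port("8080"), None);
    assert_eq!(parse_port(":x"), None);
}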
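The `tuple_like_shim` change relaxes its generic bound from `Iterator` to `IntoIterator` and calls `.into_iter()` explicitly. A small standalone sketch of why that is strictly more general (the `total_len` function is invented for illustration and is not part of the diff):

// Sketch of relaxing `I: Iterator` to `I: IntoIterator`; not rustc code.
fn total_len<I>(items: I) -> usize
where
    I: IntoIterator<Item = String>,
{
    // Calling `.into_iter()` here lets callers pass either a collection or an
    // already-built iterator, since every `Iterator` also implements `IntoIterator`.
    items.into_iter().map(|s| s.len()).sum()
}

fn main() {
    let v = vec!["mir".to_string(), "transform".to_string()];
    assert_eq!(total_len(v.clone()), 12);     // a Vec works directly
    assert_eq!(total_len(v.into_iter()), 12); // so does an iterator
}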
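For context on the `uninhabited_enum_branching.rs` hunks: the pass drops `SwitchInt` targets for variants whose layout is uninhabited, and with `ensure_otherwise_unreachable` it now also redirects the `otherwise` edge to an `Unreachable` block once every inhabited variant has its own target. A hypothetical source-level example of the situation the pass exploits (the types are made up; this is not rustc code):

use std::convert::Infallible;

// `Parsed::Never` holds an `Infallible`, which has no values, so that variant
// is uninhabited and can never be constructed or observed at runtime.
#[allow(dead_code)]
enum Parsed {
    Number(u32),
    Empty,
    Never(Infallible),
}

fn describe(p: Parsed) -> &'static str {
    // The type system still demands an arm for `Never`, but the MIR switch on
    // the discriminant only needs real targets for `Number` and `Empty`; the
    // remaining edge can point at an `Unreachable` block, which is what the
    // pass arranges.
    match p {
        Parsed::Number(_) => "number",
        Parsed::Empty => "empty",
        Parsed::Never(x) => match x {},
    }
}

fn main() {
    assert_eq!(describe(Parsed::Number(7)), "number");
    assert_eq!(describe(Parsed::Empty), "empty");
}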
