Diffstat (limited to 'compiler')
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/check_match.rs  |   2
-rw-r--r--  compiler/rustc_mir_transform/src/jump_threading.rs        | 262
-rw-r--r--  compiler/rustc_monomorphize/src/partitioning.rs           |   4
-rw-r--r--  compiler/rustc_pattern_analysis/src/lib.rs                |   8
-rw-r--r--  compiler/rustc_pattern_analysis/src/lints.rs              |   4
-rw-r--r--  compiler/rustc_pattern_analysis/src/pat.rs                |  12
-rw-r--r--  compiler/rustc_pattern_analysis/src/rustc.rs              |  20
-rw-r--r--  compiler/rustc_pattern_analysis/src/usefulness.rs         |  17
8 files changed, 189 insertions, 140 deletions
diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index e8ba83a5527..693dc49c6e8 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -1130,7 +1130,7 @@ fn collect_non_exhaustive_tys<'tcx>( non_exhaustive_tys.insert(pat.ty().inner()); } if let Constructor::IntRange(range) = pat.ctor() { - if cx.is_range_beyond_boundaries(range, pat.ty()) { + if cx.is_range_beyond_boundaries(range, *pat.ty()) { // The range denotes the values before `isize::MIN` or the values after `usize::MAX`/`isize::MAX`. non_exhaustive_tys.insert(pat.ty().inner()); } diff --git a/compiler/rustc_mir_transform/src/jump_threading.rs b/compiler/rustc_mir_transform/src/jump_threading.rs index dcab124505e..e87f68a0905 100644 --- a/compiler/rustc_mir_transform/src/jump_threading.rs +++ b/compiler/rustc_mir_transform/src/jump_threading.rs @@ -36,16 +36,21 @@ //! cost by `MAX_COST`. use rustc_arena::DroplessArena; +use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable}; use rustc_data_structures::fx::FxHashSet; use rustc_index::bit_set::BitSet; use rustc_index::IndexVec; +use rustc_middle::mir::interpret::Scalar; use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::*; -use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt}; +use rustc_middle::ty::layout::LayoutOf; +use rustc_middle::ty::{self, ScalarInt, TyCtxt}; use rustc_mir_dataflow::value_analysis::{Map, PlaceIndex, State, TrackElem}; +use rustc_span::DUMMY_SP; use rustc_target::abi::{TagEncoding, Variants}; use crate::cost_checker::CostChecker; +use crate::dataflow_const_prop::DummyMachine; pub struct JumpThreading; @@ -71,6 +76,7 @@ impl<'tcx> MirPass<'tcx> for JumpThreading { let mut finder = TOFinder { tcx, param_env, + ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine), body, arena: &arena, map: &map, @@ -88,7 +94,7 @@ impl<'tcx> MirPass<'tcx> for JumpThreading { debug!(?discr, ?bb); let discr_ty = discr.ty(body, tcx).ty; - let Ok(discr_layout) = tcx.layout_of(param_env.and(discr_ty)) else { continue }; + let Ok(discr_layout) = finder.ecx.layout_of(discr_ty) else { continue }; let Some(discr) = finder.map.find(discr.as_ref()) else { continue }; debug!(?discr); @@ -142,6 +148,7 @@ struct ThreadingOpportunity { struct TOFinder<'tcx, 'a> { tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, + ecx: InterpCx<'tcx, 'tcx, DummyMachine>, body: &'a Body<'tcx>, map: &'a Map, loop_headers: &'a BitSet<BasicBlock>, @@ -329,11 +336,11 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { } #[instrument(level = "trace", skip(self))] - fn process_operand( + fn process_immediate( &mut self, bb: BasicBlock, lhs: PlaceIndex, - rhs: &Operand<'tcx>, + rhs: ImmTy<'tcx>, state: &mut State<ConditionSet<'a>>, ) -> Option<!> { let register_opportunity = |c: Condition| { @@ -341,13 +348,70 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target }) }; + let conditions = state.try_get_idx(lhs, self.map)?; + if let Immediate::Scalar(Scalar::Int(int)) = *rhs { + conditions.iter_matches(int).for_each(register_opportunity); + } + + None + } + + /// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`. 
+ #[instrument(level = "trace", skip(self))] + fn process_constant( + &mut self, + bb: BasicBlock, + lhs: PlaceIndex, + constant: OpTy<'tcx>, + state: &mut State<ConditionSet<'a>>, + ) { + self.map.for_each_projection_value( + lhs, + constant, + &mut |elem, op| match elem { + TrackElem::Field(idx) => self.ecx.project_field(op, idx.as_usize()).ok(), + TrackElem::Variant(idx) => self.ecx.project_downcast(op, idx).ok(), + TrackElem::Discriminant => { + let variant = self.ecx.read_discriminant(op).ok()?; + let discr_value = + self.ecx.discriminant_for_variant(op.layout.ty, variant).ok()?; + Some(discr_value.into()) + } + TrackElem::DerefLen => { + let op: OpTy<'_> = self.ecx.deref_pointer(op).ok()?.into(); + let len_usize = op.len(&self.ecx).ok()?; + let layout = self.ecx.layout_of(self.tcx.types.usize).unwrap(); + Some(ImmTy::from_uint(len_usize, layout).into()) + } + }, + &mut |place, op| { + if let Some(conditions) = state.try_get_idx(place, self.map) + && let Ok(imm) = self.ecx.read_immediate_raw(op) + && let Some(imm) = imm.right() + && let Immediate::Scalar(Scalar::Int(int)) = *imm + { + conditions.iter_matches(int).for_each(|c: Condition| { + self.opportunities + .push(ThreadingOpportunity { chain: vec![bb], target: c.target }) + }) + } + }, + ); + } + + #[instrument(level = "trace", skip(self))] + fn process_operand( + &mut self, + bb: BasicBlock, + lhs: PlaceIndex, + rhs: &Operand<'tcx>, + state: &mut State<ConditionSet<'a>>, + ) -> Option<!> { match rhs { // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`. Operand::Constant(constant) => { - let conditions = state.try_get_idx(lhs, self.map)?; - let constant = - constant.const_.normalize(self.tcx, self.param_env).try_to_scalar_int()?; - conditions.iter_matches(constant).for_each(register_opportunity); + let constant = self.ecx.eval_mir_constant(&constant.const_, None, None).ok()?; + self.process_constant(bb, lhs, constant, state); } // Transfer the conditions on the copied rhs. Operand::Move(rhs) | Operand::Copy(rhs) => { @@ -360,6 +424,84 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { } #[instrument(level = "trace", skip(self))] + fn process_assign( + &mut self, + bb: BasicBlock, + lhs_place: &Place<'tcx>, + rhs: &Rvalue<'tcx>, + state: &mut State<ConditionSet<'a>>, + ) -> Option<!> { + let lhs = self.map.find(lhs_place.as_ref())?; + match rhs { + Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state)?, + // Transfer the conditions on the copy rhs. + Rvalue::CopyForDeref(rhs) => { + self.process_operand(bb, lhs, &Operand::Copy(*rhs), state)? + } + Rvalue::Discriminant(rhs) => { + let rhs = self.map.find_discr(rhs.as_ref())?; + state.insert_place_idx(rhs, lhs, self.map); + } + // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`. + Rvalue::Aggregate(box ref kind, ref operands) => { + let agg_ty = lhs_place.ty(self.body, self.tcx).ty; + let lhs = match kind { + // Do not support unions. + AggregateKind::Adt(.., Some(_)) => return None, + AggregateKind::Adt(_, variant_index, ..) if agg_ty.is_enum() => { + if let Some(discr_target) = self.map.apply(lhs, TrackElem::Discriminant) + && let Ok(discr_value) = + self.ecx.discriminant_for_variant(agg_ty, *variant_index) + { + self.process_immediate(bb, discr_target, discr_value, state); + } + self.map.apply(lhs, TrackElem::Variant(*variant_index))? 
+ } + _ => lhs, + }; + for (field_index, operand) in operands.iter_enumerated() { + if let Some(field) = self.map.apply(lhs, TrackElem::Field(field_index)) { + self.process_operand(bb, field, operand, state); + } + } + } + // Transfer the conditions on the copy rhs, after inversing polarity. + Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => { + let conditions = state.try_get_idx(lhs, self.map)?; + let place = self.map.find(place.as_ref())?; + let conds = conditions.map(self.arena, Condition::inv); + state.insert_value_idx(place, conds, self.map); + } + // We expect `lhs ?= A`. We found `lhs = Eq(rhs, B)`. + // Create a condition on `rhs ?= B`. + Rvalue::BinaryOp( + op, + box (Operand::Move(place) | Operand::Copy(place), Operand::Constant(value)) + | box (Operand::Constant(value), Operand::Move(place) | Operand::Copy(place)), + ) => { + let conditions = state.try_get_idx(lhs, self.map)?; + let place = self.map.find(place.as_ref())?; + let equals = match op { + BinOp::Eq => ScalarInt::TRUE, + BinOp::Ne => ScalarInt::FALSE, + _ => return None, + }; + let value = value.const_.normalize(self.tcx, self.param_env).try_to_scalar_int()?; + let conds = conditions.map(self.arena, |c| Condition { + value, + polarity: if c.matches(equals) { Polarity::Eq } else { Polarity::Ne }, + ..c + }); + state.insert_value_idx(place, conds, self.map); + } + + _ => {} + } + + None + } + + #[instrument(level = "trace", skip(self))] fn process_statement( &mut self, bb: BasicBlock, @@ -374,18 +516,6 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { // Below, `lhs` is the return value of `mutated_statement`, // the place to which `conditions` apply. - let discriminant_for_variant = |enum_ty: Ty<'tcx>, variant_index| { - let discr = enum_ty.discriminant_for_variant(self.tcx, variant_index)?; - let discr_layout = self.tcx.layout_of(self.param_env.and(discr.ty)).ok()?; - let scalar = ScalarInt::try_from_uint(discr.val, discr_layout.size)?; - Some(Operand::const_from_scalar( - self.tcx, - discr.ty, - scalar.into(), - rustc_span::DUMMY_SP, - )) - }; - match &stmt.kind { // If we expect `discriminant(place) ?= A`, // we have an opportunity if `variant_index ?= A`. @@ -395,7 +525,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { // `SetDiscriminant` may be a no-op if the assigned variant is the untagged variant // of a niche encoding. If we cannot ensure that we write to the discriminant, do // nothing. - let enum_layout = self.tcx.layout_of(self.param_env.and(enum_ty)).ok()?; + let enum_layout = self.ecx.layout_of(enum_ty).ok()?; let writes_discriminant = match enum_layout.variants { Variants::Single { index } => { assert_eq!(index, *variant_index); @@ -408,8 +538,8 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { } => *variant_index != untagged_variant, }; if writes_discriminant { - let discr = discriminant_for_variant(enum_ty, *variant_index)?; - self.process_operand(bb, discr_target, &discr, state)?; + let discr = self.ecx.discriminant_for_variant(enum_ty, *variant_index).ok()?; + self.process_immediate(bb, discr_target, discr, state)?; } } // If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`. @@ -420,89 +550,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity); } StatementKind::Assign(box (lhs_place, rhs)) => { - if let Some(lhs) = self.map.find(lhs_place.as_ref()) { - match rhs { - Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state)?, - // Transfer the conditions on the copy rhs. 
- Rvalue::CopyForDeref(rhs) => { - self.process_operand(bb, lhs, &Operand::Copy(*rhs), state)? - } - Rvalue::Discriminant(rhs) => { - let rhs = self.map.find_discr(rhs.as_ref())?; - state.insert_place_idx(rhs, lhs, self.map); - } - // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`. - Rvalue::Aggregate(box ref kind, ref operands) => { - let agg_ty = lhs_place.ty(self.body, self.tcx).ty; - let lhs = match kind { - // Do not support unions. - AggregateKind::Adt(.., Some(_)) => return None, - AggregateKind::Adt(_, variant_index, ..) if agg_ty.is_enum() => { - if let Some(discr_target) = - self.map.apply(lhs, TrackElem::Discriminant) - && let Some(discr_value) = - discriminant_for_variant(agg_ty, *variant_index) - { - self.process_operand(bb, discr_target, &discr_value, state); - } - self.map.apply(lhs, TrackElem::Variant(*variant_index))? - } - _ => lhs, - }; - for (field_index, operand) in operands.iter_enumerated() { - if let Some(field) = - self.map.apply(lhs, TrackElem::Field(field_index)) - { - self.process_operand(bb, field, operand, state); - } - } - } - // Transfer the conditions on the copy rhs, after inversing polarity. - Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => { - let conditions = state.try_get_idx(lhs, self.map)?; - let place = self.map.find(place.as_ref())?; - let conds = conditions.map(self.arena, Condition::inv); - state.insert_value_idx(place, conds, self.map); - } - // We expect `lhs ?= A`. We found `lhs = Eq(rhs, B)`. - // Create a condition on `rhs ?= B`. - Rvalue::BinaryOp( - op, - box ( - Operand::Move(place) | Operand::Copy(place), - Operand::Constant(value), - ) - | box ( - Operand::Constant(value), - Operand::Move(place) | Operand::Copy(place), - ), - ) => { - let conditions = state.try_get_idx(lhs, self.map)?; - let place = self.map.find(place.as_ref())?; - let equals = match op { - BinOp::Eq => ScalarInt::TRUE, - BinOp::Ne => ScalarInt::FALSE, - _ => return None, - }; - let value = value - .const_ - .normalize(self.tcx, self.param_env) - .try_to_scalar_int()?; - let conds = conditions.map(self.arena, |c| Condition { - value, - polarity: if c.matches(equals) { - Polarity::Eq - } else { - Polarity::Ne - }, - ..c - }); - state.insert_value_idx(place, conds, self.map); - } - - _ => {} - } - } + self.process_assign(bb, lhs_place, rhs, state)?; } _ => {} } @@ -577,7 +625,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> { let discr = discr.place()?; let discr_ty = discr.ty(self.body, self.tcx).ty; - let discr_layout = self.tcx.layout_of(self.param_env.and(discr_ty)).ok()?; + let discr_layout = self.ecx.layout_of(discr_ty).ok()?; let conditions = state.try_get(discr.as_ref(), self.map)?; if let Some((value, _)) = targets.iter().find(|&(_, target)| target == target_bb) { diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index 2c40cd4d8f2..7ff182381b8 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -182,7 +182,7 @@ where } // Ensure CGUs are sorted by name, so that we get deterministic results. 
- if !codegen_units.is_sorted_by(|a, b| Some(a.name().as_str().cmp(b.name().as_str()))) { + if !codegen_units.is_sorted_by(|a, b| a.name().as_str() <= b.name().as_str()) { let mut names = String::new(); for cgu in codegen_units.iter() { names += &format!("- {}\n", cgu.name()); @@ -317,7 +317,7 @@ fn merge_codegen_units<'tcx>( assert!(cx.tcx.sess.codegen_units().as_usize() >= 1); // A sorted order here ensures merging is deterministic. - assert!(codegen_units.is_sorted_by(|a, b| Some(a.name().as_str().cmp(b.name().as_str())))); + assert!(codegen_units.is_sorted_by(|a, b| a.name().as_str() <= b.name().as_str())); // This map keeps track of what got merged into what. let mut cgu_contents: FxHashMap<Symbol, Vec<Symbol>> = diff --git a/compiler/rustc_pattern_analysis/src/lib.rs b/compiler/rustc_pattern_analysis/src/lib.rs index 21fa8e68d82..4fd01b5e638 100644 --- a/compiler/rustc_pattern_analysis/src/lib.rs +++ b/compiler/rustc_pattern_analysis/src/lib.rs @@ -82,7 +82,7 @@ impl<'a, T: ?Sized> Captures<'a> for T {} /// Most of the crate is parameterized on a type that implements this trait. pub trait TypeCx: Sized + fmt::Debug { /// The type of a pattern. - type Ty: Copy + Clone + fmt::Debug; // FIXME: remove Copy + type Ty: Clone + fmt::Debug; /// Errors that can abort analysis. type Error: fmt::Debug; /// The index of an enum variant. @@ -97,16 +97,16 @@ pub trait TypeCx: Sized + fmt::Debug { fn is_exhaustive_patterns_feature_on(&self) -> bool; /// The number of fields for this constructor. - fn ctor_arity(&self, ctor: &Constructor<Self>, ty: Self::Ty) -> usize; + fn ctor_arity(&self, ctor: &Constructor<Self>, ty: &Self::Ty) -> usize; /// The types of the fields for this constructor. The result must have a length of /// `ctor_arity()`. - fn ctor_sub_tys(&self, ctor: &Constructor<Self>, ty: Self::Ty) -> &[Self::Ty]; + fn ctor_sub_tys(&self, ctor: &Constructor<Self>, ty: &Self::Ty) -> &[Self::Ty]; /// The set of all the constructors for `ty`. /// /// This must follow the invariants of `ConstructorSet` - fn ctors_for_ty(&self, ty: Self::Ty) -> Result<ConstructorSet<Self>, Self::Error>; + fn ctors_for_ty(&self, ty: &Self::Ty) -> Result<ConstructorSet<Self>, Self::Error>; /// Best-effort `Debug` implementation. fn debug_pat(f: &mut fmt::Formatter<'_>, pat: &DeconstructedPat<'_, Self>) -> fmt::Result; diff --git a/compiler/rustc_pattern_analysis/src/lints.rs b/compiler/rustc_pattern_analysis/src/lints.rs index d9dbd8250ef..f9f065fbe8b 100644 --- a/compiler/rustc_pattern_analysis/src/lints.rs +++ b/compiler/rustc_pattern_analysis/src/lints.rs @@ -46,7 +46,7 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> { } fn head_ty(&self) -> Option<RevealedTy<'tcx>> { - self.patterns.first().map(|pat| pat.ty()) + self.patterns.first().map(|pat| *pat.ty()) } /// Do constructor splitting on the constructors of the column. 
@@ -101,7 +101,7 @@ fn collect_nonexhaustive_missing_variants<'a, 'p, 'tcx>( let Some(ty) = column.head_ty() else { return Ok(Vec::new()); }; - let pcx = &PlaceCtxt::new_dummy(cx, ty); + let pcx = &PlaceCtxt::new_dummy(cx, &ty); let set = column.analyze_ctors(pcx)?; if set.present.is_empty() { diff --git a/compiler/rustc_pattern_analysis/src/pat.rs b/compiler/rustc_pattern_analysis/src/pat.rs index 75fe59edf88..8cd0ecb073c 100644 --- a/compiler/rustc_pattern_analysis/src/pat.rs +++ b/compiler/rustc_pattern_analysis/src/pat.rs @@ -54,8 +54,8 @@ impl<'p, Cx: TypeCx> DeconstructedPat<'p, Cx> { pub fn ctor(&self) -> &Constructor<Cx> { &self.ctor } - pub fn ty(&self) -> Cx::Ty { - self.ty + pub fn ty(&self) -> &Cx::Ty { + &self.ty } /// Returns the extra data stored in a pattern. Returns `None` if the pattern is a wildcard that /// does not correspond to a user-supplied pattern. @@ -242,15 +242,15 @@ impl<Cx: TypeCx> WitnessPat<Cx> { /// `Some(_)`. pub(crate) fn wild_from_ctor(pcx: &PlaceCtxt<'_, Cx>, ctor: Constructor<Cx>) -> Self { let field_tys = pcx.ctor_sub_tys(&ctor); - let fields = field_tys.iter().map(|ty| Self::wildcard(*ty)).collect(); - Self::new(ctor, fields, pcx.ty) + let fields = field_tys.iter().cloned().map(|ty| Self::wildcard(ty)).collect(); + Self::new(ctor, fields, pcx.ty.clone()) } pub fn ctor(&self) -> &Constructor<Cx> { &self.ctor } - pub fn ty(&self) -> Cx::Ty { - self.ty + pub fn ty(&self) -> &Cx::Ty { + &self.ty } pub fn iter_fields(&self) -> impl Iterator<Item = &WitnessPat<Cx>> { diff --git a/compiler/rustc_pattern_analysis/src/rustc.rs b/compiler/rustc_pattern_analysis/src/rustc.rs index 87e70d68c1b..27b25802427 100644 --- a/compiler/rustc_pattern_analysis/src/rustc.rs +++ b/compiler/rustc_pattern_analysis/src/rustc.rs @@ -766,7 +766,7 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p))); let kind = match pat.ctor() { Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) }, - IntRange(range) => return self.hoist_pat_range(range, pat.ty()), + IntRange(range) => return self.hoist_pat_range(range, *pat.ty()), Struct | Variant(_) | UnionField => match pat.ty().kind() { ty::Tuple(..) => PatKind::Leaf { subpatterns: subpatterns @@ -785,7 +785,7 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { RustcMatchCheckCtxt::variant_index_for_adt(&pat.ctor(), *adt_def); let variant = &adt_def.variant(variant_index); let subpatterns = cx - .list_variant_nonhidden_fields(pat.ty(), variant) + .list_variant_nonhidden_fields(*pat.ty(), variant) .zip(subpatterns) .map(|((field, _ty), pattern)| FieldPat { field, pattern }) .collect(); @@ -796,7 +796,7 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { PatKind::Leaf { subpatterns } } } - _ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty()), + _ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), *pat.ty()), }, // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should // be careful to reconstruct the correct constant pattern here. 
However a string @@ -961,21 +961,21 @@ impl<'p, 'tcx> TypeCx for RustcMatchCheckCtxt<'p, 'tcx> { self.tcx.features().exhaustive_patterns } - fn ctor_arity(&self, ctor: &crate::constructor::Constructor<Self>, ty: Self::Ty) -> usize { - self.ctor_arity(ctor, ty) + fn ctor_arity(&self, ctor: &crate::constructor::Constructor<Self>, ty: &Self::Ty) -> usize { + self.ctor_arity(ctor, *ty) } fn ctor_sub_tys( &self, ctor: &crate::constructor::Constructor<Self>, - ty: Self::Ty, + ty: &Self::Ty, ) -> &[Self::Ty] { - self.ctor_sub_tys(ctor, ty) + self.ctor_sub_tys(ctor, *ty) } fn ctors_for_ty( &self, - ty: Self::Ty, + ty: &Self::Ty, ) -> Result<crate::constructor::ConstructorSet<Self>, Self::Error> { - self.ctors_for_ty(ty) + self.ctors_for_ty(*ty) } fn debug_pat( @@ -994,7 +994,7 @@ impl<'p, 'tcx> TypeCx for RustcMatchCheckCtxt<'p, 'tcx> { overlaps_on: IntRange, overlaps_with: &[&crate::pat::DeconstructedPat<'_, Self>], ) { - let overlap_as_pat = self.hoist_pat_range(&overlaps_on, pat.ty()); + let overlap_as_pat = self.hoist_pat_range(&overlaps_on, *pat.ty()); let overlaps: Vec<_> = overlaps_with .iter() .map(|pat| pat.data().unwrap().span) diff --git a/compiler/rustc_pattern_analysis/src/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs index dac354a1c52..d7852a2b2cb 100644 --- a/compiler/rustc_pattern_analysis/src/usefulness.rs +++ b/compiler/rustc_pattern_analysis/src/usefulness.rs @@ -736,13 +736,14 @@ pub(crate) struct PlaceCtxt<'a, Cx: TypeCx> { #[derivative(Debug = "ignore")] pub(crate) mcx: MatchCtxt<'a, Cx>, /// Type of the place under investigation. - pub(crate) ty: Cx::Ty, + #[derivative(Clone(clone_with = "Clone::clone"))] // See rust-derivative#90 + pub(crate) ty: &'a Cx::Ty, } impl<'a, Cx: TypeCx> PlaceCtxt<'a, Cx> { /// A `PlaceCtxt` when code other than `is_useful` needs one. #[cfg_attr(not(feature = "rustc"), allow(dead_code))] - pub(crate) fn new_dummy(mcx: MatchCtxt<'a, Cx>, ty: Cx::Ty) -> Self { + pub(crate) fn new_dummy(mcx: MatchCtxt<'a, Cx>, ty: &'a Cx::Ty) -> Self { PlaceCtxt { mcx, ty } } @@ -1023,8 +1024,8 @@ impl<'p, Cx: TypeCx> Matrix<'p, Cx> { matrix } - fn head_ty(&self) -> Option<Cx::Ty> { - self.place_ty.first().copied() + fn head_ty(&self) -> Option<&Cx::Ty> { + self.place_ty.first() } fn column_count(&self) -> usize { self.place_ty.len() @@ -1058,7 +1059,7 @@ impl<'p, Cx: TypeCx> Matrix<'p, Cx> { let ctor_sub_tys = pcx.ctor_sub_tys(ctor); let arity = ctor_sub_tys.len(); let specialized_place_ty = - ctor_sub_tys.iter().chain(self.place_ty[1..].iter()).copied().collect(); + ctor_sub_tys.iter().chain(self.place_ty[1..].iter()).cloned().collect(); let ctor_sub_validity = self.place_validity[0].specialize(ctor); let specialized_place_validity = std::iter::repeat(ctor_sub_validity) .take(arity) @@ -1214,7 +1215,7 @@ impl<Cx: TypeCx> WitnessStack<Cx> { let len = self.0.len(); let arity = ctor.arity(pcx); let fields = self.0.drain((len - arity)..).rev().collect(); - let pat = WitnessPat::new(ctor.clone(), fields, pcx.ty); + let pat = WitnessPat::new(ctor.clone(), fields, pcx.ty.clone()); self.0.push(pat); } } @@ -1410,7 +1411,7 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( return Ok(WitnessMatrix::empty()); } - let Some(ty) = matrix.head_ty() else { + let Some(ty) = matrix.head_ty().cloned() else { // The base case: there are no columns in the matrix. We are morally pattern-matching on (). // A row is useful iff it has no (unguarded) rows above it. let mut useful = true; // Whether the next row is useful. 
@@ -1431,7 +1432,7 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( }; debug!("ty: {ty:?}"); - let pcx = &PlaceCtxt { mcx, ty }; + let pcx = &PlaceCtxt { mcx, ty: &ty }; let ctors_for_ty = pcx.ctors_for_ty()?; // Whether the place/column we are inspecting is known to contain valid data. |
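
A few sketches of the ideas behind the larger hunks above. The jump_threading.rs changes revolve around `Condition` values: "this jump can be threaded if `place ?= value`", where the polarity can be flipped by a `Not` and transferred across an `Eq`/`Ne` comparison against a constant. The following is a minimal, standalone sketch in plain Rust; only the names `Condition`, `Polarity`, `matches` and `inv` are taken from the diff, the `u128` values and the `main` driver are purely illustrative:

```rust
// Standalone sketch of the condition bookkeeping used by the pass above.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Polarity {
    Eq,
    Ne,
}

#[derive(Clone, Copy, Debug)]
struct Condition {
    value: u128,
    polarity: Polarity,
}

impl Condition {
    /// Does an observed constant satisfy this condition?
    fn matches(&self, value: u128) -> bool {
        (self.value == value) == (self.polarity == Polarity::Eq)
    }

    /// Flip the polarity, as the pass does for `Rvalue::UnaryOp(UnOp::Not, ..)`.
    fn inv(mut self) -> Self {
        self.polarity = match self.polarity {
            Polarity::Eq => Polarity::Ne,
            Polarity::Ne => Polarity::Eq,
        };
        self
    }
}

fn main() {
    // To thread a jump we need `lhs == true`, i.e. `lhs ?= 1`.
    let want_lhs = Condition { value: 1, polarity: Polarity::Eq };

    // We then see `lhs = Eq(rhs, 5)`. As in the `Rvalue::BinaryOp` arm, the
    // condition is rewritten onto `rhs`: wanting `lhs == true` of an `Eq`
    // comparison means wanting `rhs == 5`.
    let want_rhs = Condition {
        value: 5,
        polarity: if want_lhs.matches(1) { Polarity::Eq } else { Polarity::Ne },
    };
    assert!(want_rhs.matches(5));
    assert!(!want_rhs.matches(6));

    // Seeing `lhs = !rhs2` instead would simply flip the polarity.
    assert!(want_lhs.inv().matches(0));
}
```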
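The new `process_constant` evaluates a MIR constant with the interpreter (the `DummyMachine` from dataflow const-prop) and then descends into it along the projections the value analysis tracks — fields, enum variants, discriminants, slice lengths — registering an opportunity at every tracked place whose condition the constant satisfies. Below is a toy, non-rustc sketch of that walk over a nested constant; the `Const` type, `project` and `walk` are illustrative stand-ins for the interpreter and `Map::for_each_projection_value`, not compiler APIs:

```rust
// Toy model of walking an evaluated constant along tracked projections.
#[derive(Clone, Debug)]
enum Const {
    Int(u128),
    Struct(Vec<Const>),
}

#[derive(Clone, Copy, Debug)]
enum TrackElem {
    Field(usize),
}

/// Apply one tracked projection to a constant, if it fits the value's shape.
fn project(c: &Const, elem: TrackElem) -> Option<&Const> {
    match (c, elem) {
        (Const::Struct(fields), TrackElem::Field(i)) => fields.get(i),
        _ => None,
    }
}

/// Visit `constant` and every tracked sub-value, reporting the projection
/// path that leads to each one.
fn walk(path: &mut Vec<TrackElem>, constant: &Const, f: &mut dyn FnMut(&[TrackElem], &Const)) {
    f(path.as_slice(), constant);
    if let Const::Struct(fields) = constant {
        for i in 0..fields.len() {
            path.push(TrackElem::Field(i));
            if let Some(sub) = project(constant, TrackElem::Field(i)) {
                walk(path, sub, &mut *f);
            }
            path.pop();
        }
    }
}

fn main() {
    // Stand-in for an evaluated constant like `S { a: 0, b: (1, 2) }`.
    let c = Const::Struct(vec![
        Const::Int(0),
        Const::Struct(vec![Const::Int(1), Const::Int(2)]),
    ]);
    walk(&mut Vec::new(), &c, &mut |path: &[TrackElem], value: &Const| {
        if let Const::Int(int) = value {
            // The real pass checks `conditions.iter_matches(int)` for the
            // tracked place reached through this projection path.
            println!("{path:?} = {int}");
        }
    });
}
```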
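The two partitioning.rs hunks track a change to the (then unstable) `is_sorted_by` helpers: the comparator now answers "is this pair in order?" with a `bool` instead of returning `Option<Ordering>`. A minimal example of the new shape, assuming a toolchain where `slice::is_sorted_by` is available (it has since been stabilized):

```rust
fn main() {
    // CGU names, as in `merge_codegen_units`; the closure now returns `bool`.
    let names = ["cgu.0", "cgu.1", "cgu.2"];
    assert!(names.is_sorted_by(|a, b| a <= b));

    // Roughly what the old `Option<Ordering>`-returning form expressed as
    // `|a, b| Some(a.cmp(b))` for a total order.
    let shuffled = ["cgu.2", "cgu.0"];
    assert!(!shuffled.is_sorted_by(|a, b| a <= b));
}
```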
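The rustc_pattern_analysis hunks drop the `Copy` bound on `TypeCx::Ty` and switch the trait methods (and `DeconstructedPat::ty`, `Matrix::head_ty`, `PlaceCtxt::ty`) to borrow the type rather than take it by value, so callers clone only where an owned value is genuinely needed. A reduced sketch of that shape; the trait keeps only names visible in the diff, and the `String`-typed impl is purely hypothetical:

```rust
use std::fmt;

// After the change: `Ty` only needs `Clone`, and methods borrow it.
trait TypeCx: Sized + fmt::Debug {
    type Ty: Clone + fmt::Debug;
    fn ctor_arity(&self, ty: &Self::Ty) -> usize;
}

#[derive(Debug)]
struct Cx;

impl TypeCx for Cx {
    // A non-`Copy` type: this impl would not have satisfied the old
    // `type Ty: Copy + Clone + fmt::Debug` bound.
    type Ty = String;

    fn ctor_arity(&self, ty: &Self::Ty) -> usize {
        ty.len()
    }
}

// Mirrors `Matrix::head_ty` going from `.first().copied()` to handing out a
// reference; the caller decides whether to `.cloned()` it.
fn head_ty<C: TypeCx>(place_ty: &[C::Ty]) -> Option<&C::Ty> {
    place_ty.first()
}

fn main() {
    let cx = Cx;
    let place_ty = vec![String::from("bool"), String::from("u8")];
    let ty = head_ty::<Cx>(&place_ty).cloned().expect("non-empty matrix");
    assert_eq!(cx.ctor_arity(&ty), 4);
}
```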
