| author | Ralf Jung <post@ralfj.de> | 2023-07-25 22:35:07 +0200 |
|---|---|---|
| committer | Ralf Jung <post@ralfj.de> | 2023-07-25 22:35:07 +0200 |
| commit | da3f0d0eb7ced3d4967dcac01ddd2edef8f43b85 (patch) | |
| tree | 0e08e8b822bbf9def79446e844cb41fb756a9842 /compiler/rustc_const_eval | |
| parent | 77ff1b83cd3279c348312e1e1f9bf6a1c1174178 (diff) | |
| download | rust-da3f0d0eb7ced3d4967dcac01ddd2edef8f43b85.tar.gz rust-da3f0d0eb7ced3d4967dcac01ddd2edef8f43b85.zip | |
make MPlaceTy non-Copy
Diffstat (limited to 'compiler/rustc_const_eval')
7 files changed, 16 insertions, 16 deletions
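The change is mechanical but instructive: once `Copy` is dropped from `MPlaceTy`'s derive list, every use site that previously duplicated the value implicitly has to either take it by reference (as `check_mplace` now does) or call `.clone()` explicitly, which makes each duplication visible at the call site. A minimal standalone sketch of that pattern, using an illustrative `Place` type rather than the real rustc definitions:

```rust
// Simplified stand-in for MPlaceTy: Clone but deliberately not Copy.
#[derive(Clone, Hash, Eq, PartialEq, Debug)]
struct Place {
    id: u64,
}

// Taking the place by reference (as check_mplace now does) avoids both a move and a clone.
fn check(place: &Place) -> Result<(), String> {
    if place.id == 0 { Err("dangling place".to_string()) } else { Ok(()) }
}

fn main() -> Result<(), String> {
    let place = Place { id: 1 };
    check(&place)?; // borrow: no duplication needed
    let snapshot = place.clone(); // explicit clone where a second owned value really is required
    println!("{place:?} {snapshot:?}");
    Ok(())
}
```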
diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
index b21cb984de6..55d719d2703 100644
--- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
+++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -58,7 +58,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     ecx.push_stack_frame(
         cid.instance,
         body,
-        &ret.into(),
+        &ret.clone().into(),
         StackPopCleanup::Root { cleanup: false },
     )?;

@@ -356,7 +356,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
             // Since evaluation had no errors, validate the resulting constant.
             // This is a separate `try` block to provide more targeted error reporting.
             let validation: Result<_, InterpErrorInfo<'_>> = try {
-                let mut ref_tracking = RefTracking::new(mplace);
+                let mut ref_tracking = RefTracking::new(mplace.clone());
                 let mut inner = false;
                 while let Some((mplace, path)) = ref_tracking.todo.pop() {
                     let mode = match tcx.static_mutability(cid.instance.def_id()) {
diff --git a/compiler/rustc_const_eval/src/const_eval/valtrees.rs b/compiler/rustc_const_eval/src/const_eval/valtrees.rs
index be5eb1558cf..8f68f837759 100644
--- a/compiler/rustc_const_eval/src/const_eval/valtrees.rs
+++ b/compiler/rustc_const_eval/src/const_eval/valtrees.rs
@@ -21,7 +21,7 @@ fn branches<'tcx>(
 ) -> ValTreeCreationResult<'tcx> {
     let place = match variant {
         Some(variant) => ecx.project_downcast(place, variant).unwrap(),
-        None => *place,
+        None => place.clone(),
     };
     let variant = variant.map(|variant| Some(ty::ValTree::Leaf(ScalarInt::from(variant.as_u32()))));
     debug!(?place, ?variant);
@@ -290,7 +290,7 @@ pub fn valtree_to_const_value<'tcx>(
             debug!(?place);

             valtree_into_mplace(&mut ecx, &mut place, valtree);
-            dump_place(&ecx, place.into());
+            dump_place(&ecx, place.clone().into());
             intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();

             match ty.kind() {
@@ -352,7 +352,7 @@ fn valtree_into_mplace<'tcx>(
             debug!(?pointee_place);

             valtree_into_mplace(ecx, &mut pointee_place, valtree);
-            dump_place(ecx, pointee_place.into());
+            dump_place(ecx, pointee_place.clone().into());
             intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();

             let imm = match inner_ty.kind() {
@@ -389,7 +389,7 @@ fn valtree_into_mplace<'tcx>(
                         Some(variant_idx),
                     )
                 }
-                _ => (*place, branches, None),
+                _ => (place.clone(), branches, None),
             };
             debug!(?place_adjusted, ?branches);

diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index e1088e72eb6..910c3ca5d0a 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -358,7 +358,7 @@ pub fn intern_const_alloc_recursive<
         Some(ret.layout.ty),
     );

-    ref_tracking.track((*ret, base_intern_mode), || ());
+    ref_tracking.track((ret.clone(), base_intern_mode), || ());

     while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
         let res = InternVisitor {
@@ -464,7 +464,7 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         ) -> InterpResult<'tcx, ()>,
     ) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
         let dest = self.allocate(layout, MemoryKind::Stack)?;
-        f(self, &dest.into())?;
+        f(self, &dest.clone().into())?;
         let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
         alloc.mutability = Mutability::Not;
         Ok(self.tcx.mk_const_alloc(alloc))
diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs
index 32a2f648321..d1427b09632 100644
--- a/compiler/rustc_const_eval/src/interpret/operand.rs
+++ b/compiler/rustc_const_eval/src/interpret/operand.rs
@@ -476,7 +476,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 if let Some(val) = self.read_immediate_from_mplace_raw(mplace)? {
                     Right(val)
                 } else {
-                    Left(*mplace)
+                    Left(mplace.clone())
                 }
             }
             Right(val) => Right(val),
diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index 306ad20c883..96a960118ce 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -81,7 +81,7 @@ pub struct MemPlace<Prov: Provenance = AllocId> {
 }

 /// A MemPlace with its layout. Constructing it is only possible in this module.
-#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
+#[derive(Clone, Hash, Eq, PartialEq, Debug)]
 pub struct MPlaceTy<'tcx, Prov: Provenance = AllocId> {
     mplace: MemPlace<Prov>,
     pub layout: TyAndLayout<'tcx>,
@@ -452,7 +452,7 @@ where
         }

         let mplace = self.ref_to_mplace(&val)?;
-        self.check_mplace(mplace)?;
+        self.check_mplace(&mplace)?;
         Ok(mplace)
     }

@@ -483,7 +483,7 @@ where
     }

     /// Check if this mplace is dereferenceable and sufficiently aligned.
-    pub fn check_mplace(&self, mplace: MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
+    pub fn check_mplace(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
         let (size, _align) = self
             .size_and_align_of_mplace(&mplace)?
             .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs
index f934cca2517..d0191ea978a 100644
--- a/compiler/rustc_const_eval/src/interpret/terminator.rs
+++ b/compiler/rustc_const_eval/src/interpret/terminator.rs
@@ -634,7 +634,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         // Ensure the return place is aligned and dereferenceable, and protect it for
         // in-place return value passing.
         if let Either::Left(mplace) = destination.as_mplace_or_local() {
-            self.check_mplace(mplace)?;
+            self.check_mplace(&mplace)?;
         } else {
             // Nothing to do for locals, they are always properly allocated and aligned.
         }
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index a82c98e7205..4fd5fd13c3c 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -136,19 +136,19 @@ pub struct RefTracking<T, PATH = ()> {
     pub todo: Vec<(T, PATH)>,
 }

-impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
+impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
     pub fn empty() -> Self {
         RefTracking { seen: FxHashSet::default(), todo: vec![] }
     }
     pub fn new(op: T) -> Self {
         let mut ref_tracking_for_consts =
-            RefTracking { seen: FxHashSet::default(), todo: vec![(op, PATH::default())] };
+            RefTracking { seen: FxHashSet::default(), todo: vec![(op.clone(), PATH::default())] };
         ref_tracking_for_consts.seen.insert(op);
         ref_tracking_for_consts
     }

     pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
-        if self.seen.insert(op) {
+        if self.seen.insert(op.clone()) {
             trace!("Recursing below ptr {:#?}", op);
             let path = path();
             // Remember to come back to this later.
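The `validity.rs` hunk applies the same idea at the trait-bound level: `RefTracking` now only requires `T: Clone`, and clones the value once before handing ownership to the `seen` set, since `HashSet::insert` consumes its argument. A rough sketch of that shape, using `std::collections::HashSet` and a plain `Vec` in place of the real `FxHashSet`/`PATH` machinery:

```rust
use std::collections::HashSet;
use std::hash::Hash;

// Simplified RefTracking: T only needs Clone, not Copy.
struct RefTracking<T> {
    seen: HashSet<T>,
    todo: Vec<T>,
}

impl<T: Clone + Eq + Hash + std::fmt::Debug> RefTracking<T> {
    fn new(op: T) -> Self {
        // Clone once for the worklist; the original goes into the seen set.
        let mut this = RefTracking { seen: HashSet::new(), todo: vec![op.clone()] };
        this.seen.insert(op);
        this
    }

    fn track(&mut self, op: T) {
        // insert() takes ownership, so clone before handing it over while we still need `op`.
        if self.seen.insert(op.clone()) {
            println!("recursing below {op:?}");
            self.todo.push(op);
        }
    }
}

fn main() {
    let mut tracking = RefTracking::new(String::from("root"));
    tracking.track(String::from("child"));
    assert_eq!(tracking.todo.len(), 2);
}
```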
