author    Ralf Jung <post@ralfj.de>  2023-11-25 18:41:53 +0100
committer Ralf Jung <post@ralfj.de>  2023-12-07 17:46:36 +0100
commit    cb863033423d895c0530ab749e4a3bca494b5c27 (patch)
tree      adea7e97f6421b3c59f62a6203157096ab801cd3 /compiler/rustc_const_eval/src/interpret/intern.rs
parent    85a4bd8f5873aa3ec5eb38baf63b89aa9bd21a7b (diff)
ctfe interpreter: extend provenance so that it can track whether a pointer is immutable
Diffstat (limited to 'compiler/rustc_const_eval/src/interpret/intern.rs')
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intern.rs | 24 +++++++++++++-----------
1 file changed, 13 insertions(+), 11 deletions(-)
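
The change below is mechanical once the shape of the new provenance type is clear: interning code that used to receive an `AllocId` directly now receives a provenance value and projects the id out of it. As orientation, here is a minimal, purely illustrative sketch of a `CtfeProvenance`-style type: an allocation id plus an immutability flag, exposing the `alloc_id()` accessor the hunks below call. Field names and layout are assumptions for this sketch, not rustc's actual definition (which packs the flag more compactly).

// Illustrative stand-in for rustc's AllocId: an opaque allocation handle.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct AllocId(pub u64);

// Hypothetical sketch of a CtfeProvenance-like type: provenance that still
// identifies an allocation but additionally records whether the pointer is
// immutable. Names and layout are assumed for illustration only.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct CtfeProvenance {
    alloc_id: AllocId,
    immutable: bool,
}

impl CtfeProvenance {
    // The projection the diff calls as `prov.alloc_id()`.
    pub fn alloc_id(self) -> AllocId {
        self.alloc_id
    }

    // The extra state this commit is about: is the pointer immutable?
    pub fn is_immutable(self) -> bool {
        self.immutable
    }
}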
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 881df9b04ab..7931789e436 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -18,7 +18,7 @@ use super::validity::RefTracking;
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir as hir;
-use rustc_middle::mir::interpret::InterpResult;
+use rustc_middle::mir::interpret::{CtfeProvenance, InterpResult};
 use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
 
 use rustc_ast::Mutability;
@@ -34,7 +34,7 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
         'mir,
         'tcx,
         MemoryKind = T,
-        Provenance = AllocId,
+        Provenance = CtfeProvenance,
         ExtraFnVal = !,
         FrameExtra = (),
         AllocExtra = (),
@@ -135,7 +135,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
         alloc.mutability = Mutability::Not;
     };
     // link the alloc id to the actual allocation
-    leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, alloc_id)| alloc_id));
+    leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, prov)| prov.alloc_id()));
     let alloc = tcx.mk_const_alloc(alloc);
     tcx.set_alloc_id_memory(alloc_id, alloc);
     None
@@ -178,10 +178,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
                 tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
             {
                 let ptr = mplace.meta().unwrap_meta().to_pointer(&tcx)?;
-                if let Some(alloc_id) = ptr.provenance {
+                if let Some(prov) = ptr.provenance {
                     // Explicitly choose const mode here, since vtables are immutable, even
                     // if the reference of the fat pointer is mutable.
-                    self.intern_shallow(alloc_id, InternMode::Const, None);
+                    self.intern_shallow(prov.alloc_id(), InternMode::Const, None);
                 } else {
                     // Validation will error (with a better message) on an invalid vtable pointer.
                     // Let validation show the error message, but make sure it *does* error.
@@ -191,7 +191,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
             }
             // Check if we have encountered this pointer+layout combination before.
             // Only recurse for allocation-backed pointers.
-            if let Some(alloc_id) = mplace.ptr().provenance {
+            if let Some(prov) = mplace.ptr().provenance {
                 // Compute the mode with which we intern this. Our goal here is to make as many
                 // statics as we can immutable so they can be placed in read-only memory by LLVM.
                 let ref_mode = match self.mode {
@@ -234,7 +234,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
                         InternMode::Const
                     }
                 };
-                match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
+                match self.intern_shallow(prov.alloc_id(), ref_mode, Some(referenced_ty)) {
                     // No need to recurse, these are interned already and statics may have
                     // cycles, so we don't want to recurse there
                     Some(IsStaticOrFn) => {}
@@ -353,7 +353,7 @@ pub fn intern_const_alloc_recursive<
         leftover_allocations,
         // The outermost allocation must exist, because we allocated it with
         // `Memory::allocate`.
-        ret.ptr().provenance.unwrap(),
+        ret.ptr().provenance.unwrap().alloc_id(),
         base_intern_mode,
         Some(ret.layout.ty),
     );
@@ -431,7 +431,8 @@ pub fn intern_const_alloc_recursive<
             }
             let alloc = tcx.mk_const_alloc(alloc);
             tcx.set_alloc_id_memory(alloc_id, alloc);
-            for &(_, alloc_id) in alloc.inner().provenance().ptrs().iter() {
+            for &(_, prov) in alloc.inner().provenance().ptrs().iter() {
+                let alloc_id = prov.alloc_id();
                 if leftover_allocations.insert(alloc_id) {
                     todo.push(alloc_id);
                 }
@@ -503,10 +504,11 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         // `allocate` picks a fresh AllocId that we will associate with its data below.
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
-        let mut alloc = self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap()).unwrap().1;
+        let mut alloc =
+            self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap().alloc_id()).unwrap().1;
         alloc.mutability = Mutability::Not;
         let alloc = self.tcx.mk_const_alloc(alloc);
-        let alloc_id = dest.ptr().provenance.unwrap(); // this was just allocated, it must have provenance
+        let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
         self.tcx.set_alloc_id_memory(alloc_id, alloc);
         Ok(alloc_id)
     }
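
Taken together, the hunks are one mechanical migration: every call site that previously read an `AllocId` straight out of `ptr.provenance` now holds a richer provenance value and goes through `alloc_id()`. In terms of the illustrative types sketched above (not rustc's actual signatures):

// Before this commit, CTFE provenance *was* the allocation id.
fn alloc_id_before(provenance: Option<AllocId>) -> Option<AllocId> {
    provenance
}

// After it, provenance carries extra state, so callers project the id out.
fn alloc_id_after(provenance: Option<CtfeProvenance>) -> Option<AllocId> {
    provenance.map(|prov| prov.alloc_id())
}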