about summary refs log tree commit diff
path: root/compiler
diff options
context:
space:
mode:
authorbors <bors@rust-lang.org>2021-06-29 15:52:21 +0000
committerbors <bors@rust-lang.org>2021-06-29 15:52:21 +0000
commite98897e5dc9898707bf4331c43b2e76ab7e282fe (patch)
treeb676c09db0f7bbb80996a1e354dc58cc4ec010d5 /compiler
parent8971fff984e7a45ca6cdcd146816b4896a4ab1ea (diff)
parent97772bb1f230f4981c9af6614df1ebc09b12c4f6 (diff)
downloadrust-e98897e5dc9898707bf4331c43b2e76ab7e282fe.tar.gz
rust-e98897e5dc9898707bf4331c43b2e76ab7e282fe.zip
Auto merge of #86475 - crlf0710:miri_vtable_refactor, r=bjorn3
Change vtable memory representation to use tcx allocated allocations.

This fixes https://github.com/rust-lang/rust/issues/86324. However, I suspect there's more to change before it can land.

r? `@bjorn3`
cc `@rust-lang/miri`
Diffstat (limited to 'compiler')
-rw-r--r--compiler/rustc_codegen_cranelift/src/common.rs2
-rw-r--r--compiler/rustc_codegen_cranelift/src/constant.rs2
-rw-r--r--compiler/rustc_codegen_cranelift/src/lib.rs2
-rw-r--r--compiler/rustc_codegen_cranelift/src/unsize.rs4
-rw-r--r--compiler/rustc_codegen_cranelift/src/vtable.rs106
-rw-r--r--compiler/rustc_codegen_llvm/src/common.rs4
-rw-r--r--compiler/rustc_codegen_ssa/src/meth.rs43
-rw-r--r--compiler/rustc_codegen_ssa/src/traits/consts.rs2
-rw-r--r--compiler/rustc_middle/src/ty/context.rs6
-rw-r--r--compiler/rustc_middle/src/ty/mod.rs18
-rw-r--r--compiler/rustc_middle/src/ty/vtable.rs106
-rw-r--r--compiler/rustc_mir/src/interpret/eval_context.rs6
-rw-r--r--compiler/rustc_mir/src/interpret/intern.rs1
-rw-r--r--compiler/rustc_mir/src/interpret/memory.rs4
-rw-r--r--compiler/rustc_mir/src/interpret/terminator.rs4
-rw-r--r--compiler/rustc_mir/src/interpret/traits.rs79
16 files changed, 145 insertions, 244 deletions
diff --git a/compiler/rustc_codegen_cranelift/src/common.rs b/compiler/rustc_codegen_cranelift/src/common.rs
index 488ff6e1349..a8a0bb52a24 100644
--- a/compiler/rustc_codegen_cranelift/src/common.rs
+++ b/compiler/rustc_codegen_cranelift/src/common.rs
@@ -233,7 +233,7 @@ pub(crate) struct FunctionCx<'m, 'clif, 'tcx: 'm> {
     pub(crate) module: &'m mut dyn Module,
     pub(crate) tcx: TyCtxt<'tcx>,
     pub(crate) pointer_type: Type, // Cached from module
-    pub(crate) vtables: FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), DataId>,
+    pub(crate) vtables: FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer>,
     pub(crate) constants_cx: ConstantCx,
 
     pub(crate) instance: Instance<'tcx>,
diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs
index 3ba12c4e96d..a87b3703949 100644
--- a/compiler/rustc_codegen_cranelift/src/constant.rs
+++ b/compiler/rustc_codegen_cranelift/src/constant.rs
@@ -249,7 +249,7 @@ pub(crate) fn codegen_const_value<'tcx>(
     }
 }
 
-fn pointer_for_allocation<'tcx>(
+pub(crate) fn pointer_for_allocation<'tcx>(
     fx: &mut FunctionCx<'_, '_, 'tcx>,
     alloc: &'tcx Allocation,
 ) -> crate::pointer::Pointer {
diff --git a/compiler/rustc_codegen_cranelift/src/lib.rs b/compiler/rustc_codegen_cranelift/src/lib.rs
index 6aadaf8a7ca..b817bf4aff7 100644
--- a/compiler/rustc_codegen_cranelift/src/lib.rs
+++ b/compiler/rustc_codegen_cranelift/src/lib.rs
@@ -98,7 +98,7 @@ mod prelude {
     pub(crate) use cranelift_codegen::isa::{self, CallConv};
     pub(crate) use cranelift_codegen::Context;
     pub(crate) use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext, Variable};
-    pub(crate) use cranelift_module::{self, DataContext, DataId, FuncId, Linkage, Module};
+    pub(crate) use cranelift_module::{self, DataContext, FuncId, Linkage, Module};
 
     pub(crate) use crate::abi::*;
     pub(crate) use crate::base::{codegen_operand, codegen_place};
diff --git a/compiler/rustc_codegen_cranelift/src/unsize.rs b/compiler/rustc_codegen_cranelift/src/unsize.rs
index 042583cd572..b9d379c6117 100644
--- a/compiler/rustc_codegen_cranelift/src/unsize.rs
+++ b/compiler/rustc_codegen_cranelift/src/unsize.rs
@@ -31,9 +31,7 @@ pub(crate) fn unsized_info<'tcx>(
             // change to the vtable.
             old_info.expect("unsized_info: missing old info for trait upcast")
         }
-        (_, &ty::Dynamic(ref data, ..)) => {
-            crate::vtable::get_vtable(fx, fx.layout_of(source), data.principal())
-        }
+        (_, &ty::Dynamic(ref data, ..)) => crate::vtable::get_vtable(fx, source, data.principal()),
         _ => bug!("unsized_info: invalid unsizing {:?} -> {:?}", source, target),
     }
 }
diff --git a/compiler/rustc_codegen_cranelift/src/vtable.rs b/compiler/rustc_codegen_cranelift/src/vtable.rs
index 4d1ee47b41e..12f7092d935 100644
--- a/compiler/rustc_codegen_cranelift/src/vtable.rs
+++ b/compiler/rustc_codegen_cranelift/src/vtable.rs
@@ -4,7 +4,7 @@
 // FIXME dedup this logic between miri, cg_llvm and cg_clif
 
 use crate::prelude::*;
-use ty::VtblEntry;
+use super::constant::pointer_for_allocation;
 
 fn vtable_memflags() -> MemFlags {
     let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
@@ -66,105 +66,19 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
 
 pub(crate) fn get_vtable<'tcx>(
     fx: &mut FunctionCx<'_, '_, 'tcx>,
-    layout: TyAndLayout<'tcx>,
+    ty: Ty<'tcx>,
     trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
 ) -> Value {
-    let data_id = if let Some(data_id) = fx.vtables.get(&(layout.ty, trait_ref)) {
-        *data_id
+    let vtable_ptr = if let Some(vtable_ptr) = fx.vtables.get(&(ty, trait_ref)) {
+        *vtable_ptr
     } else {
-        let data_id = build_vtable(fx, layout, trait_ref);
-        fx.vtables.insert((layout.ty, trait_ref), data_id);
-        data_id
-    };
-
-    let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
-    fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
-}
-
-fn build_vtable<'tcx>(
-    fx: &mut FunctionCx<'_, '_, 'tcx>,
-    layout: TyAndLayout<'tcx>,
-    trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
-) -> DataId {
-    let tcx = fx.tcx;
-    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
+        let vtable_alloc_id = fx.tcx.vtable_allocation(ty, trait_ref);
+        let vtable_allocation = fx.tcx.global_alloc(vtable_alloc_id).unwrap_memory();
+        let vtable_ptr = pointer_for_allocation(fx, vtable_allocation);
 
-    let drop_in_place_fn = import_function(
-        tcx,
-        fx.module,
-        Instance::resolve_drop_in_place(tcx, layout.ty).polymorphize(fx.tcx),
-    );
-
-    let vtable_entries = if let Some(trait_ref) = trait_ref {
-        tcx.vtable_entries(trait_ref.with_self_ty(tcx, layout.ty))
-    } else {
-        ty::COMMON_VTABLE_ENTRIES
+        fx.vtables.insert((ty, trait_ref), vtable_ptr);
+        vtable_ptr
     };
 
-    let mut data_ctx = DataContext::new();
-    let mut data = ::std::iter::repeat(0u8)
-        .take(vtable_entries.len() * usize_size)
-        .collect::<Vec<u8>>()
-        .into_boxed_slice();
-
-    for (idx, entry) in vtable_entries.iter().enumerate() {
-        match entry {
-            VtblEntry::MetadataSize => {
-                write_usize(fx.tcx, &mut data, idx, layout.size.bytes());
-            }
-            VtblEntry::MetadataAlign => {
-                write_usize(fx.tcx, &mut data, idx, layout.align.abi.bytes());
-            }
-            VtblEntry::MetadataDropInPlace | VtblEntry::Vacant | VtblEntry::Method(_, _) => {}
-        }
-    }
-    data_ctx.define(data);
-
-    for (idx, entry) in vtable_entries.iter().enumerate() {
-        match entry {
-            VtblEntry::MetadataDropInPlace => {
-                let func_ref = fx.module.declare_func_in_data(drop_in_place_fn, &mut data_ctx);
-                data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
-            }
-            VtblEntry::Method(def_id, substs) => {
-                let func_id = import_function(
-                    tcx,
-                    fx.module,
-                    Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), *def_id, substs)
-                        .unwrap()
-                        .polymorphize(fx.tcx),
-                );
-                let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
-                data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
-            }
-            VtblEntry::MetadataSize | VtblEntry::MetadataAlign | VtblEntry::Vacant => {}
-        }
-    }
-
-    data_ctx.set_align(fx.tcx.data_layout.pointer_align.pref.bytes());
-
-    let data_id = fx.module.declare_anonymous_data(false, false).unwrap();
-
-    fx.module.define_data(data_id, &data_ctx).unwrap();
-
-    data_id
-}
-
-fn write_usize(tcx: TyCtxt<'_>, buf: &mut [u8], idx: usize, num: u64) {
-    let pointer_size =
-        tcx.layout_of(ParamEnv::reveal_all().and(tcx.types.usize)).unwrap().size.bytes() as usize;
-    let target = &mut buf[idx * pointer_size..(idx + 1) * pointer_size];
-
-    match tcx.data_layout.endian {
-        rustc_target::abi::Endian::Little => match pointer_size {
-            4 => target.copy_from_slice(&(num as u32).to_le_bytes()),
-            8 => target.copy_from_slice(&(num as u64).to_le_bytes()),
-            _ => todo!("pointer size {} is not yet supported", pointer_size),
-        },
-        rustc_target::abi::Endian::Big => match pointer_size {
-            4 => target.copy_from_slice(&(num as u32).to_be_bytes()),
-            8 => target.copy_from_slice(&(num as u64).to_be_bytes()),
-            _ => todo!("pointer size {} is not yet supported", pointer_size),
-        },
-    }
+    vtable_ptr.get_addr(fx)
 }
diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs
index 58af9d4cd04..df5ad8ecc27 100644
--- a/compiler/rustc_codegen_llvm/src/common.rs
+++ b/compiler/rustc_codegen_llvm/src/common.rs
@@ -282,6 +282,10 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
         }
     }
 
+    fn const_data_from_alloc(&self, alloc: &Allocation) -> Self::Value {
+        const_alloc_to_llvm(self, alloc)
+    }
+
     fn from_const_alloc(
         &self,
         layout: TyAndLayout<'tcx>,
diff --git a/compiler/rustc_codegen_ssa/src/meth.rs b/compiler/rustc_codegen_ssa/src/meth.rs
index 4f0de729704..63245a94c8e 100644
--- a/compiler/rustc_codegen_ssa/src/meth.rs
+++ b/compiler/rustc_codegen_ssa/src/meth.rs
@@ -1,6 +1,6 @@
 use crate::traits::*;
 
-use rustc_middle::ty::{self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES};
+use rustc_middle::ty::{self, Ty};
 use rustc_target::abi::call::FnAbi;
 
 #[derive(Copy, Clone, Debug)]
@@ -70,48 +70,13 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
         return val;
     }
 
-    // Not in the cache; build it.
-    let nullptr = cx.const_null(cx.type_i8p_ext(cx.data_layout().instruction_address_space));
-
-    let vtable_entries = if let Some(trait_ref) = trait_ref {
-        tcx.vtable_entries(trait_ref.with_self_ty(tcx, ty))
-    } else {
-        COMMON_VTABLE_ENTRIES
-    };
-
-    let layout = cx.layout_of(ty);
-    // /////////////////////////////////////////////////////////////////////////////////////////////
-    // If you touch this code, be sure to also make the corresponding changes to
-    // `get_vtable` in `rust_mir/interpret/traits.rs`.
-    // /////////////////////////////////////////////////////////////////////////////////////////////
-    let components: Vec<_> = vtable_entries
-        .iter()
-        .map(|entry| match entry {
-            VtblEntry::MetadataDropInPlace => {
-                cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty))
-            }
-            VtblEntry::MetadataSize => cx.const_usize(layout.size.bytes()),
-            VtblEntry::MetadataAlign => cx.const_usize(layout.align.abi.bytes()),
-            VtblEntry::Vacant => nullptr,
-            VtblEntry::Method(def_id, substs) => cx.get_fn_addr(
-                ty::Instance::resolve_for_vtable(
-                    cx.tcx(),
-                    ty::ParamEnv::reveal_all(),
-                    *def_id,
-                    substs,
-                )
-                .unwrap()
-                .polymorphize(cx.tcx()),
-            ),
-        })
-        .collect();
-
-    let vtable_const = cx.const_struct(&components, false);
+    let vtable_alloc_id = tcx.vtable_allocation(ty, trait_ref);
+    let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory();
+    let vtable_const = cx.const_data_from_alloc(vtable_allocation);
     let align = cx.data_layout().pointer_align.abi;
     let vtable = cx.static_addr_of(vtable_const, align, Some("vtable"));
 
     cx.create_vtable_metadata(ty, vtable);
-
     cx.vtables().borrow_mut().insert((ty, trait_ref), vtable);
     vtable
 }
diff --git a/compiler/rustc_codegen_ssa/src/traits/consts.rs b/compiler/rustc_codegen_ssa/src/traits/consts.rs
index 6b58dea794b..20f66187123 100644
--- a/compiler/rustc_codegen_ssa/src/traits/consts.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/consts.rs
@@ -26,6 +26,8 @@ pub trait ConstMethods<'tcx>: BackendTypes {
     fn const_to_opt_uint(&self, v: Self::Value) -> Option<u64>;
     fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option<u128>;
 
+    fn const_data_from_alloc(&self, alloc: &Allocation) -> Self::Value;
+
     fn scalar_to_backend(&self, cv: Scalar, layout: &abi::Scalar, llty: Self::Type) -> Self::Value;
     fn from_const_alloc(
         &self,
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index 892996189a6..16d2ac262d0 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -11,7 +11,7 @@ use crate::middle;
 use crate::middle::cstore::{CrateStoreDyn, EncodedMetadata};
 use crate::middle::resolve_lifetime::{self, LifetimeScopeForPath, ObjectLifetimeDefault};
 use crate::middle::stability;
-use crate::mir::interpret::{self, Allocation, ConstValue, Scalar};
+use crate::mir::interpret::{self, AllocId, Allocation, ConstValue, Scalar};
 use crate::mir::{Body, Field, Local, Place, PlaceElem, ProjectionKind, Promoted};
 use crate::thir::Thir;
 use crate::traits;
@@ -1044,6 +1044,9 @@ pub struct GlobalCtxt<'tcx> {
     output_filenames: Arc<OutputFilenames>,
 
     pub main_def: Option<MainDefinition>,
+
+    pub(super) vtables_cache:
+        Lock<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), AllocId>>,
 }
 
 impl<'tcx> TyCtxt<'tcx> {
@@ -1201,6 +1204,7 @@ impl<'tcx> TyCtxt<'tcx> {
             alloc_map: Lock::new(interpret::AllocMap::new()),
             output_filenames: Arc::new(output_filenames),
             main_def: resolutions.main_def,
+            vtables_cache: Default::default(),
         }
     }
 
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index a2abbec7492..859a940a625 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -18,6 +18,7 @@ pub use adt::*;
 pub use assoc::*;
 pub use closure::*;
 pub use generics::*;
+pub use vtable::*;
 
 use crate::hir::exports::ExportMap;
 use crate::ich::StableHashingContext;
@@ -94,6 +95,7 @@ pub mod relate;
 pub mod subst;
 pub mod trait_def;
 pub mod util;
+pub mod vtable;
 pub mod walk;
 
 mod adt;
@@ -2009,19 +2011,3 @@ impl<'tcx> fmt::Debug for SymbolName<'tcx> {
         fmt::Display::fmt(&self.name, fmt)
     }
 }
-
-#[derive(Clone, Copy, Debug, PartialEq, HashStable)]
-pub enum VtblEntry<'tcx> {
-    MetadataDropInPlace,
-    MetadataSize,
-    MetadataAlign,
-    Vacant,
-    Method(DefId, SubstsRef<'tcx>),
-}
-
-pub const COMMON_VTABLE_ENTRIES: &[VtblEntry<'_>] =
-    &[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
-
-pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
-pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
-pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
diff --git a/compiler/rustc_middle/src/ty/vtable.rs b/compiler/rustc_middle/src/ty/vtable.rs
new file mode 100644
index 00000000000..3a35d8c88a4
--- /dev/null
+++ b/compiler/rustc_middle/src/ty/vtable.rs
@@ -0,0 +1,106 @@
+use std::convert::TryFrom;
+
+use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar};
+use crate::ty::fold::TypeFoldable;
+use crate::ty::{self, DefId, SubstsRef, Ty, TyCtxt};
+use rustc_ast::Mutability;
+
+#[derive(Clone, Copy, Debug, PartialEq, HashStable)]
+pub enum VtblEntry<'tcx> {
+    MetadataDropInPlace,
+    MetadataSize,
+    MetadataAlign,
+    Vacant,
+    Method(DefId, SubstsRef<'tcx>),
+}
+
+pub const COMMON_VTABLE_ENTRIES: &[VtblEntry<'_>] =
+    &[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
+
+pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
+pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
+pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
+
+impl<'tcx> TyCtxt<'tcx> {
+    /// Retrieves an allocation that represents the contents of a vtable.
+    /// There's a cache within `TyCtxt` so it will be deduplicated.
+    pub fn vtable_allocation(
+        self,
+        ty: Ty<'tcx>,
+        poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
+    ) -> AllocId {
+        let tcx = self;
+        let vtables_cache = tcx.vtables_cache.lock();
+        if let Some(alloc_id) = vtables_cache.get(&(ty, poly_trait_ref)).cloned() {
+            return alloc_id;
+        }
+        drop(vtables_cache);
+
+        // See https://github.com/rust-lang/rust/pull/86475#discussion_r655162674
+        assert!(
+            !ty.needs_subst() && !poly_trait_ref.map_or(false, |trait_ref| trait_ref.needs_subst())
+        );
+        let param_env = ty::ParamEnv::reveal_all();
+        let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
+            let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
+            let trait_ref = tcx.erase_regions(trait_ref);
+
+            tcx.vtable_entries(trait_ref)
+        } else {
+            COMMON_VTABLE_ENTRIES
+        };
+
+        let layout =
+            tcx.layout_of(param_env.and(ty)).expect("failed to build vtable representation");
+        assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
+        let size = layout.size.bytes();
+        let align = layout.align.abi.bytes();
+
+        let ptr_size = tcx.data_layout.pointer_size;
+        let ptr_align = tcx.data_layout.pointer_align.abi;
+
+        let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
+        let mut vtable = Allocation::uninit(vtable_size, ptr_align);
+
+        // No need to do any alignment checks on the memory accesses below, because we know the
+        // allocation is correctly aligned as we created it above. Also we're only offsetting by
+        // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
+
+        for (idx, entry) in vtable_entries.iter().enumerate() {
+            let idx: u64 = u64::try_from(idx).unwrap();
+            let scalar = match entry {
+                VtblEntry::MetadataDropInPlace => {
+                    let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
+                    let fn_alloc_id = tcx.create_fn_alloc(instance);
+                    let fn_ptr = Pointer::from(fn_alloc_id);
+                    fn_ptr.into()
+                }
+                VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
+                VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
+                VtblEntry::Vacant => continue,
+                VtblEntry::Method(def_id, substs) => {
+                    // See https://github.com/rust-lang/rust/pull/86475#discussion_r655162674
+                    assert!(!substs.needs_subst());
+
+                    // Prepare the fn ptr we write into the vtable.
+                    let instance =
+                        ty::Instance::resolve_for_vtable(tcx, param_env, *def_id, substs)
+                            .expect("resolution failed during building vtable representation")
+                            .polymorphize(tcx);
+                    let fn_alloc_id = tcx.create_fn_alloc(instance);
+                    let fn_ptr = Pointer::from(fn_alloc_id);
+                    fn_ptr.into()
+                }
+            };
+            vtable
+                .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
+                .expect("failed to build vtable representation");
+        }
+
+        vtable.mutability = Mutability::Not;
+        let alloc_id = tcx.create_memory_alloc(tcx.intern_const_alloc(vtable));
+        let mut vtables_cache = self.vtables_cache.lock();
+        vtables_cache.insert((ty, poly_trait_ref), alloc_id);
+        alloc_id
+    }
+}
diff --git a/compiler/rustc_mir/src/interpret/eval_context.rs b/compiler/rustc_mir/src/interpret/eval_context.rs
index 6f7519e6156..801e4b1e478 100644
--- a/compiler/rustc_mir/src/interpret/eval_context.rs
+++ b/compiler/rustc_mir/src/interpret/eval_context.rs
@@ -2,7 +2,6 @@ use std::cell::Cell;
 use std::fmt;
 use std::mem;
 
-use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
 use rustc_index::vec::IndexVec;
@@ -40,10 +39,6 @@ pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
 
     /// The virtual memory system.
     pub memory: Memory<'mir, 'tcx, M>,
-
-    /// A cache for deduplicating vtables
-    pub(super) vtables:
-        FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer<M::PointerTag>>,
 }
 
 // The Phantomdata exists to prevent this type from being `Send`. If it were sent across a thread
@@ -393,7 +388,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             tcx: tcx.at(root_span),
             param_env,
             memory: Memory::new(tcx, memory_extra),
-            vtables: FxHashMap::default(),
         }
     }
 
diff --git a/compiler/rustc_mir/src/interpret/intern.rs b/compiler/rustc_mir/src/interpret/intern.rs
index 23c0fe97c5f..d5fec457fa1 100644
--- a/compiler/rustc_mir/src/interpret/intern.rs
+++ b/compiler/rustc_mir/src/interpret/intern.rs
@@ -107,7 +107,6 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
     match kind {
         MemoryKind::Stack
         | MemoryKind::Machine(const_eval::MemoryKind::Heap)
-        | MemoryKind::Vtable
         | MemoryKind::CallerLocation => {}
     }
     // Set allocation mutability as appropriate. This is used by LLVM to put things into
diff --git a/compiler/rustc_mir/src/interpret/memory.rs b/compiler/rustc_mir/src/interpret/memory.rs
index 77de19ac674..94506808a68 100644
--- a/compiler/rustc_mir/src/interpret/memory.rs
+++ b/compiler/rustc_mir/src/interpret/memory.rs
@@ -27,8 +27,6 @@ use crate::util::pretty;
 pub enum MemoryKind<T> {
     /// Stack memory. Error if deallocated except during a stack pop.
     Stack,
-    /// Memory backing vtables. Error if ever deallocated.
-    Vtable,
     /// Memory allocated by `caller_location` intrinsic. Error if ever deallocated.
     CallerLocation,
     /// Additional memory kinds a machine wishes to distinguish from the builtin ones.
@@ -40,7 +38,6 @@ impl<T: MayLeak> MayLeak for MemoryKind<T> {
     fn may_leak(self) -> bool {
         match self {
             MemoryKind::Stack => false,
-            MemoryKind::Vtable => true,
             MemoryKind::CallerLocation => true,
             MemoryKind::Machine(k) => k.may_leak(),
         }
@@ -51,7 +48,6 @@ impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             MemoryKind::Stack => write!(f, "stack variable"),
-            MemoryKind::Vtable => write!(f, "vtable"),
             MemoryKind::CallerLocation => write!(f, "caller location"),
             MemoryKind::Machine(m) => write!(f, "{}", m),
         }
diff --git a/compiler/rustc_mir/src/interpret/terminator.rs b/compiler/rustc_mir/src/interpret/terminator.rs
index a5bdeb55e78..aea9933b337 100644
--- a/compiler/rustc_mir/src/interpret/terminator.rs
+++ b/compiler/rustc_mir/src/interpret/terminator.rs
@@ -459,7 +459,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 };
                 // Find and consult vtable
                 let vtable = receiver_place.vtable();
-                let drop_fn = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;
+                let fn_val = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;
 
                 // `*mut receiver_place.layout.ty` is almost the layout that we
                 // want for args[0]: We have to project to field 0 because we want
@@ -472,7 +472,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
                 trace!("Patched self operand to {:#?}", args[0]);
                 // recurse with concrete function
-                self.eval_fn_call(drop_fn, caller_abi, &args, ret, unwind)
+                self.eval_fn_call(fn_val, caller_abi, &args, ret, unwind)
             }
         }
     }
diff --git a/compiler/rustc_mir/src/interpret/traits.rs b/compiler/rustc_mir/src/interpret/traits.rs
index 072c252be2f..5332e615bc8 100644
--- a/compiler/rustc_mir/src/interpret/traits.rs
+++ b/compiler/rustc_mir/src/interpret/traits.rs
@@ -2,13 +2,13 @@ use std::convert::TryFrom;
 
 use rustc_middle::mir::interpret::{InterpResult, Pointer, PointerArithmetic, Scalar};
 use rustc_middle::ty::{
-    self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
+    self, Ty, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
     COMMON_VTABLE_ENTRIES_DROPINPLACE, COMMON_VTABLE_ENTRIES_SIZE,
 };
-use rustc_target::abi::{Align, LayoutOf, Size};
+use rustc_target::abi::{Align, Size};
 
 use super::util::ensure_monomorphic_enough;
-use super::{FnVal, InterpCx, Machine, MemoryKind};
+use super::{FnVal, InterpCx, Machine};
 
 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     /// Creates a dynamic vtable for the given type and vtable origin. This is used only for
@@ -30,78 +30,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         ensure_monomorphic_enough(*self.tcx, ty)?;
         ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?;
 
-        if let Some(&vtable) = self.vtables.get(&(ty, poly_trait_ref)) {
-            // This means we guarantee that there are no duplicate vtables, we will
-            // always use the same vtable for the same (Type, Trait) combination.
-            // That's not what happens in rustc, but emulating per-crate deduplication
-            // does not sound like it actually makes anything any better.
-            return Ok(vtable);
-        }
-
-        let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
-            let trait_ref = poly_trait_ref.with_self_ty(*self.tcx, ty);
-            let trait_ref = self.tcx.erase_regions(trait_ref);
-
-            self.tcx.vtable_entries(trait_ref)
-        } else {
-            COMMON_VTABLE_ENTRIES
-        };
-
-        let layout = self.layout_of(ty)?;
-        assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
-        let size = layout.size.bytes();
-        let align = layout.align.abi.bytes();
-
-        let tcx = *self.tcx;
-        let ptr_size = self.pointer_size();
-        let ptr_align = tcx.data_layout.pointer_align.abi;
-        // /////////////////////////////////////////////////////////////////////////////////////////
-        // If you touch this code, be sure to also make the corresponding changes to
-        // `get_vtable` in `rust_codegen_llvm/meth.rs`.
-        // /////////////////////////////////////////////////////////////////////////////////////////
-        let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
-        let vtable = self.memory.allocate(vtable_size, ptr_align, MemoryKind::Vtable);
-
-        let drop = Instance::resolve_drop_in_place(tcx, ty);
-        let drop = self.memory.create_fn_alloc(FnVal::Instance(drop));
-
-        // No need to do any alignment checks on the memory accesses below, because we know the
-        // allocation is correctly aligned as we created it above. Also we're only offsetting by
-        // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
-        let scalars = vtable_entries
-            .iter()
-            .map(|entry| -> InterpResult<'tcx, _> {
-                match entry {
-                    VtblEntry::MetadataDropInPlace => Ok(Some(drop.into())),
-                    VtblEntry::MetadataSize => Ok(Some(Scalar::from_uint(size, ptr_size).into())),
-                    VtblEntry::MetadataAlign => Ok(Some(Scalar::from_uint(align, ptr_size).into())),
-                    VtblEntry::Vacant => Ok(None),
-                    VtblEntry::Method(def_id, substs) => {
-                        // Prepare the fn ptr we write into the vtable.
-                        let instance =
-                            ty::Instance::resolve_for_vtable(tcx, self.param_env, *def_id, substs)
-                                .ok_or_else(|| err_inval!(TooGeneric))?;
-                        let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
-                        Ok(Some(fn_ptr.into()))
-                    }
-                }
-            })
-            .collect::<Result<Vec<_>, _>>()?;
-        let mut vtable_alloc =
-            self.memory.get_mut(vtable.into(), vtable_size, ptr_align)?.expect("not a ZST");
-        for (idx, scalar) in scalars.into_iter().enumerate() {
-            if let Some(scalar) = scalar {
-                let idx: u64 = u64::try_from(idx).unwrap();
-                vtable_alloc.write_ptr_sized(ptr_size * idx, scalar)?;
-            }
-        }
-
-        M::after_static_mem_initialized(self, vtable, vtable_size)?;
+        let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref);
 
-        self.memory.mark_immutable(vtable.alloc_id)?;
-        assert!(self.vtables.insert((ty, poly_trait_ref), vtable).is_none());
+        let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_allocation))?;
 
-        Ok(vtable)
+        Ok(vtable_ptr)
     }
 
     /// Resolves the function at the specified slot in the provided