-rw-r--r--  compiler/rustc_codegen_cranelift/src/vtable.rs       2
-rw-r--r--  compiler/rustc_codegen_ssa/src/meth.rs                2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/traits.rs     2
-rw-r--r--  compiler/rustc_middle/src/query/mod.rs                7
-rw-r--r--  compiler/rustc_middle/src/ty/mod.rs                   1
-rw-r--r--  compiler/rustc_middle/src/ty/print/pretty.rs          1
-rw-r--r--  compiler/rustc_middle/src/ty/vtable.rs              125
-rw-r--r--  compiler/rustc_query_impl/src/keys.rs                11
-rw-r--r--  src/test/incremental/reorder_vtable.rs               16
9 files changed, 98 insertions, 69 deletions
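
The call-site change in the first three files below is mechanical: a query takes a single key, so the former two-argument `TyCtxt` method now receives a `(Ty, Option<PolyExistentialTraitRef>)` tuple. A minimal before/after sketch of the pattern (`tcx`, `ty` and `trait_ref` stand for whatever the surrounding code already has in scope):

    // Before: inherent method on TyCtxt with an untracked internal cache.
    let alloc_id = tcx.vtable_allocation(ty, trait_ref);

    // After: query accessor generated from the rustc_queries! declaration further
    // down, keyed by a single tuple and cached/dep-tracked by the query system.
    let alloc_id = tcx.vtable_allocation((ty, trait_ref));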
diff --git a/compiler/rustc_codegen_cranelift/src/vtable.rs b/compiler/rustc_codegen_cranelift/src/vtable.rs
index f97d416b66f..36b3725ef42 100644
--- a/compiler/rustc_codegen_cranelift/src/vtable.rs
+++ b/compiler/rustc_codegen_cranelift/src/vtable.rs
@@ -68,7 +68,7 @@ pub(crate) fn get_vtable<'tcx>(
     ty: Ty<'tcx>,
     trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
 ) -> Value {
-    let alloc_id = fx.tcx.vtable_allocation(ty, trait_ref);
+    let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
     let data_id =
         data_id_for_alloc_id(&mut fx.constants_cx, &mut *fx.module, alloc_id, Mutability::Not);
     let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
diff --git a/compiler/rustc_codegen_ssa/src/meth.rs b/compiler/rustc_codegen_ssa/src/meth.rs
index efeec5b7284..3267d3206f7 100644
--- a/compiler/rustc_codegen_ssa/src/meth.rs
+++ b/compiler/rustc_codegen_ssa/src/meth.rs
@@ -72,7 +72,7 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
         return val;
     }
 
-    let vtable_alloc_id = tcx.vtable_allocation(ty, trait_ref);
+    let vtable_alloc_id = tcx.vtable_allocation((ty, trait_ref));
     let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory();
     let vtable_const = cx.const_data_from_alloc(vtable_allocation);
     let align = cx.data_layout().pointer_align.abi;
diff --git a/compiler/rustc_const_eval/src/interpret/traits.rs b/compiler/rustc_const_eval/src/interpret/traits.rs
index a6ba00ec695..131674decc9 100644
--- a/compiler/rustc_const_eval/src/interpret/traits.rs
+++ b/compiler/rustc_const_eval/src/interpret/traits.rs
@@ -30,7 +30,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         ensure_monomorphic_enough(*self.tcx, ty)?;
         ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?;
 
-        let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref);
+        let vtable_allocation = self.tcx.vtable_allocation((ty, poly_trait_ref));
 
         let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_allocation))?;
 
diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index b4f7a9fa8e9..40d049eb72c 100644
--- a/compiler/rustc_middle/src/query/mod.rs
+++ b/compiler/rustc_middle/src/query/mod.rs
@@ -1012,6 +1012,13 @@ rustc_queries! {
             key.1, key.0 }
     }
 
+    query vtable_allocation(key: (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>)) -> mir::interpret::AllocId {
+        desc { |tcx| "vtable const allocation for <{} as {}>",
+            key.0,
+            key.1.map(|trait_ref| format!("{}", trait_ref)).unwrap_or("_".to_owned())
+        }
+    }
+
     query codegen_fulfill_obligation(
         key: (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)
     ) -> Result<ImplSource<'tcx, ()>, ErrorReported> {
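
The `desc` closure supplies the human-readable label the query system uses in diagnostics such as cycle errors and the query stack printed on an ICE. For the `impl Foo for u32` case exercised by the test at the end of this diff it renders roughly as follows (illustrative only; the exact text depends on the pretty-printer):

    // key = (u32, Some(Foo))  ->  "vtable const allocation for <u32 as Foo>"
    // key = (u32, None)       ->  "vtable const allocation for <u32 as _>"
    //                             (no principal trait: only drop/size/align entries)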
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index 8991ad32ae8..5e8630eb966 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -2050,6 +2050,7 @@ pub fn provide(providers: &mut ty::query::Providers) {
         trait_impls_of: trait_def::trait_impls_of_provider,
         type_uninhabited_from: inhabitedness::type_uninhabited_from,
         const_param_default: consts::const_param_default,
+        vtable_allocation: vtable::vtable_allocation_provider,
         ..*providers
     };
 }
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs
index f8a476266d6..c60c76c4710 100644
--- a/compiler/rustc_middle/src/ty/print/pretty.rs
+++ b/compiler/rustc_middle/src/ty/print/pretty.rs
@@ -2192,6 +2192,7 @@ forward_display_to_print! {
     // because `for<'tcx>` isn't possible yet.
     ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>,
     ty::Binder<'tcx, ty::TraitRef<'tcx>>,
+    ty::Binder<'tcx, ty::ExistentialTraitRef<'tcx>>,
     ty::Binder<'tcx, TraitRefPrintOnlyTraitPath<'tcx>>,
     ty::Binder<'tcx, ty::FnSig<'tcx>>,
     ty::Binder<'tcx, ty::TraitPredicate<'tcx>>,
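
This one-line addition is presumably what lets the new query description format a bound existential trait ref with `{}`: `ty::PolyExistentialTraitRef<'tcx>` is `ty::Binder<'tcx, ty::ExistentialTraitRef<'tcx>>`, and `forward_display_to_print!` derives a `Display` impl for that binder from the pretty-printing machinery. A sketch mirroring the `desc` closure above:

    // poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>; formatting the
    // binder with "{}" goes through the forward_display_to_print! entry added here.
    let label = poly_trait_ref
        .map(|trait_ref| format!("{}", trait_ref))
        .unwrap_or("_".to_owned());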
diff --git a/compiler/rustc_middle/src/ty/vtable.rs b/compiler/rustc_middle/src/ty/vtable.rs
index 967149dc53c..f766cad2b3d 100644
--- a/compiler/rustc_middle/src/ty/vtable.rs
+++ b/compiler/rustc_middle/src/ty/vtable.rs
@@ -43,77 +43,72 @@ pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
 pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
 pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
 
-impl<'tcx> TyCtxt<'tcx> {
-    /// Retrieves an allocation that represents the contents of a vtable.
-    /// There's a cache within `TyCtxt` so it will be deduplicated.
-    pub fn vtable_allocation(
-        self,
-        ty: Ty<'tcx>,
-        poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
-    ) -> AllocId {
-        let tcx = self;
+/// Retrieves an allocation that represents the contents of a vtable.
+/// Since this is a query, allocations are cached and not duplicated.
+pub(super) fn vtable_allocation_provider<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    key: (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>),
+) -> AllocId {
+    let (ty, poly_trait_ref) = key;
 
-        let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
-            let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
-            let trait_ref = tcx.erase_regions(trait_ref);
+    let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
+        let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
+        let trait_ref = tcx.erase_regions(trait_ref);
 
-            tcx.vtable_entries(trait_ref)
-        } else {
-            COMMON_VTABLE_ENTRIES
-        };
-
-        let layout = tcx
-            .layout_of(ty::ParamEnv::reveal_all().and(ty))
-            .expect("failed to build vtable representation");
-        assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
-        let size = layout.size.bytes();
-        let align = layout.align.abi.bytes();
+        tcx.vtable_entries(trait_ref)
+    } else {
+        COMMON_VTABLE_ENTRIES
+    };
 
-        let ptr_size = tcx.data_layout.pointer_size;
-        let ptr_align = tcx.data_layout.pointer_align.abi;
+    let layout = tcx
+        .layout_of(ty::ParamEnv::reveal_all().and(ty))
+        .expect("failed to build vtable representation");
+    assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
+    let size = layout.size.bytes();
+    let align = layout.align.abi.bytes();
 
-        let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
-        let mut vtable =
-            Allocation::uninit(vtable_size, ptr_align, /* panic_on_fail */ true).unwrap();
+    let ptr_size = tcx.data_layout.pointer_size;
+    let ptr_align = tcx.data_layout.pointer_align.abi;
 
-        // No need to do any alignment checks on the memory accesses below, because we know the
-        // allocation is correctly aligned as we created it above. Also we're only offsetting by
-        // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
+    let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
+    let mut vtable = Allocation::uninit(vtable_size, ptr_align, /* panic_on_fail */ true).unwrap();
 
-        for (idx, entry) in vtable_entries.iter().enumerate() {
-            let idx: u64 = u64::try_from(idx).unwrap();
-            let scalar = match entry {
-                VtblEntry::MetadataDropInPlace => {
-                    let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
-                    let fn_alloc_id = tcx.create_fn_alloc(instance);
-                    let fn_ptr = Pointer::from(fn_alloc_id);
-                    ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
-                }
-                VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
-                VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
-                VtblEntry::Vacant => continue,
-                VtblEntry::Method(instance) => {
-                    // Prepare the fn ptr we write into the vtable.
-                    let instance = instance.polymorphize(tcx);
-                    let fn_alloc_id = tcx.create_fn_alloc(instance);
-                    let fn_ptr = Pointer::from(fn_alloc_id);
-                    ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
-                }
-                VtblEntry::TraitVPtr(trait_ref) => {
-                    let super_trait_ref = trait_ref.map_bound(|trait_ref| {
-                        ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref)
-                    });
-                    let supertrait_alloc_id = self.vtable_allocation(ty, Some(super_trait_ref));
-                    let vptr = Pointer::from(supertrait_alloc_id);
-                    ScalarMaybeUninit::from_pointer(vptr, &tcx)
-                }
-            };
-            vtable
-                .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
-                .expect("failed to build vtable representation");
-        }
+    // No need to do any alignment checks on the memory accesses below, because we know the
+    // allocation is correctly aligned as we created it above. Also we're only offsetting by
+    // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
 
-        vtable.mutability = Mutability::Not;
-        tcx.create_memory_alloc(tcx.intern_const_alloc(vtable))
+    for (idx, entry) in vtable_entries.iter().enumerate() {
+        let idx: u64 = u64::try_from(idx).unwrap();
+        let scalar = match entry {
+            VtblEntry::MetadataDropInPlace => {
+                let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
+                let fn_alloc_id = tcx.create_fn_alloc(instance);
+                let fn_ptr = Pointer::from(fn_alloc_id);
+                ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
+            }
+            VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
+            VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
+            VtblEntry::Vacant => continue,
+            VtblEntry::Method(instance) => {
+                // Prepare the fn ptr we write into the vtable.
+                let instance = instance.polymorphize(tcx);
+                let fn_alloc_id = tcx.create_fn_alloc(instance);
+                let fn_ptr = Pointer::from(fn_alloc_id);
+                ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
+            }
+            VtblEntry::TraitVPtr(trait_ref) => {
+                let super_trait_ref = trait_ref
+                    .map_bound(|trait_ref| ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref));
+                let supertrait_alloc_id = tcx.vtable_allocation((ty, Some(super_trait_ref)));
+                let vptr = Pointer::from(supertrait_alloc_id);
+                ScalarMaybeUninit::from_pointer(vptr, &tcx)
+            }
+        };
+        vtable
+            .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
+            .expect("failed to build vtable representation");
     }
+
+    vtable.mutability = Mutability::Not;
+    tcx.create_memory_alloc(tcx.intern_const_alloc(vtable))
 }
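
For reference, the allocation built by this provider is an array of pointer-sized slots written at offset `ptr_size * idx`; combined with the `COMMON_VTABLE_ENTRIES_*` indices above, its shape is as follows (a sketch of the layout the loop produces, not a type that exists in the compiler):

    // slot 0 (COMMON_VTABLE_ENTRIES_DROPINPLACE): drop_in_place fn pointer
    // slot 1 (COMMON_VTABLE_ENTRIES_SIZE):        size of the concrete type
    // slot 2 (COMMON_VTABLE_ENTRIES_ALIGN):       alignment of the concrete type
    // slot 3.. : one pointer-sized slot per remaining VtblEntry:
    //            Method    -> fn pointer to the (polymorphized) method instance
    //            Vacant    -> skipped, slot left uninitialized
    //            TraitVPtr -> pointer to a supertrait vtable allocation, obtained
    //                         by a recursive vtable_allocation query call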
diff --git a/compiler/rustc_query_impl/src/keys.rs b/compiler/rustc_query_impl/src/keys.rs
index 563a3cf1438..34489287596 100644
--- a/compiler/rustc_query_impl/src/keys.rs
+++ b/compiler/rustc_query_impl/src/keys.rs
@@ -72,6 +72,17 @@ impl<'tcx> Key for mir::interpret::GlobalId<'tcx> {
     }
 }
 
+impl<'tcx> Key for (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
+    #[inline(always)]
+    fn query_crate_is_local(&self) -> bool {
+        true
+    }
+
+    fn default_span(&self, _: TyCtxt<'_>) -> Span {
+        DUMMY_SP
+    }
+}
+
 impl<'tcx> Key for mir::interpret::LitToConstInput<'tcx> {
     #[inline(always)]
     fn query_crate_is_local(&self) -> bool {
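
Any type used as a query key needs a `Key` impl so the query system knows how to attribute and dispatch it. Judging from the impl added here and its neighbours, the interface boils down to the following (a simplified sketch; the real trait lives in `rustc_query_impl::keys`):

    // What the new tuple key has to provide:
    pub trait Key {
        // Whether the query for this key is always computed in the local crate;
        // true here, as there is no cross-crate DefId to dispatch on.
        fn query_crate_is_local(&self) -> bool;
        // The span diagnostics should point at; DUMMY_SP because a
        // (Ty, trait ref) pair has no source location of its own.
        fn default_span(&self, tcx: TyCtxt<'_>) -> Span;
    }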
diff --git a/src/test/incremental/reorder_vtable.rs b/src/test/incremental/reorder_vtable.rs
index a550fe6f01c..8dacba63351 100644
--- a/src/test/incremental/reorder_vtable.rs
+++ b/src/test/incremental/reorder_vtable.rs
@@ -1,5 +1,10 @@
 // revisions:rpass1 rpass2
 
+// This test case makes sure that re-ordering the methods in a vtable will
+// trigger recompilation of codegen units that instantiate it.
+//
+// See https://github.com/rust-lang/rust/issues/89598
+
 trait Foo {
     #[cfg(rpass1)]
     fn method1(&self) -> u32;
@@ -16,12 +21,21 @@ impl Foo for u32 {
 }
 
 fn main() {
+    // Before #89598 was fixed, the vtable allocation would be cached during
+    // a MIR optimization pass and then the codegen pass for the main object
+    // file would not register a dependency on it (because of the missing
+    // dep-tracking).
+    //
+    // In the rpass2 session, the main object file would not be re-compiled,
+    // thus the mod1::foo(x) call would pass in an outdated vtable, while the
+    // mod1 object would expect the new, re-ordered vtable, resulting in a
+    // call to the wrong method.
     let x: &dyn Foo = &0u32;
     assert_eq!(mod1::foo(x), 17);
 }
 
 mod mod1 {
-    pub fn foo(x: &dyn super::Foo) -> u32 {
+    pub(super) fn foo(x: &dyn super::Foo) -> u32 {
         x.method1()
     }
 }
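
To make the failure mode described in the comments above concrete, here is a self-contained toy model (plain Rust, not rustc code; all names are made up) of the difference between a cache that records reader-to-input edges and one that does not:

    use std::collections::{HashMap, HashSet};

    /// One incremental session: the dependency edges recorded while outputs
    /// read their inputs.
    #[derive(Default)]
    struct Session {
        // input name -> outputs that read it during this session
        deps: HashMap<&'static str, HashSet<&'static str>>,
    }

    impl Session {
        /// A dep-tracked read: return the input's value and remember who read it.
        fn read(
            &mut self,
            reader: &'static str,
            input: &'static str,
            inputs: &HashMap<&'static str, String>,
        ) -> String {
            self.deps.entry(input).or_default().insert(reader);
            inputs[input].clone()
        }
    }

    fn main() {
        let mut inputs = HashMap::new();
        inputs.insert("vtable(u32 as Foo)", "method1,method2".to_string());

        // Session 1 (rpass1): codegen of the main object reads the vtable layout.
        let mut session1 = Session::default();
        let _main_o = session1.read("codegen(main.o)", "vtable(u32 as Foo)", &inputs);

        // The source changes between sessions: the vtable methods are reordered.
        inputs.insert("vtable(u32 as Foo)", "method2,method1".to_string());

        // Session 2 (rpass2): because the edge was recorded, main.o is marked
        // dirty and gets rebuilt against the new layout.
        let dirty = session1
            .deps
            .get("vtable(u32 as Foo)")
            .cloned()
            .unwrap_or_default();
        assert!(dirty.contains("codegen(main.o)"));

        // Without the edge (the pre-fix behaviour), `dirty` would be empty and the
        // stale main.o, compiled against the old method order, would be reused.
        println!("outputs to rebuild: {:?}", dirty);
    }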