about summary refs log tree commit diff
path: root/compiler/rustc_codegen_ssa/src
diff options
context:
space:
mode:
author    Jubilee <46493976+workingjubilee@users.noreply.github.com>    2024-03-11 09:29:38 -0700
committer GitHub <noreply@github.com>    2024-03-11 09:29:38 -0700
commit    028e2600c9500736e9d7eff3a5f3271cc4cbd56e (patch)
tree      c32a3654a9e40326464050fda99e53d7b53ee6c8 /compiler/rustc_codegen_ssa/src
parent    a11e6c38b5901b25ae8c541a7414d50843afa25d (diff)
parent    a7cd803d029d71ab4d111fca43ce33ba55fe9841 (diff)
download  rust-028e2600c9500736e9d7eff3a5f3271cc4cbd56e.tar.gz
          rust-028e2600c9500736e9d7eff3a5f3271cc4cbd56e.zip
Rollup merge of #122320 - erikdesjardins:vtable, r=nikic
Use ptradd for vtable indexing

Extension of #121665.

After this, the only remaining usages of GEP are [this](https://github.com/rust-lang/rust/blob/cd81f5b27ee00b49d413db50b5e6af871cebcf23/compiler/rustc_codegen_llvm/src/intrinsic.rs#L909-L920) kinda janky Emscripten EH code, which I'll change in a future PR, and array indexing / pointer offsets, where there isn't yet a canonical `ptradd` form. (Out of curiosity I tried converting the latter to `ptradd(ptr, mul(size, index))`, but that causes codegen regressions right now.)

r? `@nikic`
Diffstat (limited to 'compiler/rustc_codegen_ssa/src')
-rw-r--r--compiler/rustc_codegen_ssa/src/base.rs13
-rw-r--r--compiler/rustc_codegen_ssa/src/meth.rs19
2 files changed, 17 insertions, 15 deletions
diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs
index 5cba14a5dda..c316d19e041 100644
--- a/compiler/rustc_codegen_ssa/src/base.rs
+++ b/compiler/rustc_codegen_ssa/src/base.rs
@@ -165,14 +165,11 @@ pub fn unsized_info<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
                 cx.tcx().vtable_trait_upcasting_coercion_new_vptr_slot((source, target));
 
             if let Some(entry_idx) = vptr_entry_idx {
-                let ptr_ty = cx.type_ptr();
-                let ptr_align = cx.tcx().data_layout.pointer_align.abi;
-                let gep = bx.inbounds_gep(
-                    ptr_ty,
-                    old_info,
-                    &[bx.const_usize(u64::try_from(entry_idx).unwrap())],
-                );
-                let new_vptr = bx.load(ptr_ty, gep, ptr_align);
+                let ptr_size = bx.data_layout().pointer_size;
+                let ptr_align = bx.data_layout().pointer_align.abi;
+                let vtable_byte_offset = u64::try_from(entry_idx).unwrap() * ptr_size.bytes();
+                let gep = bx.inbounds_ptradd(old_info, bx.const_usize(vtable_byte_offset));
+                let new_vptr = bx.load(bx.type_ptr(), gep, ptr_align);
                 bx.nonnull_metadata(new_vptr);
                 // VTable loads are invariant.
                 bx.set_invariant_load(new_vptr);
diff --git a/compiler/rustc_codegen_ssa/src/meth.rs b/compiler/rustc_codegen_ssa/src/meth.rs
index 12146a54d3b..4f7dc9968a1 100644
--- a/compiler/rustc_codegen_ssa/src/meth.rs
+++ b/compiler/rustc_codegen_ssa/src/meth.rs
@@ -20,9 +20,13 @@ impl<'a, 'tcx> VirtualIndex {
         ty: Ty<'tcx>,
         fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
     ) -> Bx::Value {
-        // Load the data pointer from the object.
+        // Load the function pointer from the object.
         debug!("get_fn({llvtable:?}, {ty:?}, {self:?})");
+
         let llty = bx.fn_ptr_backend_type(fn_abi);
+        let ptr_size = bx.data_layout().pointer_size;
+        let ptr_align = bx.data_layout().pointer_align.abi;
+        let vtable_byte_offset = self.0 * ptr_size.bytes();
 
         if bx.cx().sess().opts.unstable_opts.virtual_function_elimination
             && bx.cx().sess().lto() == Lto::Fat
@@ -30,12 +34,10 @@ impl<'a, 'tcx> VirtualIndex {
             let typeid = bx
                 .typeid_metadata(typeid_for_trait_ref(bx.tcx(), expect_dyn_trait_in_self(ty)))
                 .unwrap();
-            let vtable_byte_offset = self.0 * bx.data_layout().pointer_size.bytes();
             let func = bx.type_checked_load(llvtable, vtable_byte_offset, typeid);
             func
         } else {
-            let ptr_align = bx.tcx().data_layout.pointer_align.abi;
-            let gep = bx.inbounds_gep(llty, llvtable, &[bx.const_usize(self.0)]);
+            let gep = bx.inbounds_ptradd(llvtable, bx.const_usize(vtable_byte_offset));
             let ptr = bx.load(llty, gep, ptr_align);
             bx.nonnull_metadata(ptr);
             // VTable loads are invariant.
@@ -53,9 +55,12 @@ impl<'a, 'tcx> VirtualIndex {
         debug!("get_int({:?}, {:?})", llvtable, self);
 
         let llty = bx.type_isize();
-        let usize_align = bx.tcx().data_layout.pointer_align.abi;
-        let gep = bx.inbounds_gep(llty, llvtable, &[bx.const_usize(self.0)]);
-        let ptr = bx.load(llty, gep, usize_align);
+        let ptr_size = bx.data_layout().pointer_size;
+        let ptr_align = bx.data_layout().pointer_align.abi;
+        let vtable_byte_offset = self.0 * ptr_size.bytes();
+
+        let gep = bx.inbounds_ptradd(llvtable, bx.const_usize(vtable_byte_offset));
+        let ptr = bx.load(llty, gep, ptr_align);
         // VTable loads are invariant.
         bx.set_invariant_load(ptr);
         ptr