author    Nikita Popov <nikita.ppv@gmail.com>  2021-07-04 18:53:04 +0200
committer Nikita Popov <nikita.ppv@gmail.com>  2021-07-09 22:14:44 +0200
commit    4560efe46c476cf794fa0ce4efb8db5aa55a3351
tree      945df75cbe56cff80f8e3f0e931a00dc7ce73ae1
parent    33e9a6b565ddd7f20a5fd3f455eb2f3109d41801
Pass type when creating load
This makes load generation compatible with opaque pointers.

The generation of nontemporal copies still accesses the pointer
element type; fixing this would require moving more code around.
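
In API terms, every load now names the loaded type up front instead of
recovering it from the pointer's pointee type, which ceases to exist under
opaque pointers. A minimal before/after sketch, reusing `bx`, `gep`,
`ptr_align`, and `fn_abi` from the meth.rs hunk below:

    // Before: the result type was implied by the pointee type of `gep`,
    // which is unavailable once LLVM pointers are opaque.
    let ptr = bx.load(gep, ptr_align);

    // After: the caller states the type explicitly and can reuse it for
    // the (now transitional) pointercast.
    let llty = bx.fn_ptr_backend_type(fn_abi);
    let ptr = bx.load(llty, gep, ptr_align);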
Diffstat (limited to 'compiler/rustc_codegen_ssa'):

 compiler/rustc_codegen_ssa/src/meth.rs           | 10
 compiler/rustc_codegen_ssa/src/mir/block.rs      | 14
 compiler/rustc_codegen_ssa/src/mir/operand.rs    |  9
 compiler/rustc_codegen_ssa/src/mir/place.rs      |  2
 compiler/rustc_codegen_ssa/src/traits/builder.rs |  4

 5 files changed, 26 insertions(+), 13 deletions(-)
diff --git a/compiler/rustc_codegen_ssa/src/meth.rs b/compiler/rustc_codegen_ssa/src/meth.rs
index 63245a94c8e..b392b2c4ab8 100644
--- a/compiler/rustc_codegen_ssa/src/meth.rs
+++ b/compiler/rustc_codegen_ssa/src/meth.rs
@@ -20,10 +20,11 @@ impl<'a, 'tcx> VirtualIndex {
         // Load the data pointer from the object.
         debug!("get_fn({:?}, {:?})", llvtable, self);
 
-        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(bx.fn_ptr_backend_type(fn_abi)));
+        let llty = bx.fn_ptr_backend_type(fn_abi);
+        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty));
         let ptr_align = bx.tcx().data_layout.pointer_align.abi;
         let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
-        let ptr = bx.load(gep, ptr_align);
+        let ptr = bx.load(llty, gep, ptr_align);
         bx.nonnull_metadata(ptr);
         // Vtable loads are invariant.
         bx.set_invariant_load(ptr);
@@ -38,10 +39,11 @@ impl<'a, 'tcx> VirtualIndex {
         // Load the data pointer from the object.
         debug!("get_int({:?}, {:?})", llvtable, self);
 
-        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(bx.type_isize()));
+        let llty = bx.type_isize();
+        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty));
         let usize_align = bx.tcx().data_layout.pointer_align.abi;
         let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
-        let ptr = bx.load(gep, usize_align);
+        let ptr = bx.load(llty, gep, usize_align);
         // Vtable loads are invariant.
         bx.set_invariant_load(ptr);
         ptr
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index 2cb28d7361c..b584801a62d 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -260,7 +260,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             PassMode::Direct(_) | PassMode::Pair(..) => {
                 let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
                 if let Ref(llval, _, align) = op.val {
-                    bx.load(llval, align)
+                    bx.load(bx.backend_type(op.layout), llval, align)
                 } else {
                     op.immediate_or_packed_pair(&mut bx)
                 }
@@ -287,8 +287,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         llval
                     }
                 };
-                let addr = bx.pointercast(llslot, bx.type_ptr_to(bx.cast_backend_type(&cast_ty)));
-                bx.load(addr, self.fn_abi.ret.layout.align.abi)
+                let ty = bx.cast_backend_type(&cast_ty);
+                let addr = bx.pointercast(llslot, bx.type_ptr_to(ty));
+                bx.load(ty, addr, self.fn_abi.ret.layout.align.abi)
             }
         };
         bx.ret(llval);
@@ -1086,15 +1087,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         if by_ref && !arg.is_indirect() {
             // Have to load the argument, maybe while casting it.
             if let PassMode::Cast(ty) = arg.mode {
-                let addr = bx.pointercast(llval, bx.type_ptr_to(bx.cast_backend_type(&ty)));
-                llval = bx.load(addr, align.min(arg.layout.align.abi));
+                let llty = bx.cast_backend_type(&ty);
+                let addr = bx.pointercast(llval, bx.type_ptr_to(llty));
+                llval = bx.load(llty, addr, align.min(arg.layout.align.abi));
             } else {
                 // We can't use `PlaceRef::load` here because the argument
                 // may have a type we don't treat as immediate, but the ABI
                 // used for this call is passing it by-value. In that case,
                 // the load would just produce `OperandValue::Ref` instead
                 // of the `OperandValue::Immediate` we need for the call.
-                llval = bx.load(llval, align);
+                llval = bx.load(bx.backend_type(arg.layout), llval, align);
                 if let abi::Abi::Scalar(ref scalar) = arg.layout.abi {
                     if scalar.is_bool() {
                         bx.range_metadata(llval, 0..2);
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index 25e84c38ed3..3481b36bcc0 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -289,6 +289,15 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
         }
         match self {
             OperandValue::Ref(r, None, source_align) => {
+                if flags.contains(MemFlags::NONTEMPORAL) {
+                    // HACK(nox): This is inefficient but there is no nontemporal memcpy.
+                    // FIXME: Don't access pointer element type.
+                    let ty = bx.element_type(bx.val_ty(r));
+                    let val = bx.load(ty, r, source_align);
+                    let ptr = bx.pointercast(dest.llval, bx.type_ptr_to(ty));
+                    bx.store_with_flags(val, ptr, dest.align, flags);
+                    return;
+                }
                 base::memcpy_ty(bx, dest.llval, dest.align, r, source_align, dest.layout, flags)
             }
             OperandValue::Ref(_, Some(_), _) => {
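
The hunk above is the one remaining place that inspects the pointee type:
there is no nontemporal memcpy, so the value is loaded and re-stored with
the flag, and the load type is recovered via `element_type`. The FIXME
points toward deriving the type from the layout instead; a hedged sketch of
that direction (not code from this patch), reusing `dest`, `r`,
`source_align`, and `flags` from the hunk:

    // Hypothetical opaque-pointer-clean variant of the hack above: take
    // the type from the destination layout rather than the pointer, which
    // also makes the pointercast unnecessary.
    let ty = bx.backend_type(dest.layout);
    let val = bx.load(ty, r, source_align);
    bx.store_with_flags(val, dest.llval, dest.align, flags);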
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index a9e7ebf6d43..9906cf59e9a 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -407,7 +407,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         let layout = bx.layout_of(target_ty.ty);
 
         PlaceRef {
-            llval: bx.load(self.llval, self.align),
+            llval: bx.load(bx.backend_type(layout), self.llval, self.align),
             llextra: None,
             layout,
             align: layout.align.abi,
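
On the backend side, the trait change in the builder.rs hunk below threads
the explicit type through to the load instruction. A minimal, hypothetical
sketch of an LLVM-based implementation: `LLVMBuildLoad2` and
`LLVMSetAlignment` are the standard LLVM-C entry points, but the
surrounding impl and the `UNNAMED` name constant are illustrative, not
taken from rustc_codegen_llvm:

    fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value {
        unsafe {
            // LLVMBuildLoad2 takes the result type explicitly, so it keeps
            // working when the pointer carries no pointee type.
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as std::os::raw::c_uint);
            load
        }
    }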
diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs
index c5d7c4f86c5..f0c232a97bc 100644
--- a/compiler/rustc_codegen_ssa/src/traits/builder.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs
@@ -137,8 +137,8 @@ pub trait BuilderMethods<'a, 'tcx>:
     fn dynamic_alloca(&mut self, ty: Self::Type, align: Align) -> Self::Value;
     fn array_alloca(&mut self, ty: Self::Type, len: Self::Value, align: Align) -> Self::Value;
 
-    fn load(&mut self, ptr: Self::Value, align: Align) -> Self::Value;
-    fn volatile_load(&mut self, ptr: Self::Value) -> Self::Value;
+    fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
+    fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
     fn atomic_load(
         &mut self,
         ty: Self::Type,