Diffstat (limited to 'compiler/rustc_target/src/callconv')
 compiler/rustc_target/src/callconv/arm.rs       | 2 +-
 compiler/rustc_target/src/callconv/loongarch.rs | 2 +-
 compiler/rustc_target/src/callconv/mips.rs      | 2 +-
 compiler/rustc_target/src/callconv/mips64.rs    | 4 ++--
 compiler/rustc_target/src/callconv/mod.rs       | 2 +-
 compiler/rustc_target/src/callconv/nvptx64.rs   | 4 ++--
 compiler/rustc_target/src/callconv/powerpc64.rs | 2 +-
 compiler/rustc_target/src/callconv/riscv.rs     | 2 +-
 compiler/rustc_target/src/callconv/sparc.rs     | 2 +-
 compiler/rustc_target/src/callconv/sparc64.rs   | 2 +-
 compiler/rustc_target/src/callconv/xtensa.rs    | 2 +-
 11 files changed, 13 insertions(+), 13 deletions(-)
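Taken together, the hunks below are one mechanical refactor: the `.abi` projection on alignment values is dropped outright in most files, and in mips.rs/sparc.rs it moves from the end of the `max`/`min` chain onto `arg.layout.align`, so the comparisons run on plain `Align` values. This is consistent with `rustc_abi` retiring its ABI-vs-preferred alignment pair in favour of a single `Align`; the replacement type itself is not visible in this diff, so the sketch below is an assumption built from the pre-change `AbiAndPrefAlign` shape.

    // Illustrative stand-ins only, not the real rustc_abi definitions.
    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    struct Align { pow2: u8 } // power-of-two alignment

    impl Align {
        fn bytes(self) -> u64 { 1u64 << self.pow2 }
        fn bits(self) -> u64 { self.bytes() * 8 }
    }

    // Before: alignment travelled as an ABI/preferred pair, so call sites
    // wrote `layout.align.abi.bytes()` to get at the ABI half.
    struct AbiAndPrefAlign { abi: Align, pref: Align }

    // After: the same call site reads `layout.align.bytes()`, i.e. the
    // alignment behaves as a single `Align`.

    fn main() {
        let a = Align { pow2: 3 };
        assert_eq!(a.bytes(), 8);
        assert_eq!(a.bits(), 64);
    }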
diff --git a/compiler/rustc_target/src/callconv/arm.rs b/compiler/rustc_target/src/callconv/arm.rs
index 70830fa07b6..abc9a404e2e 100644
--- a/compiler/rustc_target/src/callconv/arm.rs
+++ b/compiler/rustc_target/src/callconv/arm.rs
@@ -77,7 +77,7 @@ where
         }
     }
 
-    let align = arg.layout.align.abi.bytes();
+    let align = arg.layout.align.bytes();
     let total = arg.layout.size;
     arg.cast_to(Uniform::consecutive(if align <= 4 { Reg::i32() } else { Reg::i64() }, total));
 }
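A note on the arm.rs hunk's surroundings: the function casts an aggregate to a uniform run of registers, choosing the unit width from the aggregate's alignment. A self-contained sketch of that rule, with a hypothetical helper name:

    // Mirrors the `if align <= 4 { Reg::i32() } else { Reg::i64() }`
    // choice above; the real code returns rustc_abi::Reg values.
    fn arm_unit_bits(align_bytes: u64) -> u64 {
        if align_bytes <= 4 { 32 } else { 64 }
    }

    fn main() {
        assert_eq!(arm_unit_bits(4), 32); // 4-aligned aggregate: i32 units
        assert_eq!(arm_unit_bits(8), 64); // 8-aligned aggregate: i64 units
    }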
diff --git a/compiler/rustc_target/src/callconv/loongarch.rs b/compiler/rustc_target/src/callconv/loongarch.rs
index 9213d73e24e..bc3c9601fa3 100644
--- a/compiler/rustc_target/src/callconv/loongarch.rs
+++ b/compiler/rustc_target/src/callconv/loongarch.rs
@@ -322,7 +322,7 @@ fn classify_arg<'a, Ty, C>(
     }
 
     let total = arg.layout.size;
-    let align = arg.layout.align.abi.bits();
+    let align = arg.layout.align.bits();
 
     // "Scalars wider than 2✕XLEN are passed by reference and are replaced in
     // the argument list with the address."
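To put a number on the quoted rule: assuming XLEN = 64 on loongarch64 (a property of the target, not stated in this hunk), the by-reference threshold is 128 bits. A sketch of the predicate:

    // Hypothetical helper; the real code compares `total` against the
    // target's XLEN and calls make_indirect on the ArgAbi.
    fn passed_by_reference(scalar_bits: u64, xlen_bits: u64) -> bool {
        scalar_bits > 2 * xlen_bits
    }

    fn main() {
        assert!(!passed_by_reference(128, 64)); // u128 still fits in registers
        assert!(passed_by_reference(256, 64));  // wider scalar: pass the address
    }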
diff --git a/compiler/rustc_target/src/callconv/mips.rs b/compiler/rustc_target/src/callconv/mips.rs
index 48a01da865b..8ffd7bd1778 100644
--- a/compiler/rustc_target/src/callconv/mips.rs
+++ b/compiler/rustc_target/src/callconv/mips.rs
@@ -24,7 +24,7 @@ where
     }
     let dl = cx.data_layout();
     let size = arg.layout.size;
-    let align = arg.layout.align.max(dl.i32_align).min(dl.i64_align).abi;
+    let align = arg.layout.align.abi.max(dl.i32_align).min(dl.i64_align);
 
     if arg.layout.is_aggregate() {
         let pad_i32 = !offset.is_aligned(align);
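The `max`/`min` chain in this hunk clamps the argument's alignment into the `[i32_align, i64_align]` range, which on a typical MIPS data layout is [4, 8] bytes (an assumption; the exact values come from the target spec). In plain arithmetic:

    // Byte values standing in for Align (illustrative only):
    fn clamped_align(arg: u64, i32_align: u64, i64_align: u64) -> u64 {
        arg.max(i32_align).min(i64_align)
    }

    fn main() {
        assert_eq!(clamped_align(1, 4, 8), 4);  // bytes round up to a word
        assert_eq!(clamped_align(16, 4, 8), 8); // capped at doubleword
    }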
diff --git a/compiler/rustc_target/src/callconv/mips64.rs b/compiler/rustc_target/src/callconv/mips64.rs
index 0209838bec1..8386a15933c 100644
--- a/compiler/rustc_target/src/callconv/mips64.rs
+++ b/compiler/rustc_target/src/callconv/mips64.rs
@@ -110,9 +110,9 @@ where
                 // We only care about aligned doubles
                 if let BackendRepr::Scalar(scalar) = field.backend_repr {
                     if scalar.primitive() == Primitive::Float(Float::F64) {
-                        if offset.is_aligned(dl.f64_align.abi) {
+                        if offset.is_aligned(dl.f64_align) {
                             // Insert enough integers to cover [last_offset, offset)
-                            assert!(last_offset.is_aligned(dl.f64_align.abi));
+                            assert!(last_offset.is_aligned(dl.f64_align));
                             for _ in 0..((offset - last_offset).bits() / 64)
                                 .min((prefix.len() - prefix_index) as u64)
                             {
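To make the loop bound in the mips64 hunk concrete: if `last_offset` is 0 and an aligned f64 field sits at byte offset 16, then `(offset - last_offset).bits() / 64` is 128 / 64 = 2, so up to two integer units are inserted into the prefix before the float register, capped by however many prefix slots remain. (The numbers are hypothetical; the shape follows directly from the code above.)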
diff --git a/compiler/rustc_target/src/callconv/mod.rs b/compiler/rustc_target/src/callconv/mod.rs
index 7a7c63c475b..c59af581a1f 100644
--- a/compiler/rustc_target/src/callconv/mod.rs
+++ b/compiler/rustc_target/src/callconv/mod.rs
@@ -332,7 +332,7 @@ impl CastTarget {
         self.prefix
             .iter()
             .filter_map(|x| x.map(|reg| reg.align(cx)))
-            .fold(cx.data_layout().aggregate_align.abi.max(self.rest.align(cx)), |acc, align| {
+            .fold(cx.data_layout().aggregate_align.max(self.rest.align(cx)), |acc, align| {
                 acc.max(align)
             })
     }
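The fold in `CastTarget` computes the largest alignment among a seed (the aggregate alignment or the tail's alignment, whichever is greater) and every prefix register. Shape-wise, over plain numbers:

    // Equivalent fold (illustrative):
    fn cast_target_align(seed: u64, prefix_aligns: &[u64]) -> u64 {
        prefix_aligns.iter().fold(seed, |acc, &a| acc.max(a))
    }

    fn main() {
        assert_eq!(cast_target_align(8, &[4, 16]), 16);
    }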
diff --git a/compiler/rustc_target/src/callconv/nvptx64.rs b/compiler/rustc_target/src/callconv/nvptx64.rs
index 44977de7fcb..dc32dd87a7e 100644
--- a/compiler/rustc_target/src/callconv/nvptx64.rs
+++ b/compiler/rustc_target/src/callconv/nvptx64.rs
@@ -21,7 +21,7 @@ fn classify_arg<Ty>(arg: &mut ArgAbi<'_, Ty>) {
 
 /// the pass mode used for aggregates in arg and ret position
 fn classify_aggregate<Ty>(arg: &mut ArgAbi<'_, Ty>) {
-    let align_bytes = arg.layout.align.abi.bytes();
+    let align_bytes = arg.layout.align.bytes();
     let size = arg.layout.size;
 
     let reg = match align_bytes {
@@ -60,7 +60,7 @@ where
     //     "`extern \"ptx-kernel\"` doesn't allow passing types other than primitives and structs"
     // );
 
-    let align_bytes = arg.layout.align.abi.bytes();
+    let align_bytes = arg.layout.align.bytes();
 
     let unit = match align_bytes {
         1 => Reg::i8(),
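The `match` beginning here maps the aggregate's alignment in bytes to a register unit of the same width; only the first arm is visible, so the rest of this sketch is an assumption from the pattern:

    // Hypothetical standalone mapping (real code returns rustc_abi::Reg):
    fn nvptx_unit_bits(align_bytes: u64) -> Option<u64> {
        match align_bytes {
            1 => Some(8),   // the one arm visible in the hunk
            2 => Some(16),  // assumed
            4 => Some(32),  // assumed
            8 => Some(64),  // assumed
            _ => None,      // handled elsewhere in the real function
        }
    }

    fn main() {
        assert_eq!(nvptx_unit_bits(8), Some(64));
    }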
diff --git a/compiler/rustc_target/src/callconv/powerpc64.rs b/compiler/rustc_target/src/callconv/powerpc64.rs
index 89ec85e4b66..be1d13816ef 100644
--- a/compiler/rustc_target/src/callconv/powerpc64.rs
+++ b/compiler/rustc_target/src/callconv/powerpc64.rs
@@ -89,7 +89,7 @@ where
         // Aggregates larger than i64 should be padded at the tail to fill out a whole number
         // of i64s or i128s, depending on the aggregate alignment. Always use an array for
         // this, even if there is only a single element.
-        let reg = if arg.layout.align.abi.bytes() > 8 { Reg::i128() } else { Reg::i64() };
+        let reg = if arg.layout.align.bytes() > 8 { Reg::i128() } else { Reg::i64() };
         arg.cast_to(Uniform::consecutive(
             reg,
             size.align_to(Align::from_bytes(reg.size.bytes()).unwrap()),
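Concretely, the powerpc64 hunk rounds the aggregate up to whole units: a 20-byte aggregate with 8-byte alignment is cast to 3 x i64 (24 bytes), while one with 16-byte alignment uses i128 units instead. The tail-padding arithmetic:

    // Plain-number version of `size.align_to(unit)` above:
    fn padded_bytes(size: u64, unit: u64) -> u64 {
        size.div_ceil(unit) * unit // u64::div_ceil needs Rust 1.73+
    }

    fn main() {
        assert_eq!(padded_bytes(20, 8), 24);  // 3 x i64
        assert_eq!(padded_bytes(20, 16), 32); // 2 x i128
    }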
diff --git a/compiler/rustc_target/src/callconv/riscv.rs b/compiler/rustc_target/src/callconv/riscv.rs
index 161e2c1645f..16de3fe070d 100644
--- a/compiler/rustc_target/src/callconv/riscv.rs
+++ b/compiler/rustc_target/src/callconv/riscv.rs
@@ -328,7 +328,7 @@ fn classify_arg<'a, Ty, C>(
     }
 
     let total = arg.layout.size;
-    let align = arg.layout.align.abi.bits();
+    let align = arg.layout.align.bits();
 
     // "Scalars wider than 2✕XLEN are passed by reference and are replaced in
     // the argument list with the address."
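riscv.rs shares this classification logic with loongarch.rs, including the 2✕XLEN rule sketched above; the practical difference is XLEN itself. On riscv32 (XLEN = 32) the by-reference threshold is 64 bits, so a u128 argument is already passed by address there, while riscv64 keeps it in registers.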
diff --git a/compiler/rustc_target/src/callconv/sparc.rs b/compiler/rustc_target/src/callconv/sparc.rs
index 48a01da865b..8ffd7bd1778 100644
--- a/compiler/rustc_target/src/callconv/sparc.rs
+++ b/compiler/rustc_target/src/callconv/sparc.rs
@@ -24,7 +24,7 @@ where
     }
     let dl = cx.data_layout();
     let size = arg.layout.size;
-    let align = arg.layout.align.max(dl.i32_align).min(dl.i64_align).abi;
+    let align = arg.layout.align.abi.max(dl.i32_align).min(dl.i64_align);
 
     if arg.layout.is_aggregate() {
         let pad_i32 = !offset.is_aligned(align);
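Note that sparc.rs and mips.rs carry identical index hashes in this diff (48a01da865b..8ffd7bd1778), i.e. the classify code is byte-for-byte the same in both files, so the clamp example given under the mips.rs hunk applies here unchanged.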
diff --git a/compiler/rustc_target/src/callconv/sparc64.rs b/compiler/rustc_target/src/callconv/sparc64.rs
index ecc9067ced3..62c8ed1dc21 100644
--- a/compiler/rustc_target/src/callconv/sparc64.rs
+++ b/compiler/rustc_target/src/callconv/sparc64.rs
@@ -29,7 +29,7 @@ where
 
     data.has_float = true;
 
-    if !data.last_offset.is_aligned(dl.f64_align.abi) && data.last_offset < offset {
+    if !data.last_offset.is_aligned(dl.f64_align) && data.last_offset < offset {
         if data.prefix_index == data.prefix.len() {
             return data;
         }
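Reading the sparc64 guard: it fires when the bytes classified so far end at a non-f64-aligned boundary strictly below the current float's offset, i.e. there is an unaligned gap to cover before a float register can be placed, and it bails out early if the prefix is already full. As a standalone predicate, with offsets in bytes and f64 alignment assumed to be 8:

    // Illustrative version of `!last_offset.is_aligned(f64_align) && last_offset < offset`:
    fn unaligned_gap_before(last_offset: u64, offset: u64) -> bool {
        last_offset % 8 != 0 && last_offset < offset
    }

    fn main() {
        assert!(unaligned_gap_before(4, 8));   // half-filled slot before a double
        assert!(!unaligned_gap_before(8, 16)); // already aligned: no filler needed
    }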
diff --git a/compiler/rustc_target/src/callconv/xtensa.rs b/compiler/rustc_target/src/callconv/xtensa.rs
index a73a70a1a0c..561ee98787d 100644
--- a/compiler/rustc_target/src/callconv/xtensa.rs
+++ b/compiler/rustc_target/src/callconv/xtensa.rs
@@ -48,7 +48,7 @@ where
     }
 
     let size = arg.layout.size.bits();
-    let needed_align = arg.layout.align.abi.bits();
+    let needed_align = arg.layout.align.bits();
     let mut must_use_stack = false;
 
     // Determine the number of GPRs needed to pass the current argument
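The count the comment announces is, in effect, a round-up of the argument's size to 32-bit GPRs (Xtensa general registers are 32-bit); the real computation sits outside this hunk, so the sketch below is an assumption about its basic shape:

    // Hypothetical count of 32-bit GPRs for an argument of `size_bits`
    // (the real function also folds in `needed_align`; not shown here):
    fn needed_gprs(size_bits: u64) -> u64 {
        size_bits.div_ceil(32)
    }

    fn main() {
        assert_eq!(needed_gprs(64), 2); // a 64-bit argument spans two GPRs
        assert_eq!(needed_gprs(96), 3);
    }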