about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/generic/mod.rs52
-rw-r--r--src/test/ui/deriving/deriving-all-codegen.stdout26
2 files changed, 39 insertions, 39 deletions
diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
index 7ff75592a52..076b627ca79 100644
--- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
@@ -1013,20 +1013,25 @@ impl<'a> MethodDef<'a> {
     /// }
     /// ```
     /// But if the struct is `repr(packed)`, we can't use something like
-    /// `&self.x` on a packed type (as required for e.g. `Debug` and `Hash`)
-    /// because that might cause an unaligned ref. So we use let-destructuring
-    /// instead. If the struct impls `Copy`:
+    /// `&self.x` because that might cause an unaligned ref. So for any trait
+    /// method that takes a reference, if the struct impls `Copy` then we use a
+    /// local block to force a copy:
     /// ```
     /// # struct A { x: u8, y: u8 }
     /// impl PartialEq for A {
     ///     fn eq(&self, other: &A) -> bool {
-    ///         let Self { x: __self_0_0, y: __self_0_1 } = *self;
-    ///         let Self { x: __self_1_0, y: __self_1_1 } = *other;
-    ///         __self_0_0 == __self_1_0 && __self_0_1 == __self_1_1
+    ///         // Desugars to `{ self.x }.eq(&{ other.x }) && ...`
+    ///         { self.x } == { other.x } && { self.y } == { other.y }
+    ///     }
+    /// }
+    /// impl Hash for A {
+    ///     fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
+    ///         ::core::hash::Hash::hash(&{ self.x }, state);
+    ///         ::core::hash::Hash::hash(&{ self.y }, state)
     ///     }
     /// }
     /// ```
-    /// If it doesn't impl `Copy`:
+    /// If the struct doesn't impl `Copy`, we use let-destructuring with `ref`:
     /// ```
     /// # struct A { x: u8, y: u8 }
     /// impl PartialEq for A {
@@ -1038,7 +1043,7 @@ impl<'a> MethodDef<'a> {
     /// }
     /// ```
     /// This latter case only works if the fields match the alignment required
-    /// by the `packed(N)` attribute.
+    /// by the `packed(N)` attribute. (We'll get errors later on if not.)
     fn expand_struct_method_body<'b>(
         &self,
         cx: &mut ExtCtxt<'_>,
@@ -1065,9 +1070,14 @@ impl<'a> MethodDef<'a> {
 
         if !is_packed {
             let selflike_fields =
-                trait_.create_struct_field_access_fields(cx, selflike_args, struct_def);
+                trait_.create_struct_field_access_fields(cx, selflike_args, struct_def, false);
+            mk_body(cx, selflike_fields)
+        } else if always_copy {
+            let selflike_fields =
+                trait_.create_struct_field_access_fields(cx, selflike_args, struct_def, true);
             mk_body(cx, selflike_fields)
         } else {
+            // Packed, but not `Copy`. Need to use ref patterns.
             let prefixes: Vec<_> =
                 (0..selflike_args.len()).map(|i| format!("__self_{}", i)).collect();
             let addr_of = always_copy;
@@ -1536,6 +1546,7 @@ impl<'a> TraitDef<'a> {
         cx: &mut ExtCtxt<'_>,
         selflike_args: &[P<Expr>],
         struct_def: &'a VariantData,
+        copy: bool,
     ) -> Vec<FieldInfo> {
         self.create_fields(struct_def, |i, struct_field, sp| {
             selflike_args
@@ -1545,18 +1556,21 @@ impl<'a> TraitDef<'a> {
                     // `unwrap_or_else` case otherwise the hygiene is wrong and we get
                     // "field `0` of struct `Point` is private" errors on tuple
                     // structs.
-                    cx.expr_addr_of(
+                    let mut field_expr = cx.expr(
                         sp,
-                        cx.expr(
-                            sp,
-                            ast::ExprKind::Field(
-                                selflike_arg.clone(),
-                                struct_field.ident.unwrap_or_else(|| {
-                                    Ident::from_str_and_span(&i.to_string(), struct_field.span)
-                                }),
-                            ),
+                        ast::ExprKind::Field(
+                            selflike_arg.clone(),
+                            struct_field.ident.unwrap_or_else(|| {
+                                Ident::from_str_and_span(&i.to_string(), struct_field.span)
+                            }),
                         ),
-                    )
+                    );
+                    if copy {
+                        field_expr = cx.expr_block(
+                            cx.block(struct_field.span, vec![cx.stmt_expr(field_expr)]),
+                        );
+                    }
+                    cx.expr_addr_of(sp, field_expr)
                 })
                 .collect()
         })
diff --git a/src/test/ui/deriving/deriving-all-codegen.stdout b/src/test/ui/deriving/deriving-all-codegen.stdout
index e129f25b0dd..542911537be 100644
--- a/src/test/ui/deriving/deriving-all-codegen.stdout
+++ b/src/test/ui/deriving/deriving-all-codegen.stdout
@@ -441,9 +441,8 @@ impl ::core::marker::Copy for PackedCopy { }
 #[allow(unused_qualifications)]
 impl ::core::fmt::Debug for PackedCopy {
     fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
-        let Self(__self_0_0) = *self;
         ::core::fmt::Formatter::debug_tuple_field1_finish(f, "PackedCopy",
-            &&__self_0_0)
+            &&{ self.0 })
     }
 }
 #[automatically_derived]
@@ -458,8 +457,7 @@ impl ::core::default::Default for PackedCopy {
 #[allow(unused_qualifications)]
 impl ::core::hash::Hash for PackedCopy {
     fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
-        let Self(__self_0_0) = *self;
-        ::core::hash::Hash::hash(&__self_0_0, state)
+        ::core::hash::Hash::hash(&{ self.0 }, state)
     }
 }
 impl ::core::marker::StructuralPartialEq for PackedCopy {}
@@ -467,17 +465,9 @@ impl ::core::marker::StructuralPartialEq for PackedCopy {}
 #[allow(unused_qualifications)]
 impl ::core::cmp::PartialEq for PackedCopy {
     #[inline]
-    fn eq(&self, other: &PackedCopy) -> bool {
-        let Self(__self_0_0) = *self;
-        let Self(__self_1_0) = *other;
-        __self_0_0 == __self_1_0
-    }
+    fn eq(&self, other: &PackedCopy) -> bool { { self.0 } == { other.0 } }
     #[inline]
-    fn ne(&self, other: &PackedCopy) -> bool {
-        let Self(__self_0_0) = *self;
-        let Self(__self_1_0) = *other;
-        __self_0_0 != __self_1_0
-    }
+    fn ne(&self, other: &PackedCopy) -> bool { { self.0 } != { other.0 } }
 }
 impl ::core::marker::StructuralEq for PackedCopy {}
 #[automatically_derived]
@@ -496,9 +486,7 @@ impl ::core::cmp::PartialOrd for PackedCopy {
     #[inline]
     fn partial_cmp(&self, other: &PackedCopy)
         -> ::core::option::Option<::core::cmp::Ordering> {
-        let Self(__self_0_0) = *self;
-        let Self(__self_1_0) = *other;
-        ::core::cmp::PartialOrd::partial_cmp(&__self_0_0, &__self_1_0)
+        ::core::cmp::PartialOrd::partial_cmp(&{ self.0 }, &{ other.0 })
     }
 }
 #[automatically_derived]
@@ -506,9 +494,7 @@ impl ::core::cmp::PartialOrd for PackedCopy {
 impl ::core::cmp::Ord for PackedCopy {
     #[inline]
     fn cmp(&self, other: &PackedCopy) -> ::core::cmp::Ordering {
-        let Self(__self_0_0) = *self;
-        let Self(__self_1_0) = *other;
-        ::core::cmp::Ord::cmp(&__self_0_0, &__self_1_0)
+        ::core::cmp::Ord::cmp(&{ self.0 }, &{ other.0 })
     }
 }