about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Eduard-Mihai Burtescu <edy.burt@gmail.com>  2017-10-08 23:08:47 +0300
committer Eduard-Mihai Burtescu <edy.burt@gmail.com>  2017-11-19 02:43:55 +0200
commit  c4d9ada70108210a2a2f7d3025a0d693fc3e3e9d (patch)
tree    5a501d8e781bd884e04b68ba1c3f2820825cba08
parent  cdeb4b0d258c19f57ee6fb089126656e18324367 (diff)
downloadrust-c4d9ada70108210a2a2f7d3025a0d693fc3e3e9d.tar.gz
rust-c4d9ada70108210a2a2f7d3025a0d693fc3e3e9d.zip
rustc: place ZSTs first during struct field reordering.
-rw-r--r--  src/librustc/ty/layout.rs | 41
1 file changed, 19 insertions(+), 22 deletions(-)
diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs
index 21ba7995332..70c41e7402d 100644
--- a/src/librustc/ty/layout.rs
+++ b/src/librustc/ty/layout.rs
@@ -964,40 +964,37 @@ impl<'a, 'tcx> CachedLayout {
             let mut align = base_align;
             let mut primitive_align = base_align;
             let mut sized = true;
+            let mut offsets = vec![Size::from_bytes(0); fields.len()];
+            let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
 
             // Anything with repr(C) or repr(packed) doesn't optimize.
-            // Neither do  1-member and 2-member structs.
-            // In addition, code in trans assume that 2-element structs can become pairs.
-            // It's easier to just short-circuit here.
-            let (mut optimize, sort_ascending) = match kind {
+            let optimize = match kind {
                 StructKind::AlwaysSized |
-                StructKind::MaybeUnsized => (fields.len() > 2, false),
-                StructKind::EnumVariant(discr) => {
-                    (discr.size().bytes() == 1, true)
+                StructKind::MaybeUnsized |
+                StructKind::EnumVariant(I8) => {
+                    (repr.flags & ReprFlags::IS_UNOPTIMISABLE).is_empty()
                 }
+                StructKind::EnumVariant(_) => false
             };
-
-            optimize &= (repr.flags & ReprFlags::IS_UNOPTIMISABLE).is_empty();
-
-            let mut offsets = vec![Size::from_bytes(0); fields.len()];
-            let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
-
             if optimize {
                 let end = if let StructKind::MaybeUnsized = kind {
                     fields.len() - 1
                 } else {
                     fields.len()
                 };
-                if end > 0 {
-                    let optimizing  = &mut inverse_memory_index[..end];
-                    if sort_ascending {
+                let optimizing = &mut inverse_memory_index[..end];
+                match kind {
+                    StructKind::AlwaysSized |
+                    StructKind::MaybeUnsized => {
+                        optimizing.sort_by_key(|&x| {
+                            // Place ZSTs first to avoid "interesting offsets",
+                            // especially with only one or two non-ZST fields.
+                            let f = &fields[x as usize];
+                            (!f.is_zst(), cmp::Reverse(f.align.abi()))
+                        })
+                    }
+                    StructKind::EnumVariant(_) => {
                         optimizing.sort_by_key(|&x| fields[x as usize].align.abi());
-                    } else {
-                        optimizing.sort_by(| &a, &b | {
-                            let a = fields[a as usize].align.abi();
-                            let b = fields[b as usize].align.abi();
-                            b.cmp(&a)
-                        });
                     }
                 }
             }