path: root/compiler/rustc_data_structures/src
author    Mark Rousskov <mark.simulacrum@gmail.com>  2021-10-23 12:11:05 -0400
committer Mark Rousskov <mark.simulacrum@gmail.com>  2021-10-23 12:11:05 -0400
commit    3cd5c95ab0618924d790fcd19f31883e020cc90f (patch)
tree      22683d93dbd341c6343f21f417fd58271cfedb7a /compiler/rustc_data_structures/src
parent    514b3877956dc594823106b66c164f8cdbc8b3da (diff)
Specialize HashStable for [u8] slices
Particularly for ctfe-stress-4, hashing the byte slices that back MIR Allocations is quite hot. Previously, we fell back on copying the slice byte-by-byte into SipHash's 64-byte buffer and hashing each 64-byte chunk as it filled, over and over.

This should be an improvement for that workload.
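
As a rough, self-contained sketch of why the bulk write matters: the example below uses std's DefaultHasher rather than rustc's StableHasher, and the helper names (hash_bytes_elementwise, hash_bytes_bulk) are hypothetical. It only illustrates the difference in call pattern between the generic per-element path and the specialized whole-slice path; the digests themselves are not expected to match.

// Generic path: one hasher call per element, analogous to the blanket
// `HashStable for [T]` impl looping over `self`.
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

fn hash_bytes_elementwise(bytes: &[u8], hasher: &mut DefaultHasher) {
    hasher.write_usize(bytes.len());
    for &b in bytes {
        hasher.write_u8(b);
    }
}

// Specialized path: length first, then the entire slice in one `write`,
// mirroring the new `HashStable for [u8]` impl in the diff below.
fn hash_bytes_bulk(bytes: &[u8], hasher: &mut DefaultHasher) {
    hasher.write_usize(bytes.len());
    hasher.write(bytes);
}

fn main() {
    let data = vec![0xABu8; 4096];

    let mut h1 = DefaultHasher::new();
    hash_bytes_elementwise(&data, &mut h1);

    let mut h2 = DefaultHasher::new();
    hash_bytes_bulk(&data, &mut h2);

    // The point is the number of hasher calls for a 4 KiB allocation:
    // 4097 (length + one per byte) versus 2 (length + whole slice).
    println!("elementwise: {:x}", h1.finish());
    println!("bulk:        {:x}", h2.finish());
}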
Diffstat (limited to 'compiler/rustc_data_structures/src')
-rw-r--r-- compiler/rustc_data_structures/src/stable_hasher.rs | 7
1 file changed, 7 insertions(+), 0 deletions(-)
diff --git a/compiler/rustc_data_structures/src/stable_hasher.rs b/compiler/rustc_data_structures/src/stable_hasher.rs
index 354f9dd93cc..f800ec6a6a1 100644
--- a/compiler/rustc_data_structures/src/stable_hasher.rs
+++ b/compiler/rustc_data_structures/src/stable_hasher.rs
@@ -301,6 +301,13 @@ impl<T: HashStable<CTX>, CTX> HashStable<CTX> for [T] {
     }
 }
 
+impl<CTX> HashStable<CTX> for [u8] {
+    fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
+        self.len().hash_stable(ctx, hasher);
+        hasher.write(self);
+    }
+}
+
 impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Vec<T> {
     #[inline]
     fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {