author     bors <bors@rust-lang.org>  2022-08-20 07:59:48 +0000
committer  bors <bors@rust-lang.org>  2022-08-20 07:59:48 +0000
commit     36e530cb08950f1d03ab733e43ecec2802d099cf (patch)
tree       e065b491027f2d360e8085f023daffcbe49c89a5 /library/std/src
parent     e1b28cd2f16bd5b832183d7968cae3bb9213e78d (diff)
parent     60edec9ddf9f59c6e5ec86456070440031a87943 (diff)
Auto merge of #100793 - matthiaskrgr:rollup-dy7rfdh, r=matthiaskrgr
Rollup of 10 pull requests

Successful merges:

 - #100186 (Mention `as_mut` alongside `as_ref` in borrowck error message)
 - #100383 (Mitigate stale data reads on SGX platform)
 - #100507 (suggest `once_cell::Lazy` for non-const statics)
 - #100617 (Suggest the right help message for as_ref)
 - #100667 (Migrate "invalid variable declaration" errors to SessionDiagnostic)
 - #100709 (Migrate typeck's `used` expected symbol diagnostic to `SessionDiagnostic`)
 - #100723 (Add the diagnostic translation lints to crates that don't emit them)
 - #100729 (Avoid zeroing a 1kb stack buffer on every call to `std::sys::windows::fill_utf16_buf`)
 - #100750 (improved diagnostic for function defined with `def`, `fun`, `func`, or `function` instead of `fn`)
 - #100763 (triagebot: Autolabel `A-rustdoc-json`)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Diffstat (limited to 'library/std/src')
-rw-r--r--  library/std/src/sys/sgx/abi/usercalls/alloc.rs  159
-rw-r--r--  library/std/src/sys/sgx/abi/usercalls/tests.rs   30
-rw-r--r--  library/std/src/sys/windows/mod.rs                18
3 files changed, 179 insertions, 28 deletions
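
The SGX changes below route both copy directions through a single split of a memory region into an unaligned head, an 8-byte-aligned middle, and an unaligned tail. A minimal standalone sketch of that split arithmetic, mirroring the region_as_aligned_chunks helper added in alloc.rs (the function name and the worked values here are illustrative, not part of the patch):

    // Split `ptr..ptr + len` into (head, aligned middle, tail) sizes, as the
    // region_as_aligned_chunks helper in the diff below does. Assumes `len`
    // is at least as large as the head, which the callers guarantee.
    fn split_into_aligned_chunks(ptr: usize, len: usize) -> (usize, usize, usize) {
        let head = if ptr % 8 == 0 { 0 } else { 8 - ptr % 8 };
        let tail = (len - head) % 8;
        let middle = len - head - tail;
        (head, middle, tail)
    }

    fn main() {
        // Region starting 3 bytes past an 8-byte boundary, 21 bytes long:
        // 5-byte head up to the next boundary, 16-byte aligned middle, and
        // no tail because 21 - 5 is already a multiple of 8.
        assert_eq!(split_into_aligned_chunks(0x1003, 21), (5, 16, 0));
        // Aligned start: no head, the 5 leftover bytes become the tail.
        assert_eq!(split_into_aligned_chunks(0x1000, 21), (0, 16, 5));
    }

Factoring the arithmetic into one helper lets copy_to_userspace and the new copy_from_userspace share it instead of recomputing the chunk sizes inline, which is exactly what the diff does.
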
diff --git a/library/std/src/sys/sgx/abi/usercalls/alloc.rs b/library/std/src/sys/sgx/abi/usercalls/alloc.rs
index 66fa1efbf10..34634da44de 100644
--- a/library/std/src/sys/sgx/abi/usercalls/alloc.rs
+++ b/library/std/src/sys/sgx/abi/usercalls/alloc.rs
@@ -305,6 +305,34 @@ where
     }
 }
 
+// Split a memory region ptr..ptr + len into three parts:
+//   +--------+
+//   | small0 | Chunk smaller than 8 bytes
+//   +--------+
+//   |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
+//   +--------+
+//   | small1 | Chunk smaller than 8 bytes
+//   +--------+
+fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (usize, usize, usize) {
+    let small0_size = if ptr as usize % 8 == 0 { 0 } else { 8 - ptr as usize % 8 };
+    let small1_size = (len - small0_size as usize) % 8;
+    let big_size = len - small0_size as usize - small1_size as usize;
+
+    (small0_size, big_size, small1_size)
+}
+
+unsafe fn copy_quadwords(src: *const u8, dst: *mut u8, len: usize) {
+    unsafe {
+        asm!(
+            "rep movsq (%rsi), (%rdi)",
+            inout("rcx") len / 8 => _,
+            inout("rdi") dst => _,
+            inout("rsi") src => _,
+            options(att_syntax, nostack, preserves_flags)
+        );
+    }
+}
+
 /// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
 ///
 /// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
@@ -343,17 +371,6 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
         }
     }
 
-    unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
-        unsafe {
-            asm!(
-                "rep movsq (%rsi), (%rdi)",
-                inout("rcx") len / 8 => _,
-                inout("rdi") dst => _,
-                inout("rsi") src => _,
-                options(att_syntax, nostack, preserves_flags)
-            );
-        }
-    }
     assert!(!src.is_null());
     assert!(!dst.is_null());
     assert!(is_enclave_range(src, len));
@@ -370,7 +387,7 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
     } else if len % 8 == 0 && dst as usize % 8 == 0 {
         // Copying 8-byte aligned quadwords: copy quad word per quad word
         unsafe {
-            copy_aligned_quadwords_to_userspace(src, dst, len);
+            copy_quadwords(src, dst, len);
         }
     } else {
         // Split copies into three parts:
@@ -381,20 +398,16 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
         //   +--------+
         //   | small1 | Chunk smaller than 8 bytes
         //   +--------+
+        let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
 
         unsafe {
             // Copy small0
-            let small0_size = (8 - dst as usize % 8) as u8;
-            let small0_src = src;
-            let small0_dst = dst;
-            copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
+            copy_bytewise_to_userspace(src, dst, small0_size as _);
 
             // Copy big
-            let small1_size = ((len - small0_size as usize) % 8) as u8;
-            let big_size = len - small0_size as usize - small1_size as usize;
             let big_src = src.offset(small0_size as _);
             let big_dst = dst.offset(small0_size as _);
-            copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
+            copy_quadwords(big_src as _, big_dst, big_size);
 
             // Copy small1
             let small1_src = src.offset(big_size as isize + small0_size as isize);
@@ -404,6 +417,106 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
     }
 }
 
+/// Copies `len` bytes of data from userspace pointer `src` to enclave pointer `dst`
+///
+/// This function mitigates AEPIC leak vulnerabilities by ensuring all reads from untrusted memory are 8-byte aligned
+///
+/// # Panics
+/// This function panics if:
+///
+/// * The `src` pointer is null
+/// * The `dst` pointer is null
+/// * The `src` memory range is not in user memory
+/// * The `dst` memory range is not in enclave memory
+///
+/// # References
+///  - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00657.html
+///  - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/advisory-guidance/stale-data-read-from-xapic.html
+pub(crate) unsafe fn copy_from_userspace(src: *const u8, dst: *mut u8, len: usize) {
+    // Copies memory region `src..src + len` to the enclave at `dst`. The source memory region
+    // is:
+    //  - strictly less than 8 bytes in size and may be
+    //  - located at a misaligned memory location
+    fn copy_misaligned_chunk_to_enclave(src: *const u8, dst: *mut u8, len: usize) {
+        let mut tmp_buff = [0u8; 16];
+
+        unsafe {
+            // Compute an aligned memory region to read from
+            // +--------+ <-- aligned_src + aligned_len (8B-aligned)
+            // |  pad1  |
+            // +--------+ <-- src + len (misaligned)
+            // |        |
+            // |        |
+            // |        |
+            // +--------+ <-- src (misaligned)
+            // |  pad0  |
+            // +--------+ <-- aligned_src (8B-aligned)
+            let pad0_size = src as usize % 8;
+            let aligned_src = src.sub(pad0_size);
+
+            let pad1_size = 8 - (src.add(len) as usize % 8);
+            let aligned_len = pad0_size + len + pad1_size;
+
+            debug_assert!(len < 8);
+            debug_assert_eq!(aligned_src as usize % 8, 0);
+            debug_assert_eq!(aligned_len % 8, 0);
+            debug_assert!(aligned_len <= 16);
+
+            // Copy the aligned buffer to a temporary buffer
+            // Note: copying from a slightly different memory location is a bit odd. In this case it
+            // can't lead to page faults or inadvertent copying from the enclave as we only ensured
+            // that the `src` pointer is aligned at an 8 byte boundary. As pages are 4096 bytes
+            // aligned, `aligned_src` must be on the same page as `src`. A similar argument can be made
+            // for `src + len`
+            copy_quadwords(aligned_src as _, tmp_buff.as_mut_ptr(), aligned_len);
+
+            // Copy the correct parts of the temporary buffer to the destination
+            ptr::copy(tmp_buff.as_ptr().add(pad0_size), dst, len);
+        }
+    }
+
+    assert!(!src.is_null());
+    assert!(!dst.is_null());
+    assert!(is_user_range(src, len));
+    assert!(is_enclave_range(dst, len));
+    assert!(!(src as usize).overflowing_add(len + 8).1);
+    assert!(!(dst as usize).overflowing_add(len + 8).1);
+
+    if len < 8 {
+        copy_misaligned_chunk_to_enclave(src, dst, len);
+    } else if len % 8 == 0 && src as usize % 8 == 0 {
+        // Copying 8-byte aligned quadwords: copy quad word per quad word
+        unsafe {
+            copy_quadwords(src, dst, len);
+        }
+    } else {
+        // Split copies into three parts:
+        //   +--------+
+        //   | small0 | Chunk smaller than 8 bytes
+        //   +--------+
+        //   |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
+        //   +--------+
+        //   | small1 | Chunk smaller than 8 bytes
+        //   +--------+
+        let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
+
+        unsafe {
+            // Copy small0
+            copy_misaligned_chunk_to_enclave(src, dst, small0_size);
+
+            // Copy big
+            let big_src = src.add(small0_size);
+            let big_dst = dst.add(small0_size);
+            copy_quadwords(big_src, big_dst, big_size);
+
+            // Copy small1
+            let small1_src = src.add(big_size + small0_size);
+            let small1_dst = dst.add(big_size + small0_size);
+            copy_misaligned_chunk_to_enclave(small1_src, small1_dst, small1_size);
+        }
+    }
+}
+
 #[unstable(feature = "sgx_platform", issue = "56975")]
 impl<T: ?Sized> UserRef<T>
 where
@@ -468,7 +581,7 @@ where
     pub fn copy_to_enclave(&self, dest: &mut T) {
         unsafe {
             assert_eq!(mem::size_of_val(dest), mem::size_of_val(&*self.0.get()));
-            ptr::copy(
+            copy_from_userspace(
                 self.0.get() as *const T as *const u8,
                 dest as *mut T as *mut u8,
                 mem::size_of_val(dest),
@@ -494,7 +607,11 @@ where
 {
     /// Copies the value from user memory into enclave memory.
     pub fn to_enclave(&self) -> T {
-        unsafe { ptr::read(self.0.get()) }
+        unsafe {
+            let mut data: T = mem::MaybeUninit::uninit().assume_init();
+            copy_from_userspace(self.0.get() as _, &mut data as *mut T as _, mem::size_of::<T>());
+            data
+        }
     }
 }
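
A note on the to_enclave change above: the patch materializes the destination with mem::MaybeUninit::uninit().assume_init() and then lets copy_from_userspace fill it. The same idea can be written so that assume_init only runs after the copy, by filling the MaybeUninit through its raw pointer. The sketch below assumes the copier fully initializes the value and uses a plain memcpy as a stand-in for the SGX-specific copy_from_userspace; it is not the code that was merged:

    use std::mem::MaybeUninit;

    // Stand-in for the real SGX copier: fills `dst..dst + len` from `src`.
    unsafe fn copy_from_userspace(src: *const u8, dst: *mut u8, len: usize) {
        std::ptr::copy_nonoverlapping(src, dst, len);
    }

    fn to_enclave_sketch<T>(src: *const T) -> T {
        let mut data = MaybeUninit::<T>::uninit();
        unsafe {
            // Copy into the uninitialized slot through its raw pointer...
            copy_from_userspace(src as *const u8, data.as_mut_ptr() as *mut u8, std::mem::size_of::<T>());
            // ...and only then claim the value is initialized.
            data.assume_init()
        }
    }

    fn main() {
        let value = 0x1122_3344u32;
        let copy = to_enclave_sketch(&value as *const u32);
        assert_eq!(copy, value);
    }
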
 
diff --git a/library/std/src/sys/sgx/abi/usercalls/tests.rs b/library/std/src/sys/sgx/abi/usercalls/tests.rs
index cbf7d7d54f7..4320f0bccd1 100644
--- a/library/std/src/sys/sgx/abi/usercalls/tests.rs
+++ b/library/std/src/sys/sgx/abi/usercalls/tests.rs
@@ -1,8 +1,8 @@
-use super::alloc::copy_to_userspace;
 use super::alloc::User;
+use super::alloc::{copy_from_userspace, copy_to_userspace};
 
 #[test]
-fn test_copy_function() {
+fn test_copy_to_userspace_function() {
     let mut src = [0u8; 100];
     let mut dst = User::<[u8]>::uninitialized(100);
 
@@ -28,3 +28,29 @@ fn test_copy_function() {
         }
     }
 }
+
+#[test]
+fn test_copy_from_userspace_function() {
+    let mut dst = [0u8; 100];
+    let mut src = User::<[u8]>::uninitialized(100);
+
+    src.copy_from_enclave(&[0u8; 100]);
+
+    for size in 0..48 {
+        // For all possible alignment
+        for offset in 0..8 {
+            // overwrite complete dst
+            dst = [0u8; 100];
+
+            // Copy src[0..size] to dst + offset
+            unsafe { copy_from_userspace(src.as_ptr().offset(offset), dst.as_mut_ptr(), size) };
+
+            // Verify copy
+            for byte in 0..size {
+                unsafe {
+                    assert_eq!(dst[byte as usize], *src.as_ptr().offset(offset + byte as isize));
+                }
+            }
+        }
+    }
+}
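
Each of the two tests above exercises one copy direction in isolation. A round-trip variant in the same style, hypothetical and assuming the same imports and User API as this file, would push bytes out with copy_to_userspace and read them back with copy_from_userspace:

    // Hypothetical companion test; assumes the same `use super::alloc::...`
    // imports as the tests above.
    #[test]
    fn test_copy_roundtrip() {
        let src = [42u8; 100];
        let mut dst = [0u8; 100];
        let mut user = User::<[u8]>::uninitialized(100);

        for size in 0..48 {
            for offset in 0..8 {
                dst = [0u8; 100];
                unsafe {
                    // Enclave -> userspace at a misaligned offset...
                    copy_to_userspace(src.as_ptr(), user.as_mut_ptr().offset(offset), size);
                    // ...then userspace -> enclave from the same offset.
                    copy_from_userspace(user.as_ptr().offset(offset), dst.as_mut_ptr(), size);
                }
                // The first `size` bytes must survive the round trip.
                assert_eq!(&dst[..size], &src[..size]);
            }
        }
    }
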
diff --git a/library/std/src/sys/windows/mod.rs b/library/std/src/sys/windows/mod.rs
index b3f6d2d0aae..a9846a48488 100644
--- a/library/std/src/sys/windows/mod.rs
+++ b/library/std/src/sys/windows/mod.rs
@@ -2,6 +2,7 @@
 
 use crate::ffi::{CStr, OsStr, OsString};
 use crate::io::ErrorKind;
+use crate::mem::MaybeUninit;
 use crate::os::windows::ffi::{OsStrExt, OsStringExt};
 use crate::path::PathBuf;
 use crate::time::Duration;
@@ -204,8 +205,8 @@ where
     // This initial size also works around `GetFullPathNameW` returning
     // incorrect size hints for some short paths:
     // https://github.com/dylni/normpath/issues/5
-    let mut stack_buf = [0u16; 512];
-    let mut heap_buf = Vec::new();
+    let mut stack_buf: [MaybeUninit<u16>; 512] = MaybeUninit::uninit_array();
+    let mut heap_buf: Vec<MaybeUninit<u16>> = Vec::new();
     unsafe {
         let mut n = stack_buf.len();
         loop {
@@ -214,6 +215,11 @@ where
             } else {
                 let extra = n - heap_buf.len();
                 heap_buf.reserve(extra);
+                // We used `reserve` and not `reserve_exact`, so in theory we
+                // may have gotten more than requested. If so, we'd like to use
+                // it... so long as we won't cause overflow.
+                n = heap_buf.capacity().min(c::DWORD::MAX as usize);
+                // Safety: MaybeUninit<u16> does not need initialization
                 heap_buf.set_len(n);
                 &mut heap_buf[..]
             };
@@ -228,13 +234,13 @@ where
             // error" is still 0 then we interpret it as a 0 length buffer and
             // not an actual error.
             c::SetLastError(0);
-            let k = match f1(buf.as_mut_ptr(), n as c::DWORD) {
+            let k = match f1(buf.as_mut_ptr().cast::<u16>(), n as c::DWORD) {
                 0 if c::GetLastError() == 0 => 0,
                 0 => return Err(crate::io::Error::last_os_error()),
                 n => n,
             } as usize;
             if k == n && c::GetLastError() == c::ERROR_INSUFFICIENT_BUFFER {
-                n *= 2;
+                n = n.saturating_mul(2).min(c::DWORD::MAX as usize);
             } else if k > n {
                 n = k;
             } else if k == n {
@@ -244,7 +250,9 @@ where
                 // Therefore k never equals n.
                 unreachable!();
             } else {
-                return Ok(f2(&buf[..k]));
+                // Safety: First `k` values are initialized.
+                let slice: &[u16] = MaybeUninit::slice_assume_init_ref(&buf[..k]);
+                return Ok(f2(slice));
             }
         }
     }
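
The windows/mod.rs change stops zero-initializing the 512-element stack buffer and instead tracks initialization manually: the callback fills the first k elements and only that prefix is ever read back. A reduced sketch of the same pattern on stable Rust, using a raw-slice cast where the patch uses the unstable MaybeUninit::uninit_array and MaybeUninit::slice_assume_init_ref helpers; fill_prefix is a made-up stand-in for a call such as GetFullPathNameW:

    use std::mem::MaybeUninit;

    // Stand-in for a syscall: writes some prefix of the buffer and reports
    // how many elements it wrote.
    fn fill_prefix(buf: *mut u16, cap: usize) -> usize {
        let data: [u16; 5] = [104, 101, 108, 108, 111]; // "hello" as UTF-16
        let n = data.len().min(cap);
        unsafe { std::ptr::copy_nonoverlapping(data.as_ptr(), buf, n) };
        n
    }

    fn main() {
        // The buffer starts uninitialized; no 1 KiB memset up front.
        let mut stack_buf: [MaybeUninit<u16>; 512] = [MaybeUninit::uninit(); 512];
        let k = fill_prefix(stack_buf.as_mut_ptr().cast::<u16>(), stack_buf.len());
        // Safety: exactly the first `k` elements were initialized above.
        let filled: &[u16] = unsafe {
            std::slice::from_raw_parts(stack_buf.as_ptr().cast::<u16>(), k)
        };
        assert_eq!(String::from_utf16(filled).unwrap(), "hello");
    }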