Diffstat (limited to 'library/std/src/sys/sgx/abi/usercalls/alloc.rs')
 library/std/src/sys/sgx/abi/usercalls/alloc.rs | 120 ++++++++++++++++++++---
 1 file changed, 110 insertions(+), 10 deletions(-)
diff --git a/library/std/src/sys/sgx/abi/usercalls/alloc.rs b/library/std/src/sys/sgx/abi/usercalls/alloc.rs
index 3792a3820a5..ea24fedd0eb 100644
--- a/library/std/src/sys/sgx/abi/usercalls/alloc.rs
+++ b/library/std/src/sys/sgx/abi/usercalls/alloc.rs
@@ -1,13 +1,16 @@
 #![allow(unused)]
 
+use crate::arch::asm;
 use crate::cell::UnsafeCell;
+use crate::cmp;
+use crate::convert::TryInto;
 use crate::mem;
 use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut};
 use crate::ptr::{self, NonNull};
 use crate::slice;
 use crate::slice::SliceIndex;
 
-use super::super::mem::is_user_range;
+use super::super::mem::{is_enclave_range, is_user_range};
 use fortanix_sgx_abi::*;
 
 /// A type that can be safely read from or written to userspace.
@@ -210,7 +213,9 @@ where
         unsafe {
             // Mustn't call alloc with size 0.
             let ptr = if size > 0 {
-                rtunwrap!(Ok, super::alloc(size, T::align_of())) as _
+                // `copy_to_userspace` is more efficient when data is 8-byte aligned
+                let alignment = cmp::max(T::align_of(), 8);
+                rtunwrap!(Ok, super::alloc(size, alignment)) as _
             } else {
                 T::align_of() as _ // dangling pointer ok for size 0
             };
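
As a hedged illustration of the alignment bump above (the `user_alignment` helper is hypothetical; the real code passes `cmp::max(T::align_of(), 8)` to the private `super::alloc`):

    use std::cmp;
    use std::mem;

    // Sketch: choose the allocation alignment the way the patch does, so the
    // destination later handed to `copy_to_userspace` is always at least
    // 8-byte aligned and the quadword fast path stays available.
    fn user_alignment<T>() -> usize {
        cmp::max(mem::align_of::<T>(), 8)
    }

    #[repr(align(16))]
    struct Aligned16(u8);

    fn main() {
        assert_eq!(user_alignment::<u8>(), 8); // bumped up from 1
        assert_eq!(user_alignment::<Aligned16>(), 16); // larger alignment kept
    }
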
@@ -225,13 +230,9 @@ where
     /// Copies `val` into freshly allocated space in user memory.
     pub fn new_from_enclave(val: &T) -> Self {
         unsafe {
-            let ret = Self::new_uninit_bytes(mem::size_of_val(val));
-            ptr::copy(
-                val as *const T as *const u8,
-                ret.0.as_ptr() as *mut u8,
-                mem::size_of_val(val),
-            );
-            ret
+            let mut user = Self::new_uninit_bytes(mem::size_of_val(val));
+            user.copy_from_enclave(val);
+            user
         }
     }
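
For context, a hedged sketch of how this path is reached from the public, unstable SGX API (illustrative only; it compiles only for the x86_64-fortanix-unknown-sgx target on nightly):

    #![feature(sgx_platform)]

    use std::os::fortanix_sgx::usercalls::alloc::User;

    fn send_to_userspace(value: u64) -> User<u64> {
        // `new_from_enclave` now allocates uninitialized user memory and fills
        // it via `copy_from_enclave`, which routes through the mitigated
        // `copy_to_userspace` below instead of a plain `ptr::copy`.
        User::new_from_enclave(&value)
    }
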
 
@@ -304,6 +305,105 @@ where
     }
 }
 
+/// Copies `len` bytes of data from the enclave pointer `src` to the userspace pointer `dst`.
+///
+/// This function mitigates MMIO stale-data vulnerabilities by ensuring that all writes to
+/// untrusted memory are either:
+///  - preceded by the VERW instruction and followed by the MFENCE; LFENCE instruction
+///    sequence, or
+///  - in multiples of 8 bytes, aligned to an 8-byte boundary.
+///
+/// # Panics
+/// This function panics if:
+///
+/// * The `src` pointer is null
+/// * The `dst` pointer is null
+/// * The `src` memory range is not in enclave memory
+/// * The `dst` memory range is not in user memory
+///
+/// # References
+///  - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html
+///  - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/technical-documentation/processor-mmio-stale-data-vulnerabilities.html#inpage-nav-3-2-2
+pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+    unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+        unsafe {
+            let mut seg_sel: u16 = 0;
+            for off in 0..len {
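+                // Mitigation sequence per the Intel guidance referenced above:
+                // store the DS selector so VERW has a valid operand, issue VERW
+                // to overwrite stale fill-buffer data, write the byte, then
+                // serialize with MFENCE; LFENCE.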
+                asm!("
+                    mov %ds, ({seg_sel})
+                    verw ({seg_sel})
+                    movb {val}, ({dst})
+                    mfence
+                    lfence
+                    ",
+                    val = in(reg_byte) *src.offset(off as isize),
+                    dst = in(reg) dst.offset(off as isize),
+                    seg_sel = in(reg) &mut seg_sel,
+                    options(nostack, att_syntax)
+                );
+            }
+        }
+    }
+
+    unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+        unsafe {
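+            // Per the mitigation rule in the doc comment, writes in multiples
+            // of 8 bytes to 8-byte aligned addresses need no VERW + fence
+            // sequence; `rep movsq` copies `len / 8` quadwords from src to dst.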
+            asm!(
+                "rep movsq (%rsi), (%rdi)",
+                inout("rcx") len / 8 => _,
+                inout("rdi") dst => _,
+                inout("rsi") src => _,
+                options(att_syntax, nostack, preserves_flags)
+            );
+        }
+    }
+    assert!(!src.is_null());
+    assert!(!dst.is_null());
+    assert!(is_enclave_range(src, len));
+    assert!(is_user_range(dst, len));
+    assert!(len < isize::MAX as usize);
+    assert!(!(src as usize).overflowing_add(len).1);
+    assert!(!(dst as usize).overflowing_add(len).1);
+
+    if len < 8 {
+        // Fewer than 8 bytes: no aligned quadword chunk fits, so copy safely byte by byte
+        unsafe {
+            copy_bytewise_to_userspace(src, dst, len);
+        }
+    } else if len % 8 == 0 && dst as usize % 8 == 0 {
+        // Destination is 8-byte aligned and length is a multiple of 8: copy quadword by quadword
+        unsafe {
+            copy_aligned_quadwords_to_userspace(src, dst, len);
+        }
+    } else {
+        // Split copies into three parts:
+        //   +--------+
+        //   | small0 | Chunk smaller than 8 bytes
+        //   +--------+
+        //   |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
+        //   +--------+
+        //   | small1 | Chunk smaller than 8 bytes
+        //   +--------+
+
+        unsafe {
+            // Copy small0
+            let small0_size = (8 - dst as usize % 8) as u8;
+            let small0_src = src;
+            let small0_dst = dst;
+            copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
+
+            // Copy big
+            let small1_size = ((len - small0_size as usize) % 8) as u8;
+            let big_size = len - small0_size as usize - small1_size as usize;
+            let big_src = src.offset(small0_size as _);
+            let big_dst = dst.offset(small0_size as _);
+            copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
+
+            // Copy small1
+            let small1_src = src.offset(big_size as isize + small0_size as isize);
+            let small1_dst = dst.offset(big_size as isize + small0_size as isize);
+            copy_bytewise_to_userspace(small1_src, small1_dst, small1_size as _);
+        }
+    }
+}
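
To make the dispatch and the three-way split easier to check, here is a hedged, host-side model of the arithmetic above (`strategy` and `split_sizes` are hypothetical names for illustration; the real function operates on raw pointers and performs the copies):

    #[derive(Debug, PartialEq)]
    enum Strategy { Bytewise, Quadwords, Split }

    // Mirrors the if/else dispatch in `copy_to_userspace`.
    fn strategy(dst_addr: usize, len: usize) -> Strategy {
        if len < 8 {
            Strategy::Bytewise
        } else if len % 8 == 0 && dst_addr % 8 == 0 {
            Strategy::Quadwords
        } else {
            Strategy::Split
        }
    }

    // Mirrors the small0/big/small1 computation in the `Split` branch.
    fn split_sizes(dst_addr: usize, len: usize) -> (usize, usize, usize) {
        let small0 = 8 - dst_addr % 8; // yields 8 when `dst` is already aligned
        let small1 = (len - small0) % 8;
        let big = len - small0 - small1;
        (small0, big, small1)
    }

    fn main() {
        assert_eq!(strategy(0x1000, 4), Strategy::Bytewise);
        assert_eq!(strategy(0x1000, 64), Strategy::Quadwords);
        assert_eq!(strategy(0x1003, 20), Strategy::Split);

        let (small0, big, small1) = split_sizes(0x1003, 20);
        assert_eq!((small0, big, small1), (5, 8, 7));
        assert_eq!((0x1003 + small0) % 8, 0); // `big` starts 8-byte aligned
        assert_eq!(big % 8, 0); // and is a multiple of 8 bytes
    }

Note that when `dst` is already 8-byte aligned but `len` is not a multiple of 8, `small0` comes out as 8 rather than 0, so the first 8 bytes are copied bytewise; this is still correct, just one extra bytewise chunk.
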
+
 #[unstable(feature = "sgx_platform", issue = "56975")]
 impl<T: ?Sized> UserRef<T>
 where
@@ -352,7 +452,7 @@ where
     pub fn copy_from_enclave(&mut self, val: &T) {
         unsafe {
             assert_eq!(mem::size_of_val(val), mem::size_of_val(&*self.0.get()));
-            ptr::copy(
+            copy_to_userspace(
                 val as *const T as *const u8,
                 self.0.get() as *mut T as *mut u8,
                 mem::size_of_val(val),