#![allow(unused)]

+ use crate::arch::asm;

use crate::cell::UnsafeCell;
+ use crate::cmp;
+ use crate::convert::TryInto;
use crate::mem;
use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut};
use crate::ptr::{self, NonNull};
use crate::slice;
use crate::slice::SliceIndex;

- use super::super::mem::is_user_range;
+ use super::super::mem::{is_enclave_range, is_user_range};

use fortanix_sgx_abi::*;

/// A type that can be safely read from or written to userspace.
@@ -210,7 +213,9 @@ where
unsafe {
    // Mustn't call alloc with size 0.
    let ptr = if size > 0 {
-         rtunwrap!(Ok, super::alloc(size, T::align_of())) as _
+         // `copy_to_userspace` is more efficient when data is 8-byte aligned
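+         // (an 8-byte-aligned allocation lets `copy_to_userspace` below take
+         // its quadword fast path for the whole buffer whenever the length is
+         // also a multiple of 8)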
+         let alignment = cmp::max(T::align_of(), 8);
+         rtunwrap!(Ok, super::alloc(size, alignment)) as _
    } else {
        T::align_of() as _ // dangling pointer ok for size 0
    };
@@ -225,13 +230,9 @@ where
/// Copies `val` into freshly allocated space in user memory.
pub fn new_from_enclave(val: &T) -> Self {
    unsafe {
-         let ret = Self::new_uninit_bytes(mem::size_of_val(val));
-         ptr::copy(
-             val as *const T as *const u8,
-             ret.0.as_ptr() as *mut u8,
-             mem::size_of_val(val),
-         );
-         ret
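+         // Routing the copy through `copy_from_enclave` ensures it uses the
+         // mitigated `copy_to_userspace` below rather than a plain `ptr::copy`.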
+         let mut user = Self::new_uninit_bytes(mem::size_of_val(val));
+         user.copy_from_enclave(val);
+         user
    }
}
@@ -304,6 +305,105 @@ where
    }
}

+ /// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
+ ///
+ /// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
+ /// - preceded by the VERW instruction and followed by the MFENCE; LFENCE instruction sequence
+ /// - or are in multiples of 8 bytes, aligned to an 8-byte boundary
+ ///
+ /// # Panics
+ /// This function panics if:
+ ///
+ /// * The `src` pointer is null
+ /// * The `dst` pointer is null
+ /// * The `src` memory range is not in enclave memory
+ /// * The `dst` memory range is not in user memory
+ ///
+ /// # References
+ /// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html
+ /// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/technical-documentation/processor-mmio-stale-data-vulnerabilities.html#inpage-nav-3-2-2
+ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+     unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+         unsafe {
+             let mut seg_sel: u16 = 0;
+             for off in 0..len {
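+                 // Store the DS selector to `seg_sel`, then VERW that memory
+                 // operand: with the MD_CLEAR microcode update, VERW overwrites
+                 // the CPU's internal fill buffers, so the byte store below
+                 // cannot leave stale enclave data behind. The MFENCE; LFENCE
+                 // sequence required after the write completes the mitigation.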
+                 asm!("
+                     mov %ds, ({seg_sel})
+                     verw ({seg_sel})
+                     movb {val}, ({dst})
+                     mfence
+                     lfence
+                     ",
+                     val = in(reg_byte) *src.offset(off as isize),
+                     dst = in(reg) dst.offset(off as isize),
+                     seg_sel = in(reg) &mut seg_sel,
+                     options(nostack, att_syntax)
+                 );
+             }
+         }
+     }
+
+     unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+         unsafe {
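+             // REP MOVSQ copies RCX quadwords from [RSI] to [RDI]. Writes that
+             // are 8-byte aligned and in multiples of 8 bytes are not affected
+             // by the stale-data issue, so no VERW sequence is needed here.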
+             asm!(
+                 "rep movsq (%rsi), (%rdi)",
+                 inout("rcx") len / 8 => _,
+                 inout("rdi") dst => _,
+                 inout("rsi") src => _,
+                 options(att_syntax, nostack, preserves_flags)
+             );
+         }
+     }
+     assert!(!src.is_null());
+     assert!(!dst.is_null());
+     assert!(is_enclave_range(src, len));
+     assert!(is_user_range(dst, len));
+     assert!(len < isize::MAX as usize);
+     assert!(!(src as usize).overflowing_add(len).1);
+     assert!(!(dst as usize).overflowing_add(len).1);
+
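+     // Dispatch on size and alignment: short or unaligned copies take the
+     // bytewise VERW-mitigated path; 8-byte-aligned multiples of 8 take the
+     // quadword fast path; everything else is split into a mix of the two.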
+     if len < 8 {
+         // Can't align on an 8-byte boundary: copy safely byte per byte
+         unsafe {
+             copy_bytewise_to_userspace(src, dst, len);
+         }
+     } else if len % 8 == 0 && dst as usize % 8 == 0 {
+         // Copying 8-byte-aligned quadwords: copy quadword by quadword
+         unsafe {
+             copy_aligned_quadwords_to_userspace(src, dst, len);
+         }
+     } else {
+         // Split the copy into three parts:
+         // +--------+
+         // | small0 | Chunk smaller than 8 bytes
+         // +--------+
+         // | big    | Chunk 8-byte aligned, and size a multiple of 8 bytes
+         // +--------+
+         // | small1 | Chunk smaller than 8 bytes
+         // +--------+
+
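+         // e.g. len == 20 with dst % 8 == 5 splits into small0 = 3 bytes,
+         // big = 16 bytes, and small1 = 1 byte. (If dst is already 8-byte
+         // aligned, small0_size computes to 8 and the first 8 bytes are still
+         // copied bytewise: safe, just not the minimal split.)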
+         unsafe {
+             // Copy small0
+             let small0_size = (8 - dst as usize % 8) as u8;
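+             // dst % 8 is in 0..8, so small0_size is in 1..=8 and the u8 cast
+             // cannot truncate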
+             let small0_src = src;
+             let small0_dst = dst;
+             copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
+
+             // Copy big
+             let small1_size = ((len - small0_size as usize) % 8) as u8;
+             let big_size = len - small0_size as usize - small1_size as usize;
+             let big_src = src.offset(small0_size as _);
+             let big_dst = dst.offset(small0_size as _);
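+             // by construction big_size is a multiple of 8 and big_dst is
+             // 8-byte aligned, so the quadword path's requirements hold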
+             copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
+
+             // Copy small1
+             let small1_src = src.offset(big_size as isize + small0_size as isize);
+             let small1_dst = dst.offset(big_size as isize + small0_size as isize);
+             copy_bytewise_to_userspace(small1_src, small1_dst, small1_size as _);
+         }
+     }
+ }
+

#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T: ?Sized> UserRef<T>
where
@@ -352,7 +452,7 @@ where
pub fn copy_from_enclave(&mut self, val: &T) {
    unsafe {
        assert_eq!(mem::size_of_val(val), mem::size_of_val(&*self.0.get()));
-         ptr::copy(
+         copy_to_userspace(
            val as *const T as *const u8,
            self.0.get() as *mut T as *mut u8,
            mem::size_of_val(val),