@@ -1,13 +1,15 @@
 #![allow(unused)]
 
+use crate::arch::asm;
 use crate::cell::UnsafeCell;
+use crate::convert::TryInto;
 use crate::mem;
 use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut};
 use crate::ptr::{self, NonNull};
 use crate::slice;
 use crate::slice::SliceIndex;
 
-use super::super::mem::is_user_range;
+use super::super::mem::{is_enclave_range, is_user_range};
 use fortanix_sgx_abi::*;
 
 /// A type that can be safely read from or written to userspace.
|
@@ -300,6 +302,100 @@ where
     }
 }
 
+/// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`.
+///
+/// This function mitigates stale-data vulnerabilities; see
+/// https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html
+///
+/// # Panics
+/// This function panics if:
+///
+/// * The `src` pointer is null
+/// * The `dst` pointer is null
+/// * The `src` memory range is not in enclave memory
+/// * The `dst` memory range is not in user memory
+unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+    unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+        unsafe {
+            let mut seg_sel: u16 = 0;
+            for off in 0..len {
+                asm!("
+                    mov %ds, ({seg_sel})
+                    verw ({seg_sel})
+                    movb {val}, ({dst})
+                    mfence
+                    lfence
+                    ",
+                    val = in(reg_byte) *src.offset(off as isize),
+                    dst = in(reg) dst.offset(off as isize),
+                    seg_sel = in(reg) &mut seg_sel,
+                    options(nostack, att_syntax)
+                );
+            }
+        }
+    }
+
+    unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+        unsafe {
+            asm!(
+                "rep movsq (%rsi), (%rdi)",
+                inout("rcx") len / 8 => _,
+                inout("rdi") dst => _,
+                inout("rsi") src => _,
+                options(att_syntax, nostack, preserves_flags)
+            );
+        }
+    }
+    assert!(!src.is_null());
+    assert!(!dst.is_null());
+    assert!(is_enclave_range(src, len));
+    assert!(is_user_range(dst, len));
+    assert!(len < isize::MAX as usize);
+    assert!(!(src as usize).overflowing_add(len).1);
+    assert!(!(dst as usize).overflowing_add(len).1);
+
+    if len < 8 {
+        // Can't align on an 8-byte boundary: copy safely byte by byte
+        unsafe {
+            copy_bytewise_to_userspace(src, dst, len);
+        }
+    } else if len % 8 == 0 && dst as usize % 8 == 0 {
+        // Copying 8-byte aligned quadwords: copy quadword by quadword
+        unsafe {
+            copy_aligned_quadwords_to_userspace(src, dst, len);
+        }
+    } else {
+        // Split copies into three parts:
+        // +--------+
+        // | small0 | Chunk smaller than 8 bytes
+        // +--------+
+        // |  big   | Chunk 8-byte aligned, and size a multiple of 8 bytes
+        // +--------+
+        // | small1 | Chunk smaller than 8 bytes
+        // +--------+
+
+        unsafe {
+            // Copy small0
+            let small0_size = (8 - dst as usize % 8) as u8;
+            let small0_src = src;
+            let small0_dst = dst;
+            copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
+
+            // Copy big
+            let small1_size = ((len - small0_size as usize) % 8) as u8;
+            let big_size = len - small0_size as usize - small1_size as usize;
+            let big_src = src.offset(small0_size as _);
+            let big_dst = dst.offset(small0_size as _);
+            copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
+
+            // Copy small1
+            let small1_src = src.offset(big_size as isize + small0_size as isize);
+            let small1_dst = dst.offset(big_size as isize + small0_size as isize);
+            copy_bytewise_to_userspace(small1_src, small1_dst, small1_size as _);
+        }
+    }
+}
+
 #[unstable(feature = "sgx_platform", issue = "56975")]
 impl<T: ?Sized> UserRef<T>
 where
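Review note: the patch treats stores to userspace that are 8-byte aligned and a multiple of 8 bytes long as safe to issue directly via `rep movsq`, and routes every other store through the VERW + MFENCE/LFENCE sequence in `copy_bytewise_to_userspace`, which clears stale fill-buffer state as recommended in the linked advisory. The chunk arithmetic in the `else` branch above can be sanity-checked in isolation; the standalone sketch below (the name `split_copy` is hypothetical, not part of this patch) mirrors that computation and asserts the invariants the quadword path relies on.

    // Sketch: mirrors the small0/big/small1 split from copy_to_userspace.
    // `dst` stands in for the destination address; only the arithmetic is modeled.
    fn split_copy(dst: usize, len: usize) -> (usize, usize, usize) {
        assert!(len >= 8);
        let small0 = 8 - dst % 8; // unaligned head, 1..=8 bytes
        let small1 = (len - small0) % 8; // unaligned tail, 0..=7 bytes
        let big = len - small0 - small1; // aligned middle
        assert_eq!((dst + small0) % 8, 0); // quadword path starts aligned...
        assert_eq!(big % 8, 0); // ...and copies whole quadwords
        (small0, big, small1)
    }

    fn main() {
        for dst in 0..16 {
            for len in 8..64 {
                let (s0, big, s1) = split_copy(dst, len);
                assert_eq!(s0 + big + s1, len); // the three chunks cover the range
            }
        }
        println!("split invariants hold");
    }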
|
@@ -348,7 +444,7 @@ where
     pub fn copy_from_enclave(&mut self, val: &T) {
         unsafe {
             assert_eq!(mem::size_of_val(val), mem::size_of_val(&*self.0.get()));
-            ptr::copy(
+            copy_to_userspace(
                 val as *const T as *const u8,
                 self.0.get() as *mut T as *mut u8,
                 mem::size_of_val(val),