@@ -9,31 +9,52 @@ use super::SIZE_4K;
 use crate::arch::shared::decrypt;
 
 static SHARED_MEMORY_ALLOCATOR: LockedHeap = LockedHeap::empty();
+static PRIVATE_SHADOW_ALLOCATOR: LockedHeap = LockedHeap::empty();
 
 pub fn init_shared_memory(start: u64, size: usize) {
+    let shadow_size = size / 2;
+    let shared_start = start + shadow_size as u64;
+    let shared_size = size - shadow_size;
+
     // Set the shared memory region to be shared
-    decrypt(start, size);
+    decrypt(shared_start, shared_size);
     // Initialize the shared memory allocator
     unsafe {
-        SHARED_MEMORY_ALLOCATOR.lock().init(start as *mut u8, size);
+        SHARED_MEMORY_ALLOCATOR
+            .lock()
+            .init(shared_start as *mut u8, shared_size);
+        PRIVATE_SHADOW_ALLOCATOR
+            .lock()
+            .init(start as *mut u8, shadow_size);
     }
 }
 
 pub struct SharedMemory {
     addr: usize,
+    shadow_addr: usize,
     size: usize,
 }
 
 impl SharedMemory {
     pub fn new(num_page: usize) -> Option<Self> {
         let addr = unsafe { alloc_shared_pages(num_page)? };
+        let shadow_addr = unsafe { alloc_private_shadow_pages(num_page)? };
 
         Some(Self {
             addr,
+            shadow_addr,
             size: num_page * SIZE_4K,
         })
     }
 
+    pub fn copy_to_private_shadow(&mut self) -> &[u8] {
+        let shadow =
+            unsafe { core::slice::from_raw_parts_mut(self.shadow_addr as *mut u8, self.size) };
+        shadow.copy_from_slice(self.as_bytes());
+
+        shadow
+    }
+
     pub fn as_bytes(&self) -> &[u8] {
         unsafe { core::slice::from_raw_parts(self.addr as *const u8, self.size) }
     }
@@ -45,24 +66,15 @@ impl SharedMemory {
 
 impl Drop for SharedMemory {
     fn drop(&mut self) {
+        unsafe { free_private_shadow_pages(self.shadow_addr, self.size / SIZE_4K) }
         unsafe { free_shared_pages(self.addr, self.size / SIZE_4K) }
     }
 }
 
 /// # Safety
 /// The caller needs to explicitly call the `free_shared_pages` function after use
 pub unsafe fn alloc_shared_pages(num: usize) -> Option<usize> {
-    let size = SIZE_4K.checked_mul(num)?;
-
-    let addr = SHARED_MEMORY_ALLOCATOR
-        .lock()
-        .allocate_first_fit(Layout::from_size_align(size, SIZE_4K).ok()?)
-        .map(|ptr| ptr.as_ptr() as usize)
-        .ok()?;
-
-    core::slice::from_raw_parts_mut(addr as *mut u8, size).fill(0);
-
-    Some(addr)
+    allocator_alloc(&SHARED_MEMORY_ALLOCATOR, num)
 }
 
 /// # Safety
@@ -74,16 +86,46 @@ pub unsafe fn alloc_shared_page() -> Option<usize> {
 /// # Safety
 /// The caller needs to ensure the correctness of the addr and page num
 pub unsafe fn free_shared_pages(addr: usize, num: usize) {
-    let size = SIZE_4K.checked_mul(num).expect("Invalid page num");
-
-    SHARED_MEMORY_ALLOCATOR.lock().deallocate(
-        NonNull::new(addr as *mut u8).unwrap(),
-        Layout::from_size_align(size, SIZE_4K).unwrap(),
-    );
+    allocator_free(&SHARED_MEMORY_ALLOCATOR, addr, num)
 }
 
 /// # Safety
 /// The caller needs to ensure the correctness of the addr
 pub unsafe fn free_shared_page(addr: usize) {
     free_shared_pages(addr, 1)
 }
+
+/// # Safety
+/// The caller needs to explicitly call the `free_private_shadow_pages` function after use
+unsafe fn alloc_private_shadow_pages(num: usize) -> Option<usize> {
+    allocator_alloc(&PRIVATE_SHADOW_ALLOCATOR, num)
+}
+
+/// # Safety
+/// The caller needs to ensure the correctness of the addr and page num
+unsafe fn free_private_shadow_pages(addr: usize, num: usize) {
+    allocator_free(&PRIVATE_SHADOW_ALLOCATOR, addr, num)
+}
+
+unsafe fn allocator_alloc(allocator: &LockedHeap, num: usize) -> Option<usize> {
+    let size = SIZE_4K.checked_mul(num)?;
+
+    let addr = allocator
+        .lock()
+        .allocate_first_fit(Layout::from_size_align(size, SIZE_4K).ok()?)
+        .map(|ptr| ptr.as_ptr() as usize)
+        .ok()?;
+
+    core::slice::from_raw_parts_mut(addr as *mut u8, size).fill(0);
+
+    Some(addr)
+}
+
+unsafe fn allocator_free(allocator: &LockedHeap, addr: usize, num: usize) {
+    let size = SIZE_4K.checked_mul(num).expect("Invalid page num");
+
+    allocator.lock().deallocate(
+        NonNull::new(addr as *mut u8).unwrap(),
+        Layout::from_size_align(size, SIZE_4K).unwrap(),
+    );
+}
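A minimal caller sketch of the API this change introduces, for context only. It assumes the `SharedMemory::new` and `copy_to_private_shadow` shown in the diff; `handle_device_response`, `parse_response`, and the device-write step are hypothetical illustrations, not part of the change. The idea is to let the untrusted side fill the shared buffer, then snapshot it into the private shadow and validate only the private copy, so its contents cannot change between check and use.

// Hypothetical usage sketch (not part of this change).
fn handle_device_response() -> Option<()> {
    // One 4 KiB page of shared memory, backed by an equally sized private shadow.
    let mut msg = SharedMemory::new(1)?;

    // ... the VMM/device writes its response into the shared page here ...

    // Copy the shared contents into the private shadow; from this point on,
    // only the private copy is read, so the host cannot modify it mid-parse.
    let snapshot = msg.copy_to_private_shadow();
    parse_response(snapshot)
}

// Hypothetical validation routine, assumed for illustration only.
fn parse_response(_bytes: &[u8]) -> Option<()> {
    Some(())
}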