// except according to those terms.

use num::NonZeroUsize;
+use slice;
+use str;

-use super::waitqueue::{WaitVariable, WaitQueue, SpinMutex, NotifiedTcs, try_lock_or_false};
+use super::waitqueue::{
+    try_lock_or_false, NotifiedTcs, SpinMutex, SpinMutexGuard, WaitQueue, WaitVariable,
+};
+use mem;

pub struct RWLock {
    readers: SpinMutex<WaitVariable<Option<NonZeroUsize>>>,
    writer: SpinMutex<WaitVariable<bool>>,
}

+// Check at compile time that RWLock has a size of 128 bytes.
+#[allow(dead_code)]
+unsafe fn rw_lock_size_assert(r: RWLock) {
+    mem::transmute::<RWLock, [u8; 128]>(r);
+}
+
//unsafe impl Send for RWLock {}
//unsafe impl Sync for RWLock {} // FIXME

impl RWLock {
    pub const fn new() -> RWLock {
        RWLock {
            readers: SpinMutex::new(WaitVariable::new(None)),
-            writer: SpinMutex::new(WaitVariable::new(false))
+            writer: SpinMutex::new(WaitVariable::new(false)),
        }
    }

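As an aside: rw_lock_size_assert compiles only while RWLock is exactly 128 bytes, because mem::transmute requires its source and destination types to have identical sizes; that is what pins the layout that libunwind hard-codes. A hypothetical alternative sketch (it assumes a newer toolchain than this change targets, one where assert! works in const contexts), not part of the commit:

    // Sketch only: compilation fails if the size of RWLock ever drifts from 128.
    const _: () = assert!(core::mem::size_of::<RWLock>() == 128);
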
@@ -89,9 +100,11 @@ impl RWLock {
    }

    #[inline]
-    pub unsafe fn read_unlock(&self) {
-        let mut rguard = self.readers.lock();
-        let wguard = self.writer.lock();
+    unsafe fn __read_unlock(
+        &self,
+        mut rguard: SpinMutexGuard<WaitVariable<Option<NonZeroUsize>>>,
+        wguard: SpinMutexGuard<WaitVariable<bool>>,
+    ) {
        *rguard.lock_var_mut() = NonZeroUsize::new(rguard.lock_var().unwrap().get() - 1);
        if rguard.lock_var().is_some() {
            // There are other active readers
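For context: the reader count is stored as an Option<NonZeroUsize>, so the decrement above doubles as the last-reader check, because NonZeroUsize::new returns None exactly when the new count is zero. A minimal standalone illustration (hypothetical, not part of this change):

    use std::num::NonZeroUsize;

    fn main() {
        // Dropping from one reader to zero produces None ("no readers left").
        let last = NonZeroUsize::new(1).unwrap();
        assert!(NonZeroUsize::new(last.get() - 1).is_none());
        // Dropping from two readers to one keeps Some(..), so readers remain.
        let two = NonZeroUsize::new(2).unwrap();
        assert!(NonZeroUsize::new(two.get() - 1).is_some());
    }
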
@@ -107,9 +120,18 @@ impl RWLock {
    }

    #[inline]
-    pub unsafe fn write_unlock(&self) {
+    pub unsafe fn read_unlock(&self) {
        let rguard = self.readers.lock();
        let wguard = self.writer.lock();
+        self.__read_unlock(rguard, wguard);
+    }
+
+    #[inline]
+    unsafe fn __write_unlock(
+        &self,
+        rguard: SpinMutexGuard<WaitVariable<Option<NonZeroUsize>>>,
+        wguard: SpinMutexGuard<WaitVariable<bool>>,
+    ) {
        if let Err(mut wguard) = WaitQueue::notify_one(wguard) {
            // No writers waiting, release the write lock
            *wguard.lock_var_mut() = false;
@@ -128,6 +150,109 @@ impl RWLock {
        }
    }

+    #[inline]
+    pub unsafe fn write_unlock(&self) {
+        let rguard = self.readers.lock();
+        let wguard = self.writer.lock();
+        self.__write_unlock(rguard, wguard);
+    }
+
+    #[inline]
+    unsafe fn unlock(&self) {
+        let rguard = self.readers.lock();
+        let wguard = self.writer.lock();
+        if *wguard.lock_var() == true {
+            self.__write_unlock(rguard, wguard);
+        } else {
+            self.__read_unlock(rguard, wguard);
+        }
+    }
+
    #[inline]
    pub unsafe fn destroy(&self) {}
}
+
+const EINVAL: i32 = 22;
+
+#[no_mangle]
+pub unsafe extern "C" fn __rust_rwlock_rdlock(p: *mut RWLock) -> i32 {
+    if p.is_null() {
+        return EINVAL;
+    }
+    (*p).read();
+    return 0;
+}
+
+#[no_mangle]
+pub unsafe extern "C" fn __rust_rwlock_wrlock(p: *mut RWLock) -> i32 {
+    if p.is_null() {
+        return EINVAL;
+    }
+    (*p).write();
+    return 0;
+}
+#[no_mangle]
+pub unsafe extern "C" fn __rust_rwlock_unlock(p: *mut RWLock) -> i32 {
+    if p.is_null() {
+        return EINVAL;
+    }
+    (*p).unlock();
+    return 0;
+}
+
+#[no_mangle]
+pub unsafe extern "C" fn __rust_print_err(m: *mut u8, s: i32) {
+    if s < 0 {
+        return;
+    }
+    let buf = slice::from_raw_parts(m as *const u8, s as _);
+    if let Ok(s) = str::from_utf8(&buf[..buf.iter().position(|&b| b == 0).unwrap_or(buf.len())]) {
+        eprint!("{}", s);
+    }
+}
+
+#[no_mangle]
+pub unsafe extern "C" fn __rust_abort() {
+    ::sys::abort_internal();
+}
+
+#[cfg(test)]
+mod tests {
+
+    use super::*;
+    use core::array::FixedSizeArray;
+    use mem::MaybeUninit;
+    use {mem, ptr};
+
+    // This test verifies that the bytes of an initialized RWLock are the ones
+    // hard-coded in libunwind.
+    // If they change, src/UnwindRustSgx.h in libunwind must be updated.
+    #[test]
+    fn test_c_rwlock_initializer() {
+        const RWLOCK_INIT: &[u8] = &[
+            0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+        ];
+
+        let mut init = MaybeUninit::<RWLock>::zeroed();
+        init.set(RWLock::new());
+        assert_eq!(
+            mem::transmute::<_, [u8; 128]>(init.into_inner()).as_slice(),
+            RWLOCK_INIT
+        );
+    }
+}
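For context: the __rust_rwlock_* functions exist so foreign code such as libunwind can drive this lock through a C ABI. Below is a hypothetical usage sketch, written in Rust but calling the extern "C" entry points the way a C caller would; it assumes it sits alongside the module above and runs on the SGX target:

    use core::ptr;

    unsafe fn ffi_smoke_test() {
        // A null pointer is rejected with EINVAL (22) instead of being dereferenced.
        assert_eq!(__rust_rwlock_rdlock(ptr::null_mut()), EINVAL);

        // A real lock round-trips: take a read lock, then release it through the
        // generic unlock entry point, which checks the writer flag to pick the
        // read or write unlock path.
        let mut lock = RWLock::new();
        assert_eq!(__rust_rwlock_rdlock(&mut lock), 0);
        assert_eq!(__rust_rwlock_unlock(&mut lock), 0);
    }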