@@ -1,18 +1,14 @@
 use crate::cell::UnsafeCell;
 use crate::ptr;
 use crate::sync::atomic::{
-    AtomicBool, AtomicPtr, AtomicU32,
+    AtomicPtr, AtomicU32,
     Ordering::{AcqRel, Acquire, Relaxed, Release},
 };
 use crate::sys::c;
 
 #[cfg(test)]
 mod tests;
 
-/// An optimization hint. The compiler is often smart enough to know if an atomic
-/// is never set and can remove dead code based on that fact.
-static HAS_DTORS: AtomicBool = AtomicBool::new(false);
-
 // Using a per-thread list avoids the problems in synchronizing global state.
 #[thread_local]
 #[cfg(target_thread_local)]
@@ -24,12 +20,11 @@ static DESTRUCTORS: crate::cell::RefCell<Vec<(*mut u8, unsafe extern "C" fn(*mut
 #[inline(never)]
 #[cfg(target_thread_local)]
 pub unsafe fn register_keyless_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
+    dtors_used();
     match DESTRUCTORS.try_borrow_mut() {
         Ok(mut dtors) => dtors.push((t, dtor)),
         Err(_) => rtabort!("global allocator may not use TLS"),
     }
-
-    HAS_DTORS.store(true, Relaxed);
 }
 
 #[inline(never)] // See comment above
@@ -130,6 +125,7 @@ impl StaticKey {
     #[cold]
     unsafe fn init(&'static self) -> Key {
         if self.dtor.is_some() {
+            dtors_used();
             let mut pending = c::FALSE;
             let r = c::InitOnceBeginInitialize(self.once.get(), 0, &mut pending, ptr::null_mut());
             assert_eq!(r, c::TRUE);
@@ -215,7 +211,6 @@ unsafe fn register_dtor(key: &'static StaticKey) {
             Err(new) => head = new,
         }
     }
-    HAS_DTORS.store(true, Release);
 }
 
 // -------------------------------------------------------------------------
@@ -281,17 +276,16 @@ unsafe fn register_dtor(key: &'static StaticKey) {
 // the address of the symbol to ensure it sticks around.
 
 #[link_section = ".CRT$XLB"]
-#[allow(dead_code, unused_variables)]
-#[used] // we don't want LLVM eliminating this symbol for any reason, and
-// when the symbol makes it to the linker the linker will take over
 pub static p_thread_callback: unsafe extern "system" fn(c::LPVOID, c::DWORD, c::LPVOID) =
     on_tls_callback;
 
-#[allow(dead_code, unused_variables)]
-unsafe extern "system" fn on_tls_callback(h: c::LPVOID, dwReason: c::DWORD, pv: c::LPVOID) {
-    if !HAS_DTORS.load(Acquire) {
-        return;
-    }
+fn dtors_used() {
+    // we don't want LLVM eliminating p_thread_callback when destructors are used.
+    // when the symbol makes it to the linker the linker will take over
+    unsafe { crate::intrinsics::volatile_load(&p_thread_callback) };
+}
+
+unsafe extern "system" fn on_tls_callback(_h: c::LPVOID, dwReason: c::DWORD, _pv: c::LPVOID) {
     if dwReason == c::DLL_THREAD_DETACH || dwReason == c::DLL_PROCESS_DETACH {
         #[cfg(not(target_thread_local))]
         run_dtors();
@@ -313,7 +307,7 @@ unsafe extern "system" fn on_tls_callback(h: c::LPVOID, dwReason: c::DWORD, pv:
     unsafe fn reference_tls_used() {}
 }
 
-#[allow(dead_code)] // actually called below
+#[cfg(not(target_thread_local))]
 unsafe fn run_dtors() {
     for _ in 0..5 {
         let mut any_run = false;
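For readers skimming the diff, here is a small, standalone sketch (not part of the commit) of the keep-alive pattern the new dtors_used relies on: instead of an eagerly-checked HAS_DTORS flag, a volatile read of the callback symbol is performed only when a destructor is actually registered, which forces the optimizer and linker to keep the TLS callback in the binary exactly when it is needed. The names below (CALLBACK, mark_callback_used, register_dtor_sketch) are illustrative only, and std::ptr::read_volatile stands in for the internal crate::intrinsics::volatile_load used in the real code.

use std::ptr;

// Stand-in for the Windows TLS callback; in std this is `on_tls_callback`, and the
// static below would live in the `.CRT$XLB` link section so the loader invokes it
// on thread attach/detach.
unsafe extern "system" fn tls_callback(_module: *mut u8, _reason: u32, _reserved: *mut u8) {
    // per-thread destructors would run here (elided in this sketch)
}

static CALLBACK: unsafe extern "system" fn(*mut u8, u32, *mut u8) = tls_callback;

// Hypothetical equivalent of `dtors_used`: the volatile read is an observable side
// effect, so neither the optimizer nor the linker may discard `CALLBACK` (and with
// it `tls_callback`) once this is reachable from a used code path.
fn mark_callback_used() {
    let _ = unsafe { ptr::read_volatile(&CALLBACK) };
}

// Hypothetical equivalent of `register_keyless_dtor`: touch the symbol only when a
// destructor is actually registered, so programs that never use TLS destructors
// pay nothing for the callback.
fn register_dtor_sketch() {
    mark_callback_used();
    // ...push (ptr, dtor) onto the per-thread destructor list...
}

fn main() {
    register_dtor_sketch();
}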