Commit ad6fd9b

Rollup merge of #79039 - thomcc:weakly-relaxing, r=Amanieu
Tighten the bounds on atomic Ordering in std::sys::unix::weak::Weak

This moves the lookup from multiple SeqCst loads to a Relaxed load plus an Acquire fence, taken only if we are actually going to use the data. I would love to avoid the Acquire fence too, but doing so would need Ordering::Consume, which neither Rust nor LLVM supports (a shame, since this fence is hardly free on ARM, which is what I was hoping to improve).

r? ``@Amanieu`` (Sorry for always picking you, but I know a lot of people wouldn't feel comfortable reviewing atomic ordering changes.)
2 parents: 92dcf6d + 55d7f73
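To make the new scheme concrete, here is a minimal, self-contained sketch (not std's actual code) of the pattern the commit adopts: a Relaxed load of the cached address, an Acquire fence only on the path that actually uses it, and a Release store to publish. The sentinel values (1 = not yet looked up, 0 = symbol absent) mirror the real code; `lookup` is a hypothetical stand-in for the `dlsym` call:

```rust
use std::sync::atomic::{self, AtomicUsize, Ordering};

// Sentinel values mirroring std's scheme: 1 = not yet looked up,
// 0 = symbol absent, anything else = the function's address.
static ADDR: AtomicUsize = AtomicUsize::new(1);

// Hypothetical stand-in for the dlsym() call in std's `fetch`.
fn lookup() -> usize {
    0xdead_beef
}

fn get() -> Option<usize> {
    // Relaxed is enough for the load itself; visibility of whatever the
    // loader wrote is handled by the fence on the path that uses the value.
    match ADDR.load(Ordering::Relaxed) {
        1 => {
            // First call: do the lookup and publish it. A racing thread may
            // also get here; both store the same value, so the race is benign.
            let val = lookup();
            // Release pairs with the Acquire fence below.
            ADDR.store(val, Ordering::Release);
            if val == 0 { None } else { Some(val) }
        }
        0 => None, // already looked up; the symbol is absent
        addr => {
            // Ideally this would be a consume-ordered load, but Rust (and
            // LLVM) has no memory_order_consume, so fence instead.
            atomic::fence(Ordering::Acquire);
            Some(addr)
        }
    }
}

fn main() {
    assert_eq!(get(), Some(0xdead_beef)); // slow path: performs the lookup
    assert_eq!(get(), Some(0xdead_beef)); // fast path: Relaxed load + fence
}
```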

1 file changed: +40 −6

library/std/src/sys/unix/weak.rs

```diff
@@ -24,7 +24,7 @@
 use crate::ffi::CStr;
 use crate::marker;
 use crate::mem;
-use crate::sync::atomic::{AtomicUsize, Ordering};
+use crate::sync::atomic::{self, AtomicUsize, Ordering};
 
 macro_rules! weak {
     (fn $name:ident($($t:ty),*) -> $ret:ty) => (
@@ -47,15 +47,49 @@ impl<F> Weak<F> {
     pub fn get(&self) -> Option<F> {
         assert_eq!(mem::size_of::<F>(), mem::size_of::<usize>());
         unsafe {
-            if self.addr.load(Ordering::SeqCst) == 1 {
-                self.addr.store(fetch(self.name), Ordering::SeqCst);
-            }
-            match self.addr.load(Ordering::SeqCst) {
+            // Relaxed is fine here because we fence before reading through the
+            // pointer (see the comment below).
+            match self.addr.load(Ordering::Relaxed) {
+                1 => self.initialize(),
                 0 => None,
-                addr => Some(mem::transmute_copy::<usize, F>(&addr)),
+                addr => {
+                    let func = mem::transmute_copy::<usize, F>(&addr);
+                    // The caller is presumably going to read through this value
+                    // (by calling the function we've dlsymed). This means we'd
+                    // need to have loaded it with at least C11's consume
+                    // ordering in order to be guaranteed that the data we read
+                    // from the pointer isn't from before the pointer was
+                    // stored. Rust has no equivalent to memory_order_consume,
+                    // so we use an acquire fence (sorry, ARM).
+                    //
+                    // Now, in practice this likely isn't needed even on CPUs
+                    // where relaxed and consume mean different things. The
+                    // symbols we're loading are probably present (or not) at
+                    // init, and even if they aren't the runtime dynamic loader
+                    // is extremely likely to have sufficient barriers internally
+                    // (possibly implicitly, for example the ones provided by
+                    // invoking `mprotect`).
+                    //
+                    // That said, none of that's *guaranteed*, and so we fence.
+                    atomic::fence(Ordering::Acquire);
+                    Some(func)
+                }
             }
         }
     }
+
+    // Cold because it should only happen during first-time initialization.
+    #[cold]
+    unsafe fn initialize(&self) -> Option<F> {
+        let val = fetch(self.name);
+        // This synchronizes with the acquire fence in `get`.
+        self.addr.store(val, Ordering::Release);
+
+        match val {
+            0 => None,
+            addr => Some(mem::transmute_copy::<usize, F>(&addr)),
+        }
+    }
 }
 
 unsafe fn fetch(name: &str) -> usize {
```
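The correctness argument in the diff's comments leans on a fence rule from the C++11/Rust memory model: an Acquire fence that is sequenced after a Relaxed load which observed a Release store synchronizes with that store. Below is a minimal sketch of that pairing, not part of the commit, with hypothetical `DATA`/`ADDR` statics standing in for the loader's writes and the cached symbol address:

```rust
use std::sync::atomic::{self, AtomicUsize, Ordering};
use std::thread;

static DATA: AtomicUsize = AtomicUsize::new(0); // stand-in for loader-written state
static ADDR: AtomicUsize = AtomicUsize::new(1); // 1 = "not yet published"

fn main() {
    let writer = thread::spawn(|| {
        DATA.store(7, Ordering::Relaxed); // writes that must become visible
        ADDR.store(2, Ordering::Release); // publish (like `initialize`)
    });
    let reader = thread::spawn(|| {
        // Spin until we observe the published value (like `get`).
        while ADDR.load(Ordering::Relaxed) == 1 {
            std::hint::spin_loop();
        }
        // The fence, sequenced after a Relaxed load that saw the Release
        // store, synchronizes with that store...
        atomic::fence(Ordering::Acquire);
        // ...so this is guaranteed to read 7, never 0.
        assert_eq!(DATA.load(Ordering::Relaxed), 7);
    });
    writer.join().unwrap();
    reader.join().unwrap();
}
```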
