
Commit 3ad4a0e

committed
use with_lock to avoid drop
1 parent 3b0715f commit 3ad4a0e

File tree

6 files changed, +121 -157 lines changed


compiler/rustc_query_system/src/cache.rs

+3 -3

@@ -26,17 +26,17 @@ impl<Key, Value> Default for Cache<Key, Value> {
 impl<Key, Value> Cache<Key, Value> {
     /// Actually frees the underlying memory in contrast to what stdlib containers do on `clear`
     pub fn clear(&self) {
-        *self.hashmap.borrow_mut() = Default::default();
+        self.hashmap.with_lock(|map| *map = Default::default());
     }
 }
 
 impl<Key: Eq + Hash, Value: Clone> Cache<Key, Value> {
     pub fn get<Tcx: DepContext>(&self, key: &Key, tcx: Tcx) -> Option<Value> {
-        Some(self.hashmap.borrow().get(key)?.get(tcx))
+        self.hashmap.with_borrow(|map| map.get(key).map(|node| node.get(tcx)))
     }
 
     pub fn insert(&self, key: Key, dep_node: DepNodeIndex, value: Value) {
-        self.hashmap.borrow_mut().insert(key, WithDepNode::new(dep_node, value));
+        self.hashmap.with_lock(|map| map.insert(key, WithDepNode::new(dep_node, value)));
     }
 }
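Throughout this commit, guard-returning accessors (`borrow()`, `borrow_mut()`, `lock()`) are replaced with closure-based ones (`with_borrow`, `with_lock`), so the guard is created and dropped entirely inside the helper instead of living as a temporary until the end of the enclosing statement. A minimal sketch of the idea, using `std::sync::Mutex` and a hypothetical `WithLock` extension trait (rustc's own `Lock::with_lock` is internal and its signature may differ):

use std::collections::HashMap;
use std::sync::Mutex;

/// Hypothetical closure-based accessor sketching the `with_lock` pattern.
trait WithLock<T> {
    fn with_lock<R, F: FnOnce(&mut T) -> R>(&self, f: F) -> R;
}

impl<T> WithLock<T> for Mutex<T> {
    fn with_lock<R, F: FnOnce(&mut T) -> R>(&self, f: F) -> R {
        // The guard is created and dropped inside this helper, so the caller
        // never holds it across unrelated code.
        let mut guard = self.lock().unwrap();
        f(&mut guard)
    }
}

fn main() {
    let cache: Mutex<HashMap<u32, String>> = Mutex::new(HashMap::new());

    // Guard-based style: the temporary guard lives until the end of the statement.
    cache.lock().unwrap().insert(1, "one".to_owned());

    // Closure-based style: the lock is scoped exactly to the closure body.
    cache.with_lock(|map| map.insert(2, "two".to_owned()));
    let len = cache.with_lock(|map| map.len());
    assert_eq!(len, 2);
}

The `Cache::get` change above also rewrites the `?`-based early return as a `map`, keeping the entire lookup inside the closure.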

compiler/rustc_query_system/src/dep_graph/graph.rs

+82 -82

@@ -2,7 +2,7 @@ use parking_lot::Mutex;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::profiling::{EventId, QueryInvocationId, SelfProfilerRef};
-use rustc_data_structures::sharded::{self, Sharded};
+use rustc_data_structures::sharded::Sharded;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::steal::Steal;
 use rustc_data_structures::sync::{AtomicU32, AtomicU64, Lock, Lrc, Ordering};
@@ -466,8 +466,42 @@ impl<K: DepKind> DepGraph<K> {
     pub fn read_index(&self, dep_node_index: DepNodeIndex) {
         if let Some(ref data) = self.data {
             K::read_deps(|task_deps| {
-                let mut task_deps = match task_deps {
-                    TaskDepsRef::Allow(deps) => deps.lock(),
+                match task_deps {
+                    TaskDepsRef::Allow(deps) => deps.with_lock(|task_deps| {
+                        // As long as we only have a low number of reads we can avoid doing a hash
+                        // insert and potentially allocating/reallocating the hashmap
+                        let new_read = if task_deps.reads.len() < TASK_DEPS_READS_CAP {
+                            task_deps.reads.iter().all(|other| *other != dep_node_index)
+                        } else {
+                            task_deps.read_set.insert(dep_node_index)
+                        };
+                        if new_read {
+                            task_deps.reads.push(dep_node_index);
+                            if task_deps.reads.len() == TASK_DEPS_READS_CAP {
+                                // Fill `read_set` with what we have so far so we can use the hashset
+                                // next time
+                                task_deps.read_set.extend(task_deps.reads.iter().copied());
+                            }
+
+                            #[cfg(debug_assertions)]
+                            {
+                                if let Some(target) = task_deps.node {
+                                    if let Some(ref forbidden_edge) = data.current.forbidden_edge {
+                                        let src =
+                                            forbidden_edge.index_to_node.lock()[&dep_node_index];
+                                        if forbidden_edge.test(&src, &target) {
+                                            panic!(
+                                                "forbidden edge {:?} -> {:?} created",
+                                                src, target
+                                            )
+                                        }
+                                    }
+                                }
+                            }
+                        } else if cfg!(debug_assertions) {
+                            data.current.total_duplicate_read_count.fetch_add(1, Relaxed);
+                        }
+                    }),
                     TaskDepsRef::EvalAlways => {
                         // We don't need to record dependencies of eval_always
                         // queries. They are re-evaluated unconditionally anyway.
@@ -478,41 +512,6 @@ impl<K: DepKind> DepGraph<K> {
                         panic!("Illegal read of: {dep_node_index:?}")
                     }
                 };
-                let task_deps = &mut *task_deps;
-
-                if cfg!(debug_assertions) {
-                    data.current.total_read_count.fetch_add(1, Relaxed);
-                }
-
-                // As long as we only have a low number of reads we can avoid doing a hash
-                // insert and potentially allocating/reallocating the hashmap
-                let new_read = if task_deps.reads.len() < TASK_DEPS_READS_CAP {
-                    task_deps.reads.iter().all(|other| *other != dep_node_index)
-                } else {
-                    task_deps.read_set.insert(dep_node_index)
-                };
-                if new_read {
-                    task_deps.reads.push(dep_node_index);
-                    if task_deps.reads.len() == TASK_DEPS_READS_CAP {
-                        // Fill `read_set` with what we have so far so we can use the hashset
-                        // next time
-                        task_deps.read_set.extend(task_deps.reads.iter().copied());
-                    }
-
-                    #[cfg(debug_assertions)]
-                    {
-                        if let Some(target) = task_deps.node {
-                            if let Some(ref forbidden_edge) = data.current.forbidden_edge {
-                                let src = forbidden_edge.index_to_node.lock()[&dep_node_index];
-                                if forbidden_edge.test(&src, &target) {
-                                    panic!("forbidden edge {:?} -> {:?} created", src, target)
-                                }
-                            }
-                        }
-                    }
-                } else if cfg!(debug_assertions) {
-                    data.current.total_duplicate_read_count.fetch_add(1, Relaxed);
-                }
             })
         }
     }
@@ -574,7 +573,9 @@ impl<K: DepKind> DepGraph<K> {
 
         let mut edges = SmallVec::new();
         K::read_deps(|task_deps| match task_deps {
-            TaskDepsRef::Allow(deps) => edges.extend(deps.lock().reads.iter().copied()),
+            TaskDepsRef::Allow(deps) => {
+                deps.with_borrow(|deps| edges.extend(deps.reads.iter().copied()))
+            }
             TaskDepsRef::EvalAlways => {
                 edges.push(DepNodeIndex::FOREVER_RED_NODE);
             }
@@ -627,14 +628,11 @@ impl<K: DepKind> DepGraphData<K> {
     #[inline]
     pub fn dep_node_index_of_opt(&self, dep_node: &DepNode<K>) -> Option<DepNodeIndex> {
         if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
-            self.current.prev_index_to_index.lock()[prev_index]
+            self.current.prev_index_to_index.with_borrow(|nodes| nodes[prev_index])
         } else {
             self.current
                 .new_node_to_index
-                .get_shard_by_value(dep_node)
-                .lock()
-                .get(dep_node)
-                .copied()
+                .with_get_shard_by_value(dep_node, |node| node.get(dep_node).copied())
         }
     }
 
@@ -670,7 +668,7 @@ impl<K: DepKind> DepGraphData<K> {
     }
 
     pub fn mark_debug_loaded_from_disk(&self, dep_node: DepNode<K>) {
-        self.debug_loaded_from_disk.lock().insert(dep_node);
+        self.debug_loaded_from_disk.with_lock(|node| node.insert(dep_node));
    }
 }
 
@@ -693,7 +691,11 @@ impl<K: DepKind> DepGraph<K> {
     }
 
     pub fn debug_was_loaded_from_disk(&self, dep_node: DepNode<K>) -> bool {
-        self.data.as_ref().unwrap().debug_loaded_from_disk.lock().contains(&dep_node)
+        self.data
+            .as_ref()
+            .unwrap()
+            .debug_loaded_from_disk
+            .with_borrow(|node| node.contains(&dep_node))
     }
 
     #[inline(always)]
@@ -703,15 +705,15 @@ impl<K: DepKind> DepGraph<K> {
     {
         let dep_node_debug = &self.data.as_ref().unwrap().dep_node_debug;
 
-        if dep_node_debug.borrow().contains_key(&dep_node) {
+        if dep_node_debug.with_borrow(|node| node.contains_key(&dep_node)) {
            return;
         }
         let debug_str = self.with_ignore(debug_str_gen);
-        dep_node_debug.borrow_mut().insert(dep_node, debug_str);
+        dep_node_debug.with_lock(|node| node.insert(dep_node, debug_str));
     }
 
     pub fn dep_node_debug_str(&self, dep_node: DepNode<K>) -> Option<String> {
-        self.data.as_ref()?.dep_node_debug.borrow().get(&dep_node).cloned()
+        self.data.as_ref()?.dep_node_debug.with_borrow(|node| node.get(&dep_node).cloned())
     }
 
     fn node_color(&self, dep_node: &DepNode<K>) -> Option<DepNodeColor> {
@@ -1163,10 +1165,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 record_stats,
             )),
             new_node_to_index: Sharded::new(|| {
-                FxHashMap::with_capacity_and_hasher(
-                    new_node_count_estimate / sharded::SHARDS,
-                    Default::default(),
-                )
+                FxHashMap::with_capacity_and_hasher(new_node_count_estimate, Default::default())
             }),
             prev_index_to_index: Lock::new(IndexVec::from_elem_n(None, prev_graph_node_count)),
             anon_id_seed,
@@ -1199,16 +1198,16 @@ impl<K: DepKind> CurrentDepGraph<K> {
         edges: EdgesVec,
         current_fingerprint: Fingerprint,
     ) -> DepNodeIndex {
-        let dep_node_index = match self.new_node_to_index.get_shard_by_value(&key).lock().entry(key)
-        {
-            Entry::Occupied(entry) => *entry.get(),
-            Entry::Vacant(entry) => {
-                let dep_node_index =
-                    self.encoder.borrow().send(profiler, key, current_fingerprint, edges);
-                entry.insert(dep_node_index);
-                dep_node_index
-            }
-        };
+        let dep_node_index =
+            self.new_node_to_index.with_get_shard_by_value(&key, |node| match node.entry(key) {
+                Entry::Occupied(entry) => *entry.get(),
+                Entry::Vacant(entry) => {
+                    let dep_node_index =
+                        self.encoder.borrow().send(profiler, key, current_fingerprint, edges);
+                    entry.insert(dep_node_index);
+                    dep_node_index
+                }
+            });
 
         #[cfg(debug_assertions)]
         self.record_edge(dep_node_index, key, current_fingerprint);
@@ -1298,25 +1297,26 @@ impl<K: DepKind> CurrentDepGraph<K> {
     ) -> DepNodeIndex {
         self.debug_assert_not_in_new_nodes(prev_graph, prev_index);
 
-        let mut prev_index_to_index = self.prev_index_to_index.lock();
-
-        match prev_index_to_index[prev_index] {
-            Some(dep_node_index) => dep_node_index,
-            None => {
-                let key = prev_graph.index_to_node(prev_index);
-                let edges = prev_graph
-                    .edge_targets_from(prev_index)
-                    .iter()
-                    .map(|i| prev_index_to_index[*i].unwrap())
-                    .collect();
-                let fingerprint = prev_graph.fingerprint_by_index(prev_index);
-                let dep_node_index = self.encoder.borrow().send(profiler, key, fingerprint, edges);
-                prev_index_to_index[prev_index] = Some(dep_node_index);
-                #[cfg(debug_assertions)]
-                self.record_edge(dep_node_index, key, fingerprint);
-                dep_node_index
+        self.prev_index_to_index.with_lock(|prev_index_to_index| {
+            match prev_index_to_index[prev_index] {
+                Some(dep_node_index) => dep_node_index,
+                None => {
+                    let key = prev_graph.index_to_node(prev_index);
+                    let edges = prev_graph
+                        .edge_targets_from(prev_index)
+                        .iter()
+                        .map(|i| prev_index_to_index[*i].unwrap())
+                        .collect();
+                    let fingerprint = prev_graph.fingerprint_by_index(prev_index);
+                    let dep_node_index =
+                        self.encoder.borrow().send(profiler, key, fingerprint, edges);
+                    prev_index_to_index[prev_index] = Some(dep_node_index);
+                    #[cfg(debug_assertions)]
+                    self.record_edge(dep_node_index, key, fingerprint);
+                    dep_node_index
+                }
             }
-        }
+        })
     }
 
     #[inline]
@@ -1327,7 +1327,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
     ) {
         let node = &prev_graph.index_to_node(prev_index);
         debug_assert!(
-            !self.new_node_to_index.get_shard_by_value(node).lock().contains_key(node),
+            !self.new_node_to_index.with_get_shard_by_value(node, |lock| lock.contains_key(node)),
             "node from previous graph present in new node collection"
        );
    }
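The `new_node_to_index` accesses above fold the previous `get_shard_by_value(..).lock()` chain into a single `with_get_shard_by_value` call, so choosing the shard, locking it, and using it all happen inside one closure-scoped helper. A simplified sketch of such a sharded map (the real `Sharded` type in `rustc_data_structures` is more involved; the shard count, hashing, and names here are illustrative):

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::sync::Mutex;

const SHARDS: usize = 4; // illustrative; the real shard count differs

/// A toy sharded hash map sketching the `with_get_shard_by_value` pattern.
struct Sharded<K, V> {
    shards: Vec<Mutex<HashMap<K, V>>>,
}

impl<K: Hash + Eq, V> Sharded<K, V> {
    fn new() -> Self {
        Sharded { shards: (0..SHARDS).map(|_| Mutex::new(HashMap::new())).collect() }
    }

    /// Pick the shard that `key` hashes to, lock it, and run `f` on it;
    /// the shard lock is released as soon as `f` returns.
    fn with_get_shard_by_value<R, F: FnOnce(&mut HashMap<K, V>) -> R>(&self, key: &K, f: F) -> R {
        let mut hasher = DefaultHasher::new();
        key.hash(&mut hasher);
        let shard = (hasher.finish() as usize) % SHARDS;
        let mut guard = self.shards[shard].lock().unwrap();
        f(&mut guard)
    }
}

fn main() {
    let map: Sharded<&'static str, u32> = Sharded::new();
    map.with_get_shard_by_value(&"a", |shard| shard.insert("a", 1));
    let hit = map.with_get_shard_by_value(&"a", |shard| shard.get("a").copied());
    assert_eq!(hit, Some(1));
}

In this sketch, because the shard lock is released when the closure returns, a caller cannot accidentally keep one shard locked while touching another, which is the same scoping property the closure-based `Lock` accessors give.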

compiler/rustc_query_system/src/dep_graph/serialized.rs

+3 -3

@@ -186,7 +186,7 @@ impl<K: DepKind> EncoderState<K> {
             if let Some(record_graph) = &mut record_graph.try_lock() {
                 record_graph.push(index, node.node, &node.edges);
             }
-        }
+        };
 
         if let Some(stats) = &mut self.stats {
             let kind = node.node.kind;
@@ -242,7 +242,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
 
     pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery<K>)) {
         if let Some(record_graph) = &self.record_graph {
-            f(&record_graph.lock())
+            record_graph.with_borrow(f)
         }
     }
 
@@ -307,7 +307,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
     ) -> DepNodeIndex {
         let _prof_timer = profiler.generic_activity("incr_comp_encode_dep_graph");
         let node = NodeInfo { node, fingerprint, edges };
-        self.status.lock().encode_node(&node, &self.record_graph)
+        self.status.with_lock(|status| status.encode_node(&node, &self.record_graph))
     }
 
     pub fn finish(self, profiler: &SelfProfilerRef) -> FileEncodeResult {
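Here `with_query` passes the caller's closure straight to `record_graph.with_borrow(f)` instead of building a guard with `lock()`, and `encode_node` wraps the status access in `with_lock`. For the read-only side, a small sketch of a `with_borrow`-style helper over `std::cell::RefCell` (a hypothetical free function; rustc's `Lock` and its `with_borrow` are internal and may be backed by either a `RefCell` or a mutex depending on how the compiler is built):

use std::cell::RefCell;
use std::collections::HashMap;

/// Hypothetical free function sketching a read-only `with_borrow` accessor.
fn with_borrow<T, R, F: FnOnce(&T) -> R>(cell: &RefCell<T>, f: F) -> R {
    // The borrow begins and ends inside this call.
    f(&*cell.borrow())
}

fn main() {
    let status: RefCell<HashMap<&'static str, u32>> = RefCell::new(HashMap::new());
    status.borrow_mut().insert("nodes", 3);

    // Read-only access scoped to the closure, mirroring
    // `record_graph.with_borrow(f)` in `with_query`.
    let nodes = with_borrow(&status, |map| map.get("nodes").copied());
    assert_eq!(nodes, Some(3));
}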

compiler/rustc_query_system/src/ich/impls_syntax.rs

+1 -1

@@ -80,7 +80,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
         src_hash.hash_stable(hcx, hasher);
 
         // We are always in `Lines` form by the time we reach here.
-        assert!(self.lines.borrow().is_lines());
+        assert!(self.lines.with_borrow(|lines| lines.is_lines()));
         self.lines(|lines| {
             // We only hash the relative position within this source_file
             lines.len().hash_stable(hcx, hasher);
