Skip to content

Commit 0ed54d5

Browse files
committed
test for threads = 1 again
1 parent fd51e35 commit 0ed54d5

File tree

9 files changed

+85
-136
lines changed

9 files changed

+85
-136
lines changed

compiler/rustc_data_structures/Cargo.toml

+2-2
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ indexmap = { version = "1.9.3" }
1414
jobserver_crate = { version = "0.1.13", package = "jobserver" }
1515
libc = "0.2"
1616
measureme = "10.0.0"
17 -  rustc-rayon-core = { version = "0.5.0" }
17 +  rustc-rayon-core = { version = "0.5.0", optional = true }
1818
rustc-rayon = { version = "0.5.0", optional = true }
1919
rustc_arena = { path = "../rustc_arena" }
2020
rustc_graphviz = { path = "../rustc_graphviz" }
@@ -51,4 +51,4 @@ features = [
5151
memmap2 = "0.2.1"
5252

5353
[features]
54 -  rustc_use_parallel_compiler = ["indexmap/rustc-rayon", "rustc-rayon"]
54 +  rustc_use_parallel_compiler = ["indexmap/rustc-rayon", "rustc-rayon", "rustc-rayon-core"]

compiler/rustc_data_structures/src/lib.rs

-1
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,6 @@
3333
#![feature(strict_provenance)]
3434
#![feature(ptr_alignment_type)]
3535
#![feature(macro_metavar_expr)]
36 -  #![feature(mutex_unpoison)]
3736
#![allow(rustc::default_hash_types)]
3837
#![allow(rustc::potential_query_instability)]
3938
#![deny(rustc::untranslatable_diagnostic)]

compiler/rustc_data_structures/src/sync.rs

+2-42
Original file line numberDiff line numberDiff line change
@@ -493,7 +493,7 @@ cfg_if! {
493493
}
494494

495495
#[derive(Default, Debug)]
496-
#[repr(align(64))]
496+
#[cfg_attr(parallel_compiler, repr(align(64)))]
497497
pub struct CacheAligned<T>(pub T);
498498

499499
pub trait HashMapExt<K, V> {
@@ -593,50 +593,10 @@ impl<T> Lock<T> {
593593
LockGuard { lock: &self, marker: PhantomData }
594594
}
595595

596-
#[inline]
597-
pub(crate) fn with_mt_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
598-
unsafe {
599-
self.mutex.lock();
600-
let r = f(&mut *self.data.get());
601-
self.mutex.unlock();
602-
r
603-
}
604-
}
605-
606596
#[inline(always)]
607597
#[track_caller]
608598
pub fn with_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
609-
if likely(self.single_thread) {
610-
assert!(!self.borrow.replace(true));
611-
let r = unsafe { f(&mut *self.data.get()) };
612-
self.borrow.set(false);
613-
r
614-
} else {
615-
self.with_mt_lock(f)
616-
}
617-
}
618-
619-
#[inline]
620-
fn with_mt_borrow<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
621-
unsafe {
622-
self.mutex.lock();
623-
let r = f(&*self.data.get());
624-
self.mutex.unlock();
625-
r
626-
}
627-
}
628-
629-
#[inline(always)]
630-
#[track_caller]
631-
pub fn with_borrow<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
632-
if likely(self.single_thread) {
633-
assert!(!self.borrow.replace(true));
634-
let r = unsafe { f(&*self.data.get()) };
635-
self.borrow.set(false);
636-
r
637-
} else {
638-
self.with_mt_borrow(f)
639-
}
599+
f(&mut *self.lock())
640600
}
641601

642602
#[inline(always)]

compiler/rustc_query_system/src/cache.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -26,17 +26,17 @@ impl<Key, Value> Default for Cache<Key, Value> {
2626
impl<Key, Value> Cache<Key, Value> {
2727
/// Actually frees the underlying memory in contrast to what stdlib containers do on `clear`
2828
pub fn clear(&self) {
29-
self.hashmap.with_lock(|map| *map = Default::default());
29+
*self.hashmap.borrow_mut() = Default::default();
3030
}
3131
}
3232

3333
impl<Key: Eq + Hash, Value: Clone> Cache<Key, Value> {
3434
pub fn get<Tcx: DepContext>(&self, key: &Key, tcx: Tcx) -> Option<Value> {
35-
self.hashmap.with_borrow(|map| map.get(key).map(|node| node.get(tcx)))
35+
Some(self.hashmap.borrow().get(key)?.get(tcx))
3636
}
3737

3838
pub fn insert(&self, key: Key, dep_node: DepNodeIndex, value: Value) {
39-
self.hashmap.with_lock(|map| map.insert(key, WithDepNode::new(dep_node, value)));
39+
self.hashmap.borrow_mut().insert(key, WithDepNode::new(dep_node, value));
4040
}
4141
}
4242

compiler/rustc_query_system/src/dep_graph/graph.rs

+64-70
Original file line numberDiff line numberDiff line change
@@ -466,42 +466,8 @@ impl<K: DepKind> DepGraph<K> {
466466
pub fn read_index(&self, dep_node_index: DepNodeIndex) {
467467
if let Some(ref data) = self.data {
468468
K::read_deps(|task_deps| {
469-
match task_deps {
470-
TaskDepsRef::Allow(deps) => deps.with_lock(|task_deps| {
471-
// As long as we only have a low number of reads we can avoid doing a hash
472-
// insert and potentially allocating/reallocating the hashmap
473-
let new_read = if task_deps.reads.len() < TASK_DEPS_READS_CAP {
474-
task_deps.reads.iter().all(|other| *other != dep_node_index)
475-
} else {
476-
task_deps.read_set.insert(dep_node_index)
477-
};
478-
if new_read {
479-
task_deps.reads.push(dep_node_index);
480-
if task_deps.reads.len() == TASK_DEPS_READS_CAP {
481-
// Fill `read_set` with what we have so far so we can use the hashset
482-
// next time
483-
task_deps.read_set.extend(task_deps.reads.iter().copied());
484-
}
485-
486-
#[cfg(debug_assertions)]
487-
{
488-
if let Some(target) = task_deps.node {
489-
if let Some(ref forbidden_edge) = data.current.forbidden_edge {
490-
let src =
491-
forbidden_edge.index_to_node.lock()[&dep_node_index];
492-
if forbidden_edge.test(&src, &target) {
493-
panic!(
494-
"forbidden edge {:?} -> {:?} created",
495-
src, target
496-
)
497-
}
498-
}
499-
}
500-
}
501-
} else if cfg!(debug_assertions) {
502-
data.current.total_duplicate_read_count.fetch_add(1, Relaxed);
503-
}
504-
}),
469+
let mut task_deps = match task_deps {
470+
TaskDepsRef::Allow(deps) => deps.lock(),
505471
TaskDepsRef::EvalAlways => {
506472
// We don't need to record dependencies of eval_always
507473
// queries. They are re-evaluated unconditionally anyway.
@@ -512,6 +478,41 @@ impl<K: DepKind> DepGraph<K> {
512478
panic!("Illegal read of: {dep_node_index:?}")
513479
}
514480
};
481+
let task_deps = &mut *task_deps;
482+
483+
if cfg!(debug_assertions) {
484+
data.current.total_read_count.fetch_add(1, Relaxed);
485+
}
486+
487+
// As long as we only have a low number of reads we can avoid doing a hash
488+
// insert and potentially allocating/reallocating the hashmap
489+
let new_read = if task_deps.reads.len() < TASK_DEPS_READS_CAP {
490+
task_deps.reads.iter().all(|other| *other != dep_node_index)
491+
} else {
492+
task_deps.read_set.insert(dep_node_index)
493+
};
494+
if new_read {
495+
task_deps.reads.push(dep_node_index);
496+
if task_deps.reads.len() == TASK_DEPS_READS_CAP {
497+
// Fill `read_set` with what we have so far so we can use the hashset
498+
// next time
499+
task_deps.read_set.extend(task_deps.reads.iter().copied());
500+
}
501+
502+
#[cfg(debug_assertions)]
503+
{
504+
if let Some(target) = task_deps.node {
505+
if let Some(ref forbidden_edge) = data.current.forbidden_edge {
506+
let src = forbidden_edge.index_to_node.lock()[&dep_node_index];
507+
if forbidden_edge.test(&src, &target) {
508+
panic!("forbidden edge {:?} -> {:?} created", src, target)
509+
}
510+
}
511+
}
512+
}
513+
} else if cfg!(debug_assertions) {
514+
data.current.total_duplicate_read_count.fetch_add(1, Relaxed);
515+
}
515516
})
516517
}
517518
}
@@ -573,9 +574,7 @@ impl<K: DepKind> DepGraph<K> {
573574

574575
let mut edges = SmallVec::new();
575576
K::read_deps(|task_deps| match task_deps {
576-
TaskDepsRef::Allow(deps) => {
577-
deps.with_borrow(|deps| edges.extend(deps.reads.iter().copied()))
578-
}
577+
TaskDepsRef::Allow(deps) => edges.extend(deps.lock().reads.iter().copied()),
579578
TaskDepsRef::EvalAlways => {
580579
edges.push(DepNodeIndex::FOREVER_RED_NODE);
581580
}
@@ -628,7 +627,7 @@ impl<K: DepKind> DepGraphData<K> {
628627
#[inline]
629628
pub fn dep_node_index_of_opt(&self, dep_node: &DepNode<K>) -> Option<DepNodeIndex> {
630629
if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
631-
self.current.prev_index_to_index.with_borrow(|nodes| nodes[prev_index])
630+
self.current.prev_index_to_index.lock()[prev_index]
632631
} else {
633632
self.current
634633
.new_node_to_index
@@ -668,7 +667,7 @@ impl<K: DepKind> DepGraphData<K> {
668667
}
669668

670669
pub fn mark_debug_loaded_from_disk(&self, dep_node: DepNode<K>) {
671-
self.debug_loaded_from_disk.with_lock(|node| node.insert(dep_node));
670+
self.debug_loaded_from_disk.lock().insert(dep_node);
672671
}
673672
}
674673

@@ -691,29 +690,25 @@ impl<K: DepKind> DepGraph<K> {
691690
}
692691

693692
pub fn debug_was_loaded_from_disk(&self, dep_node: DepNode<K>) -> bool {
694-
self.data
695-
.as_ref()
696-
.unwrap()
697-
.debug_loaded_from_disk
698-
.with_borrow(|node| node.contains(&dep_node))
693+
self.data.as_ref().unwrap().debug_loaded_from_disk.lock().contains(&dep_node)
699694
}
700695

701696
#[inline(always)]
702697
pub fn register_dep_node_debug_str<F>(&self, dep_node: DepNode<K>, debug_str_gen: F)
703-
where
704-
F: FnOnce() -> String,
698+
where
699+
F: FnOnce() -> String,
705700
{
706701
let dep_node_debug = &self.data.as_ref().unwrap().dep_node_debug;
707702

708-
if dep_node_debug.with_borrow(|node| node.contains_key(&dep_node)) {
703+
if dep_node_debug.borrow().contains_key(&dep_node) {
709704
return;
710705
}
711706
let debug_str = self.with_ignore(debug_str_gen);
712-
dep_node_debug.with_lock(|node| node.insert(dep_node, debug_str));
707+
dep_node_debug.borrow_mut().insert(dep_node, debug_str);
713708
}
714709

715710
pub fn dep_node_debug_str(&self, dep_node: DepNode<K>) -> Option<String> {
716-
self.data.as_ref()?.dep_node_debug.with_borrow(|node| node.get(&dep_node).cloned())
711+
self.data.as_ref()?.dep_node_debug.borrow().get(&dep_node).cloned()
717712
}
718713

719714
fn node_color(&self, dep_node: &DepNode<K>) -> Option<DepNodeColor> {
@@ -1301,26 +1296,25 @@ impl<K: DepKind> CurrentDepGraph<K> {
13011296
) -> DepNodeIndex {
13021297
self.debug_assert_not_in_new_nodes(prev_graph, prev_index);
13031298

1304-
self.prev_index_to_index.with_lock(|prev_index_to_index| {
1305-
match prev_index_to_index[prev_index] {
1306-
Some(dep_node_index) => dep_node_index,
1307-
None => {
1308-
let key = prev_graph.index_to_node(prev_index);
1309-
let edges = prev_graph
1310-
.edge_targets_from(prev_index)
1311-
.iter()
1312-
.map(|i| prev_index_to_index[*i].unwrap())
1313-
.collect();
1314-
let fingerprint = prev_graph.fingerprint_by_index(prev_index);
1315-
let dep_node_index =
1316-
self.encoder.borrow().send(profiler, key, fingerprint, edges);
1317-
prev_index_to_index[prev_index] = Some(dep_node_index);
1318-
#[cfg(debug_assertions)]
1319-
self.record_edge(dep_node_index, key, fingerprint);
1320-
dep_node_index
1321-
}
1299+
let mut prev_index_to_index = self.prev_index_to_index.lock();
1300+
1301+
match prev_index_to_index[prev_index] {
1302+
Some(dep_node_index) => dep_node_index,
1303+
None => {
1304+
let key = prev_graph.index_to_node(prev_index);
1305+
let edges = prev_graph
1306+
.edge_targets_from(prev_index)
1307+
.iter()
1308+
.map(|i| prev_index_to_index[*i].unwrap())
1309+
.collect();
1310+
let fingerprint = prev_graph.fingerprint_by_index(prev_index);
1311+
let dep_node_index = self.encoder.borrow().send(profiler, key, fingerprint, edges);
1312+
prev_index_to_index[prev_index] = Some(dep_node_index);
1313+
#[cfg(debug_assertions)]
1314+
self.record_edge(dep_node_index, key, fingerprint);
1315+
dep_node_index
13221316
}
1323-
})
1317+
}
13241318
}
13251319

13261320
#[inline]

compiler/rustc_query_system/src/dep_graph/serialized.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -186,7 +186,7 @@ impl<K: DepKind> EncoderState<K> {
186186
if let Some(record_graph) = &mut record_graph.try_lock() {
187187
record_graph.push(index, node.node, &node.edges);
188188
}
189-
};
189+
}
190190

191191
if let Some(stats) = &mut self.stats {
192192
let kind = node.node.kind;
@@ -242,7 +242,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
242242

243243
pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery<K>)) {
244244
if let Some(record_graph) = &self.record_graph {
245-
record_graph.with_borrow(f)
245+
f(&record_graph.lock())
246246
}
247247
}
248248

@@ -307,7 +307,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
307307
) -> DepNodeIndex {
308308
let _prof_timer = profiler.generic_activity("incr_comp_encode_dep_graph");
309309
let node = NodeInfo { node, fingerprint, edges };
310-
self.status.with_lock(|status| status.encode_node(&node, &self.record_graph))
310+
self.status.lock().encode_node(&node, &self.record_graph)
311311
}
312312

313313
pub fn finish(self, profiler: &SelfProfilerRef) -> FileEncodeResult {

compiler/rustc_query_system/src/ich/impls_syntax.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
8080
src_hash.hash_stable(hcx, hasher);
8181

8282
// We are always in `Lines` form by the time we reach here.
83-
assert!(self.lines.with_borrow(|lines| lines.is_lines()));
83+
assert!(self.lines.borrow().is_lines());
8484
self.lines(|lines| {
8585
// We only hash the relative position within this source_file
8686
lines.len().hash_stable(hcx, hasher);

compiler/rustc_session/src/options.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -1748,7 +1748,7 @@ options! {
17481748
/// in the future. Note that -Zthreads=0 is the way to get
17491749
/// the num_cpus behavior.
17501750
#[rustc_lint_opt_deny_field_access("use `Session::threads` instead of this field")]
1751 -      threads: usize = (8, parse_threads, [UNTRACKED],
1751 +      threads: usize = (1, parse_threads, [UNTRACKED],
17521752
"use a thread pool with N threads"),
17531753
time_llvm_passes: bool = (false, parse_bool, [UNTRACKED],
17541754
"measure time of each LLVM pass (default: no)"),

0 commit comments

Comments (0)