
Commit 57ca354

Experiment: Only track fingerprints for reconstructible dep-nodes.
Parent: 17a520a

4 files changed, +191 -115 lines changed


compiler/rustc_middle/src/query/mod.rs (+5 -5)
@@ -1066,7 +1066,7 @@ rustc_queries! {
             "const-evaluating + checking `{}`",
             key.value.display(tcx)
         }
-        cache_on_disk_if { true }
+        // cache_on_disk_if { true } Cannot cache this anymore
     }
 
     /// Evaluates const items or anonymous constants
@@ -1081,7 +1081,7 @@ rustc_queries! {
             "simplifying constant for the type system `{}`",
             key.value.display(tcx)
         }
-        cache_on_disk_if { true }
+        // cache_on_disk_if { true } Cannot cache this anymore
     }
 
     /// Evaluate a constant and convert it to a type level constant or
@@ -1148,7 +1148,7 @@ rustc_queries! {
     /// look up the correct symbol name of instances from upstream crates.
     query symbol_name(key: ty::Instance<'tcx>) -> ty::SymbolName<'tcx> {
         desc { "computing the symbol for `{}`", key }
-        cache_on_disk_if { true }
+        // cache_on_disk_if { true } Cannot cache this anymore
     }
 
     query def_kind(def_id: DefId) -> DefKind {
@@ -1284,7 +1284,7 @@ rustc_queries! {
     query codegen_select_candidate(
         key: (ty::ParamEnv<'tcx>, ty::TraitRef<'tcx>)
     ) -> Result<&'tcx ImplSource<'tcx, ()>, CodegenObligationError> {
-        cache_on_disk_if { true }
+        // cache_on_disk_if { true } Cannot cache this anymore
         desc { |tcx| "computing candidate for `{}`", key.1 }
     }
 
@@ -1894,7 +1894,7 @@ rustc_queries! {
     }
 
     query unused_generic_params(key: ty::InstanceDef<'tcx>) -> UnusedGenericParams {
-        cache_on_disk_if { key.def_id().is_local() }
+        // cache_on_disk_if { key.def_id().is_local() } Cannot cache this anymore
         desc {
             |tcx| "determining which generic parameters are unused by `{}`",
             tcx.def_path_str(key.def_id())
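
Note on the hunks above: `cache_on_disk_if` is the `rustc_queries!` modifier that decides whether a query's result is written to the incremental on-disk cache. The commit comments it out for these queries ("Cannot cache this anymore"), presumably because validating a cached result needs the fingerprints these dep-nodes no longer track (an inference from the commit title, not stated in the diff). Below is a minimal, hypothetical sketch of the predicate-gated caching idea only; it is not rustc's caching machinery, and `DiskCache`/`store_if` are made-up names:

    // Hypothetical sketch: a cache that persists a result only when a
    // per-query predicate (the role played by `cache_on_disk_if`) allows it.
    use std::collections::HashMap;

    struct DiskCache {
        stored: HashMap<String, Vec<u8>>, // query key -> serialized result
    }

    impl DiskCache {
        fn new() -> Self {
            DiskCache { stored: HashMap::new() }
        }

        fn store_if(&mut self, key: &str, result: &[u8], cache_on_disk_if: impl Fn(&str) -> bool) {
            if cache_on_disk_if(key) {
                self.stored.insert(key.to_owned(), result.to_vec());
            }
        }

        fn load(&self, key: &str) -> Option<&[u8]> {
            self.stored.get(key)
        }
    }

    fn main() {
        let mut cache = DiskCache::new();
        // `|_| true` models `cache_on_disk_if { true }`; `|_| false` models the
        // modifier being commented out (never cache, always recompute).
        cache.store_if("symbol_name(foo)", b"_ZN3foo7exampleE", |_| true);
        cache.store_if("codegen_select_candidate(bar)", b"candidate", |_| false);
        assert!(cache.load("symbol_name(foo)").is_some());
        assert!(cache.load("codegen_select_candidate(bar)").is_none());
    }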

compiler/rustc_query_system/src/dep_graph/graph.rs (+94 -77)
@@ -133,7 +133,7 @@ impl<D: Deps> DepGraph<D> {
         let colors = DepNodeColorMap::new(prev_graph_node_count);
 
         // Instantiate a dependy-less node only once for anonymous queries.
-        let _green_node_index = current.intern_new_node(
+        let _green_node_index = current.alloc_new_node(
             profiler,
             DepNode { kind: D::DEP_KIND_NULL, hash: current.anon_id_seed.into() },
             EdgesVec::new(),
@@ -272,6 +272,7 @@ impl<D: Deps> DepGraph<D> {
         D::with_deps(TaskDepsRef::Forbid, op)
     }
 
+    // FIXME(sparse_fps): Document
     #[inline(always)]
     pub fn with_task<Ctxt: HasDepContext<Deps = D>, A: Debug, R>(
         &self,
@@ -287,6 +288,7 @@ impl<D: Deps> DepGraph<D> {
         }
     }
 
+    // FIXME(sparse_fps): Document
     pub fn with_anon_task<Tcx: DepContext<Deps = D>, OP, R>(
         &self,
         cx: Tcx,
@@ -297,7 +299,7 @@ impl<D: Deps> DepGraph<D> {
         OP: FnOnce() -> R,
     {
         match self.data() {
-            Some(data) => data.with_anon_task(cx, dep_kind, op),
+            Some(data) => data.with_anon_task(cx, dep_kind, true, op),
             None => (op(), self.next_virtual_depnode_index()),
         }
     }
@@ -395,61 +397,71 @@ impl<D: Deps> DepGraphData<D> {
         (result, dep_node_index)
     }
 
+    // FIXME(sparse_fps): Document
     /// Executes something within an "anonymous" task, that is, a task the
     /// `DepNode` of which is determined by the list of inputs it read from.
     pub(crate) fn with_anon_task<Tcx: DepContext<Deps = D>, OP, R>(
         &self,
         cx: Tcx,
         dep_kind: DepKind,
+        intern: bool,
         op: OP,
     ) -> (R, DepNodeIndex)
     where
         OP: FnOnce() -> R,
     {
-        debug_assert!(!cx.is_eval_always(dep_kind));
-
         let task_deps = Lock::new(TaskDeps::default());
         let result = D::with_deps(TaskDepsRef::Allow(&task_deps), op);
         let task_deps = task_deps.into_inner();
         let task_deps = task_deps.reads;
 
-        let dep_node_index = match task_deps.len() {
-            0 => {
-                // Because the dep-node id of anon nodes is computed from the sets of its
-                // dependencies we already know what the ID of this dependency-less node is
-                // going to be (i.e. equal to the precomputed
-                // `SINGLETON_DEPENDENCYLESS_ANON_NODE`). As a consequence we can skip creating
-                // a `StableHasher` and sending the node through interning.
-                DepNodeIndex::SINGLETON_DEPENDENCYLESS_ANON_NODE
-            }
-            1 => {
-                // When there is only one dependency, don't bother creating a node.
-                task_deps[0]
-            }
-            _ => {
-                // The dep node indices are hashed here instead of hashing the dep nodes of the
-                // dependencies. These indices may refer to different nodes per session, but this isn't
-                // a problem here because we that ensure the final dep node hash is per session only by
-                // combining it with the per session random number `anon_id_seed`. This hash only need
-                // to map the dependencies to a single value on a per session basis.
-                let mut hasher = StableHasher::new();
-                task_deps.hash(&mut hasher);
-
-                let target_dep_node = DepNode {
-                    kind: dep_kind,
-                    // Fingerprint::combine() is faster than sending Fingerprint
-                    // through the StableHasher (at least as long as StableHasher
-                    // is so slow).
-                    hash: self.current.anon_id_seed.combine(hasher.finish()).into(),
-                };
-
-                self.current.intern_new_node(
-                    cx.profiler(),
-                    target_dep_node,
-                    task_deps,
-                    Fingerprint::ZERO,
-                )
+        let dep_node_index = if intern {
+            // FIXME(sparse_fp): what is this assertion about?
+            debug_assert!(!cx.is_eval_always(dep_kind));
+
+            match task_deps.len() {
+                0 => {
+                    // Because the dep-node id of anon nodes is computed from the sets of its
+                    // dependencies we already know what the ID of this dependency-less node is
+                    // going to be (i.e. equal to the precomputed
+                    // `SINGLETON_DEPENDENCYLESS_ANON_NODE`). As a consequence we can skip creating
+                    // a `StableHasher` and sending the node through interning.
+                    DepNodeIndex::SINGLETON_DEPENDENCYLESS_ANON_NODE
+                }
+                1 => {
+                    // When there is only one dependency, don't bother creating a node.
+                    task_deps[0]
+                }
+                _ => {
+                    // The dep node indices are hashed here instead of hashing the dep nodes of the
+                    // dependencies. These indices may refer to different nodes per session, but this isn't
+                    // a problem here because we that ensure the final dep node hash is per session only by
+                    // combining it with the per session random number `anon_id_seed`. This hash only need
+                    // to map the dependencies to a single value on a per session basis.
+                    let mut hasher = StableHasher::new();
+                    task_deps.hash(&mut hasher);
+                    dep_kind.hash(&mut hasher);
+
+                    let dedup_fingerprint: Fingerprint = hasher.finish();
+
+                    match self
+                        .current
+                        .interned_node_to_index
+                        .lock_shard_by_value(&dedup_fingerprint)
+                        .entry(dedup_fingerprint)
+                    {
+                        Entry::Occupied(entry) => *entry.get(),
+                        Entry::Vacant(entry) => {
+                            let dep_node_index =
+                                self.current.alloc_anon_node(cx.profiler(), dep_kind, task_deps);
+                            entry.insert(dep_node_index);
+                            dep_node_index
+                        }
+                    }
+                }
             }
+        } else {
+            self.current.alloc_anon_node(cx.profiler(), dep_kind, task_deps)
         };
 
         (result, dep_node_index)
@@ -616,18 +628,20 @@ impl<D: Deps> DepGraph<D> {
 }
 
 impl<D: Deps> DepGraphData<D> {
-    #[inline]
-    fn dep_node_index_of_opt(&self, dep_node: &DepNode) -> Option<DepNodeIndex> {
-        if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
-            self.current.prev_index_to_index.lock()[prev_index]
-        } else {
-            self.current.new_node_to_index.lock_shard_by_value(dep_node).get(dep_node).copied()
-        }
-    }
+    // #[inline]
+    // fn dep_node_index_of_opt(&self, dep_node: &DepNode) -> Option<DepNodeIndex> {
+    //     if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
+    //         self.current.prev_index_to_index.lock()[prev_index]
+    //     } else {
+    //         self.current.interned_node_to_index.lock_shard_by_value(dep_node).get(dep_node).copied()
+    //     }
+    // }
 
     #[inline]
-    fn dep_node_exists(&self, dep_node: &DepNode) -> bool {
-        self.dep_node_index_of_opt(dep_node).is_some()
+    fn dep_node_exists(&self, _dep_node: &DepNode) -> bool {
+        // FIXME(sparse_fps): bring back assertions
+        //self.dep_node_index_of_opt(dep_node).is_some()
+        false
     }
 
     fn node_color(&self, dep_node: &DepNode) -> Option<DepNodeColor> {
@@ -1071,7 +1085,7 @@ rustc_index::newtype_index! {
 /// first, and `data` second.
 pub(super) struct CurrentDepGraph<D: Deps> {
     encoder: Steal<GraphEncoder<D>>,
-    new_node_to_index: Sharded<FxHashMap<DepNode, DepNodeIndex>>,
+    interned_node_to_index: Sharded<FxHashMap<Fingerprint, DepNodeIndex>>,
     prev_index_to_index: Lock<IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>>,
 
     /// This is used to verify that fingerprints do not change between the creation of a node
@@ -1152,7 +1166,7 @@ impl<D: Deps> CurrentDepGraph<D> {
                 record_graph,
                 record_stats,
             )),
-            new_node_to_index: Sharded::new(|| {
+            interned_node_to_index: Sharded::new(|| {
                 FxHashMap::with_capacity_and_hasher(
                     new_node_count_estimate / sharded::shards(),
                     Default::default(),
@@ -1182,29 +1196,30 @@ impl<D: Deps> CurrentDepGraph<D> {
     /// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
     /// Assumes that this is a node that has no equivalent in the previous dep-graph.
     #[inline(always)]
-    fn intern_new_node(
+    fn alloc_new_node(
        &self,
         profiler: &SelfProfilerRef,
         key: DepNode,
         edges: EdgesVec,
         current_fingerprint: Fingerprint,
     ) -> DepNodeIndex {
-        let dep_node_index = match self.new_node_to_index.lock_shard_by_value(&key).entry(key) {
-            Entry::Occupied(entry) => *entry.get(),
-            Entry::Vacant(entry) => {
-                let dep_node_index =
-                    self.encoder.borrow().send(profiler, key, current_fingerprint, edges);
-                entry.insert(dep_node_index);
-                dep_node_index
-            }
-        };
-
+        let dep_node_index = self.encoder.borrow().send(profiler, key, current_fingerprint, edges);
         #[cfg(debug_assertions)]
         self.record_edge(dep_node_index, key, current_fingerprint);
 
         dep_node_index
     }
 
+    #[inline(always)]
+    fn alloc_anon_node(
+        &self,
+        profiler: &SelfProfilerRef,
+        dep_kind: DepKind,
+        edges: EdgesVec,
+    ) -> DepNodeIndex {
+        self.encoder.borrow().send_anon_node(profiler, dep_kind, edges)
+    }
+
     fn intern_node(
         &self,
         profiler: &SelfProfilerRef,
@@ -1262,7 +1277,7 @@ impl<D: Deps> CurrentDepGraph<D> {
             let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO);
 
             // This is a new node: it didn't exist in the previous compilation session.
-            let dep_node_index = self.intern_new_node(profiler, key, edges, fingerprint);
+            let dep_node_index = self.alloc_new_node(profiler, key, edges, fingerprint);
 
             (dep_node_index, None)
         }
@@ -1274,7 +1289,8 @@
         prev_graph: &SerializedDepGraph,
         prev_index: SerializedDepNodeIndex,
     ) -> DepNodeIndex {
-        self.debug_assert_not_in_new_nodes(prev_graph, prev_index);
+        // FIXME(sparse_fp): restore assertions
+        // self.debug_assert_not_in_new_nodes(prev_graph, prev_index);
 
         let mut prev_index_to_index = self.prev_index_to_index.lock();
 
@@ -1296,18 +1312,19 @@
         }
     }
 
-    #[inline]
-    fn debug_assert_not_in_new_nodes(
-        &self,
-        prev_graph: &SerializedDepGraph,
-        prev_index: SerializedDepNodeIndex,
-    ) {
-        let node = &prev_graph.index_to_node(prev_index);
-        debug_assert!(
-            !self.new_node_to_index.lock_shard_by_value(node).contains_key(node),
-            "node from previous graph present in new node collection"
-        );
-    }
+    // FIXME(sparse_fp): restore assertions
+    // #[inline]
+    // fn debug_assert_not_in_new_nodes(
+    //     &self,
+    //     prev_graph: &SerializedDepGraph,
+    //     prev_index: SerializedDepNodeIndex,
+    // ) {
+    //     let node = &prev_graph.index_to_node(prev_index);
+    //     debug_assert!(
+    //         !self.interned_node_to_index.lock_shard_by_value(node).contains_key(node),
+    //         "node from previous graph present in new node collection"
+    //     );
+    // }
 }
 
 #[derive(Debug, Clone, Copy)]
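
To summarize the scheme introduced in graph.rs: `alloc_new_node` (formerly `intern_new_node`) now always allocates a fresh node and sends it straight to the encoder, while deduplication survives only for anonymous nodes in `with_anon_task`, where the dependency edges plus the `DepKind` are hashed into a dedup fingerprint that keys the new `interned_node_to_index` map. The sketch below is a simplified, self-contained illustration of that dedup path, not the compiler's implementation: plain `u64`, `HashMap`, and `DefaultHasher` stand in for `Fingerprint`, the sharded map, and `StableHasher`, and the 0- and 1-dependency fast paths are omitted.

    // Simplified stand-ins for the compiler's types; the real code uses
    // `Fingerprint`, `StableHasher`, and a sharded `FxHashMap`.
    use std::collections::hash_map::{DefaultHasher, Entry};
    use std::collections::HashMap;
    use std::hash::{Hash, Hasher};

    type DepNodeIndex = u32;
    type DepKind = u16;
    type EdgesVec = Vec<DepNodeIndex>;

    struct AnonNodeTable {
        // Plays the role of `interned_node_to_index`: dedup fingerprint -> node index.
        interned: HashMap<u64, DepNodeIndex>,
        next_index: DepNodeIndex,
    }

    impl AnonNodeTable {
        fn new() -> Self {
            AnonNodeTable { interned: HashMap::new(), next_index: 0 }
        }

        // Role of `alloc_anon_node`: unconditionally allocate a fresh node
        // (used directly when `intern == false`).
        fn alloc_anon_node(&mut self, _kind: DepKind, _edges: EdgesVec) -> DepNodeIndex {
            let index = self.next_index;
            self.next_index += 1;
            index
        }

        // Mirrors the `intern == true` branch of `with_anon_task`: hash the
        // dependency edges plus the kind, then reuse an existing node with the
        // same fingerprint or allocate (and remember) a new one.
        fn intern_anon_node(&mut self, kind: DepKind, edges: EdgesVec) -> DepNodeIndex {
            let mut hasher = DefaultHasher::new();
            edges.hash(&mut hasher);
            kind.hash(&mut hasher);
            let dedup_fingerprint = hasher.finish();

            match self.interned.entry(dedup_fingerprint) {
                Entry::Occupied(entry) => *entry.get(),
                Entry::Vacant(entry) => {
                    let index = self.next_index;
                    self.next_index += 1;
                    entry.insert(index);
                    index
                }
            }
        }
    }

    fn main() {
        let mut table = AnonNodeTable::new();
        let a = table.intern_anon_node(1, vec![3, 5, 8]);
        let b = table.intern_anon_node(1, vec![3, 5, 8]); // same kind + edges -> deduped
        let c = table.intern_anon_node(2, vec![3, 5, 8]); // different kind -> new node
        let d = table.alloc_anon_node(1, vec![3, 5, 8]); // non-interning path -> always new
        assert_eq!(a, b);
        assert_ne!(a, c);
        assert_ne!(a, d);
        println!("a={} b={} c={} d={}", a, b, c, d);
    }

Because deduplication now keys on a per-session fingerprint rather than a reconstructible `DepNode`, the helpers that looked nodes up by `DepNode` (`dep_node_index_of_opt`, `dep_node_exists`, `debug_assert_not_in_new_nodes`) are commented out in the diff above, with FIXMEs to restore the assertions.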
