Skip to content

Commit ecbcf1b

Browse files
committed
address comments from mw
1 parent 02a4703 commit ecbcf1b

File tree

2 files changed

+29
-5
lines changed

2 files changed

+29
-5
lines changed

src/librustc_incremental/persist/load.rs

+22-3
Original file line numberDiff line numberDiff line change
@@ -113,10 +113,28 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
113113
// TODO -- this could be more efficient if we integrated the `DefIdDirectory` and
114114
// pred set more deeply
115115

116-
// Compute the set of Hir nodes whose data has changed or which have been removed.
116+
// Compute the set of Hir nodes whose data has changed or which
117+
// have been removed. These are "raw" source nodes, which means
118+
// that they still use the original `DefPathIndex` values from the
119+
// encoding, rather than having been retraced to a `DefId`. The
120+
// reason for this is that this way we can include nodes that have
121+
// been removed (which no longer have a `DefId` in the current
122+
// compilation).
117123
let dirty_raw_source_nodes = dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);
118124

119-
// Create a (maybe smaller) list of
125+
// Create a list of (raw-source-node ->
126+
// retraced-target-node) edges. In the process of retracing the
127+
// target nodes, we may discover that some of their def-paths no longer exist,
128+
// in which case there is no need to mark the corresponding nodes as dirty
129+
// (they are just not present). So this list may be smaller than the original.
130+
//
131+
// Note though that in the common case the target nodes are
132+
// `DepNode::WorkProduct` instances, and those don't have a
133+
// def-id, so they will never be considered to not exist. Instead,
134+
// we do a secondary hashing step (later, in trans) when we know
135+
// the set of symbols that go into a work-product: if any symbols
136+
// have been removed (or added) the hash will be different and
137+
// we'll ignore the work-product then.
120138
let retraced_edges: Vec<_> =
121139
serialized_dep_graph.edges.iter()
122140
.filter_map(|&(ref raw_source_node, ref raw_target_node)| {
@@ -125,7 +143,8 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
125143
})
126144
.collect();
127145

128-
// Compute which work-products have changed.
146+
// Compute which work-products have an input that has changed or
147+
// been removed. Put the dirty ones into a set.
129148
let mut dirty_target_nodes = FnvHashSet();
130149
for &(raw_source_node, ref target_node) in &retraced_edges {
131150
if dirty_raw_source_nodes.contains(raw_source_node) {

src/librustc_incremental/persist/save.rs

+7-2
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,12 @@ pub fn encode_dep_graph(preds: &Predecessors,
110110
let mut edges = vec![];
111111
for (&target, sources) in &preds.inputs {
112112
match *target {
113-
DepNode::MetaData(_) => continue, // see encode_metadata_hashes instead
113+
DepNode::MetaData(ref def_id) => {
114+
// Metadata *targets* are always local metadata nodes. We handle
115+
// those in `encode_metadata_hashes`, which comes later.
116+
assert!(def_id.is_local());
117+
continue;
118+
}
114119
_ => (),
115120
}
116121
let target = builder.map(target);
@@ -186,7 +191,7 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
186191
// Create a vector containing a pair of (source-id, hash).
187192
// The source-id is stored as a `DepNode<u64>`, where the u64
188193
// is the det. hash of the def-path. This is convenient
189-
// because we can sort this to get a table ordering across
194+
// because we can sort this to get a stable ordering across
190195
// compilations, even if the def-ids themselves have changed.
191196
let mut hashes: Vec<(DepNode<u64>, u64)> = sources.iter()
192197
.map(|dep_node| {

0 commit comments

Comments
 (0)