[WIP] traits/select: use global vs per-infcx caches more uniformly. #69294

Status: Closed. Wants to merge 3 commits; the diff below shows changes from 2 of them.
130 changes: 59 additions & 71 deletions src/librustc_infer/traits/select.rs
@@ -835,18 +835,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
trait_ref: ty::PolyTraitRef<'tcx>,
) -> Option<EvaluationResult> {
let tcx = self.tcx();
if self.can_use_global_caches(param_env) {
let cache = tcx.evaluation_cache.hashmap.borrow();
if let Some(cached) = cache.get(&param_env.and(trait_ref)) {
return Some(cached.get(tcx));
}
}
self.infcx
.evaluation_cache
.hashmap
.borrow()
.get(&param_env.and(trait_ref))
.map(|v| v.get(tcx))
let cache = if self.can_use_global_caches(param_env) && !trait_ref.has_local_value() {
&tcx.evaluation_cache
} else {
&self.infcx.evaluation_cache
};

cache.hashmap.borrow().get(&param_env.and(trait_ref)).map(|v| v.get(tcx))
}
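
The shape of this first hunk: the old code tried the global cache, early-returned on a hit, and then repeated the whole lookup against the per-inference-context cache; the new code picks the cache once and runs a single lookup path. Below is a minimal, self-contained sketch of that pattern, using made-up types rather than rustc internals:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

struct Cache {
    map: RefCell<HashMap<String, bool>>,
}

struct Ctx {
    global_cache: Cache,
    local_cache: Cache,
}

impl Ctx {
    fn check_cache(&self, key: &str, can_use_global: bool, has_local_value: bool) -> Option<bool> {
        // Pick the cache once, then a single lookup path (the shape of the new code).
        let cache = if can_use_global && !has_local_value {
            &self.global_cache
        } else {
            &self.local_cache
        };
        cache.map.borrow().get(key).copied()
    }
}

fn main() {
    let ctx = Ctx {
        global_cache: Cache { map: RefCell::new(HashMap::new()) },
        local_cache: Cache { map: RefCell::new(HashMap::new()) },
    };
    ctx.global_cache.map.borrow_mut().insert("Vec<u32>: Send".to_string(), true);
    // Inference-free query: answered from the global cache.
    assert_eq!(ctx.check_cache("Vec<u32>: Send", true, false), Some(true));
    // Query carrying inference-local data: falls back to the (empty) per-context cache.
    assert_eq!(ctx.check_cache("Vec<u32>: Send", true, true), None);
}
```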

fn insert_evaluation_cache(
@@ -862,28 +857,22 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
return;
}

if self.can_use_global_caches(param_env) {
if !trait_ref.has_local_value() {
debug!(
"insert_evaluation_cache(trait_ref={:?}, candidate={:?}) global",
trait_ref, result,
);
// This may overwrite the cache with the same value
// FIXME: Due to #50507 this overwrites the different values
// This should be changed to use HashMapExt::insert_same
// when that is fixed
self.tcx()
.evaluation_cache
.hashmap
.borrow_mut()
.insert(param_env.and(trait_ref), WithDepNode::new(dep_node, result));
return;
}
}
let cache = if self.can_use_global_caches(param_env) && !trait_ref.has_local_value() {
debug!(
"insert_evaluation_cache(trait_ref={:?}, candidate={:?}) global",
trait_ref, result,
);
// This may overwrite the cache with the same value
// FIXME: Due to #50507 this overwrites the different values
// This should be changed to use HashMapExt::insert_same
// when that is fixed
&self.tcx().evaluation_cache
} else {
debug!("insert_evaluation_cache(trait_ref={:?}, candidate={:?})", trait_ref, result,);
&self.infcx.evaluation_cache
};

debug!("insert_evaluation_cache(trait_ref={:?}, candidate={:?})", trait_ref, result,);
self.infcx
.evaluation_cache
cache
.hashmap
.borrow_mut()
.insert(param_env.and(trait_ref), WithDepNode::new(dep_node, result));
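
`insert_evaluation_cache` gets the same treatment: decide global vs. per-context up front, then perform one `insert` at the end. A hedged sketch of the insert side, again with hypothetical types:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

struct Cache {
    map: RefCell<HashMap<String, &'static str>>,
}

fn insert_result(
    global: &Cache,
    local: &Cache,
    key: String,
    result: &'static str,
    can_use_global: bool,
    has_local_value: bool,
) {
    // Same guard as the lookup side; the insert then happens through one shared path.
    let cache = if can_use_global && !has_local_value {
        // Overwriting an existing entry with the same value is fine here.
        global
    } else {
        local
    };
    cache.map.borrow_mut().insert(key, result);
}

fn main() {
    let global = Cache { map: RefCell::new(HashMap::new()) };
    let local = Cache { map: RefCell::new(HashMap::new()) };
    insert_result(&global, &local, "T: Sized".into(), "ok", true, false);
    insert_result(&global, &local, "_#0t: Clone".into(), "ambiguous", true, true);
    assert!(global.map.borrow().contains_key("T: Sized"));
    assert!(local.map.borrow().contains_key("_#0t: Clone"));
}
```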
@@ -982,6 +971,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
cache_fresh_trait_pred,
dep_node,
candidate.clone(),
stack.obligation.cause.span,
);
candidate
}
@@ -1250,18 +1240,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
) -> Option<SelectionResult<'tcx, SelectionCandidate<'tcx>>> {
let tcx = self.tcx();
let trait_ref = &cache_fresh_trait_pred.skip_binder().trait_ref;
if self.can_use_global_caches(param_env) {
let cache = tcx.selection_cache.hashmap.borrow();
if let Some(cached) = cache.get(&param_env.and(*trait_ref)) {
return Some(cached.get(tcx));
}
}
self.infcx
.selection_cache
.hashmap
.borrow()
.get(&param_env.and(*trait_ref))
.map(|v| v.get(tcx))
let cache = if self.can_use_global_caches(param_env) && !trait_ref.has_local_value() {
&tcx.selection_cache
} else {
&self.infcx.selection_cache
};

cache.hashmap.borrow().get(&param_env.and(*trait_ref)).map(|v| v.get(tcx))
Review comment (Contributor): to be clear, this commit is just a "logical cleanup", right?

}

/// Determines whether can we safely cache the result
@@ -1285,9 +1270,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
result: &SelectionResult<'tcx, SelectionCandidate<'tcx>>,
) -> bool {
match result {
Ok(Some(SelectionCandidate::ParamCandidate(trait_ref))) => {
!trait_ref.skip_binder().input_types().any(|t| t.walk().any(|t_| t_.is_ty_infer()))
}
Ok(Some(SelectionCandidate::ParamCandidate(trait_ref))) => !trait_ref.needs_infer(),
Review comment (Member Author, on lines -1288 to +1280): This is a fix for #55258 (comment), I should make a separate commit but I didn't want to lose track of this.

_ => true,
}
}
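
On the `needs_infer` change above: the old check manually walked the trait ref's input types looking for type inference variables, while `needs_infer()` asks whether anything in the value still mentions inference. A toy sketch of why a single recursive check is the more robust way to phrase it (hypothetical `Ty` enum, not rustc's real type representation):

```rust
#[derive(Debug)]
enum Ty {
    Infer(u32),                 // an unresolved inference variable, e.g. `_#0t`
    Param(&'static str),        // a generic parameter, e.g. `T`
    Adt(&'static str, Vec<Ty>), // a nominal type with type arguments
}

fn needs_infer(ty: &Ty) -> bool {
    // One recursive walk catches inference variables anywhere in the type.
    match ty {
        Ty::Infer(_) => true,
        Ty::Param(_) => false,
        Ty::Adt(_, args) => args.iter().any(needs_infer),
    }
}

fn main() {
    let concrete = Ty::Adt("Vec", vec![Ty::Param("T")]);
    let inferred = Ty::Adt("Vec", vec![Ty::Adt("Option", vec![Ty::Infer(0)])]);
    assert!(!needs_infer(&concrete));
    assert!(needs_infer(&inferred)); // found even when the variable is deeply nested
}
```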
@@ -1298,6 +1281,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>,
dep_node: DepNodeIndex,
candidate: SelectionResult<'tcx, SelectionCandidate<'tcx>>,
span: rustc_span::Span,
) {
let tcx = self.tcx();
let trait_ref = cache_fresh_trait_pred.skip_binder().trait_ref;
@@ -1311,31 +1295,35 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
return;
}

if self.can_use_global_caches(param_env) {
if let Err(Overflow) = candidate {
// Don't cache overflow globally; we only produce this in certain modes.
} else if !trait_ref.has_local_value() {
if !candidate.has_local_value() {
debug!(
"insert_candidate_cache(trait_ref={:?}, candidate={:?}) global",
trait_ref, candidate,
);
// This may overwrite the cache with the same value.
tcx.selection_cache
.hashmap
.borrow_mut()
.insert(param_env.and(trait_ref), WithDepNode::new(dep_node, candidate));
return;
}
}
// HACK(eddyb) never cache overflow (this check used to be global-only).
if let Err(Overflow) = candidate {
return;
}

Review thread on the overflow check:

Contributor: with the exception of this tiny logic change, I guess

Contributor: I can't really think why we would want to cache overflow locally anyhow, that seems like a bug...

Member Author (@eddyb, Mar 2, 2020): Pfft, that's nothing, here's something more fun: I don't think we ever use the local caches except for the intercrate mode, because we're doing freshening all the time.

So we could put intercrate in Reveal or w/e then replace freshening with canonicalization, and have only global caching (either that or I'm seriously misunderstanding the implementation).

debug!(
"insert_candidate_cache(trait_ref={:?}, candidate={:?}) local",
trait_ref, candidate,
);
self.infcx
.selection_cache
let cache = if self.can_use_global_caches(param_env) && !trait_ref.has_local_value() {
if candidate.has_local_value() {
span_bug!(
span,
"selecting inference-free `{}` resulted in `{:?}`?!",
trait_ref,
candidate,
);
}
debug!(
"insert_candidate_cache(trait_ref={:?}, candidate={:?}) global",
trait_ref, candidate,
);
// This may overwrite the cache with the same value.
&tcx.selection_cache
} else {
debug!(
"insert_candidate_cache(trait_ref={:?}, candidate={:?}) local",
trait_ref, candidate,
);
&self.infcx.selection_cache
};

cache
.hashmap
.borrow_mut()
.insert(param_env.and(trait_ref), WithDepNode::new(dep_node, candidate));
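
The new `span_bug!` in this hunk asserts that selecting an inference-free trait ref can never produce a candidate that still carries inference-local data before that candidate goes into the shared global cache. A small sketch of that guard, with hypothetical names:

```rust
#[derive(Debug)]
struct Candidate {
    has_local_value: bool,
}

fn insert_into_global_cache(global: &mut Vec<(String, bool)>, key: &str, candidate: Candidate) {
    // Mirror of the new `span_bug!` guard: an inference-free key must never map to a
    // candidate that still carries inference-local data; treat that as a compiler bug.
    assert!(
        !candidate.has_local_value,
        "selecting inference-free `{}` resulted in `{:?}`?!",
        key, candidate
    );
    global.push((key.to_string(), candidate.has_local_value));
}

fn main() {
    let mut global = Vec::new();
    insert_into_global_cache(&mut global, "u8: Copy", Candidate { has_local_value: false });
    assert_eq!(global.len(), 1);
}
```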