Commit 27de852

interning: decide about mutability purely based on the kind of interning, not the types we see
1 parent 481d45a commit 27de852

1 file changed (+67, -54)

compiler/rustc_const_eval/src/interpret/intern.rs

+67 -54
@@ -5,21 +5,20 @@
 //!
 //! In principle, this is not very complicated: we recursively walk the final value, follow all the
 //! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
-//! is picking the right mutability for the allocations in a `static` initializer: we want to make
-//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
-//! same time, we need to make memory that could be mutated by the program mutable to avoid
-//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
-//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
-//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
-//! always immutable. At least for `const` however we use this opportunity to reject any `const`
-//! that contains allocations whose mutability we cannot identify.)
+//! is picking the right mutability: the outermost allocation generally has a clear mutability, but
+//! what about the other allocations it points to that have also been created with this value?
+//! Here the rules are: `static`, `const`, and promoteds can only create immutable allocations
+//! that way. `static mut` can be initialized with expressions like `&mut 42`, so all inner
+//! allocations are marked mutable. Some of them could potentially be made immutable, but that
+//! would require relying on type information, and given how many ways Rust has to lie about
+//! type information, we want to avoid doing that.
 
 use super::validity::RefTracking;
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir as hir;
 use rustc_middle::mir::interpret::InterpResult;
-use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
+use rustc_middle::ty::{self, layout::TyAndLayout};
 
 use rustc_ast::Mutability;
 
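To make the new doc-comment rules concrete, here is a tiny standalone illustration (not part of this commit; the names are invented). Only the `static mut` initializer creates a mutable inner allocation; the `static` and `const` ones can go into read-only memory:

#![allow(dead_code)]

// `static` and `const`: the inner allocations holding `42` and `43` are
// interned immutably.
static FOO: &i32 = &42;
const BAR: &i32 = &43;

// `static mut` may hand out `&mut` to what it points to, so the inner
// allocation holding `44` must be interned mutably.
static mut BAZ: &mut i32 = &mut 44;

fn main() {}
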
@@ -56,6 +55,8 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_ev
     /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
     /// the intern mode of references we encounter.
     inside_unsafe_cell: bool,
+    /// The mutability with which to intern the pointers we find.
+    intern_mutability: Mutability,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
@@ -82,10 +83,9 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     leftover_allocations: &'rt mut FxIndexSet<AllocId>,
     alloc_id: AllocId,
-    mode: InternMode,
-    ty: Option<Ty<'tcx>>,
+    mutability: Mutability,
 ) -> Option<IsStaticOrFn> {
-    trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
+    trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
     let tcx = ecx.tcx;
     let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
@@ -112,28 +112,15 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
     // Set allocation mutability as appropriate. This is used by LLVM to put things into
     // read-only memory, and also by Miri when evaluating other globals that
     // access this one.
-    if let InternMode::Static(mutability) = mode {
-        // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
-        // no interior mutability.
-        let frozen = ty.map_or(true, |ty| ty.is_freeze(*ecx.tcx, ecx.param_env));
-        // For statics, allocation mutability is the combination of place mutability and
-        // type mutability.
-        // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
-        let immutable = mutability == Mutability::Not && frozen;
-        if immutable {
+    match mutability {
+        Mutability::Not => {
             alloc.mutability = Mutability::Not;
-        } else {
-            // Just making sure we are not "upgrading" an immutable allocation to mutable.
+        }
+        Mutability::Mut => {
+            // This must be already mutable, we won't "un-freeze" allocations ever.
             assert_eq!(alloc.mutability, Mutability::Mut);
         }
-    } else {
-        // No matter what, *constants are never mutable*. Mutating them is UB.
-        // See const_eval::machine::MemoryExtra::can_access_statics for why
-        // immutability is so important.
-
-        // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
-        alloc.mutability = Mutability::Not;
-    };
+    }
     // link the alloc id to the actual allocation
     leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, alloc_id)| alloc_id));
     let alloc = tcx.mk_const_alloc(alloc);
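The new `match` makes the contract of `intern_shallow` explicit: the caller picks the mutability, and interning may freeze an allocation but never un-freeze one. A minimal, self-contained sketch of that rule (simplified stand-in types, not rustc's; the helper name is invented):

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Mutability { Not, Mut }

// Hypothetical helper mirroring the new `intern_shallow` behavior: freezing
// is always allowed, but interning as mutable requires the allocation to
// already be mutable, since interning never "un-freezes" anything.
fn apply_intern_mutability(alloc_mutability: &mut Mutability, requested: Mutability) {
    match requested {
        Mutability::Not => *alloc_mutability = Mutability::Not,
        Mutability::Mut => assert_eq!(*alloc_mutability, Mutability::Mut),
    }
}

fn main() {
    let mut m = Mutability::Mut;
    apply_intern_mutability(&mut m, Mutability::Not); // freezing: ok
    assert_eq!(m, Mutability::Not);
}
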
@@ -144,13 +131,8 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
 impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
     InternVisitor<'rt, 'mir, 'tcx, M>
 {
-    fn intern_shallow(
-        &mut self,
-        alloc_id: AllocId,
-        mode: InternMode,
-        ty: Option<Ty<'tcx>>,
-    ) -> Option<IsStaticOrFn> {
-        intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty)
+    fn intern_shallow(&mut self, alloc_id: AllocId) -> Option<IsStaticOrFn> {
+        intern_shallow(self.ecx, self.leftover_allocations, alloc_id, self.intern_mutability)
     }
 }
 
@@ -181,7 +163,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
         if let Some(alloc_id) = ptr.provenance {
             // Explicitly choose const mode here, since vtables are immutable, even
             // if the reference of the fat pointer is mutable.
-            self.intern_shallow(alloc_id, InternMode::Const, None);
+            self.intern_shallow(alloc_id);
         } else {
             // Validation will error (with a better message) on an invalid vtable pointer.
             // Let validation show the error message, but make sure it *does* error.
@@ -234,7 +216,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
                 InternMode::Const
             }
         };
-        match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
+        match self.intern_shallow(alloc_id) {
             // No need to recurse, these are interned already and statics may have
             // cycles, so we don't want to recurse there
             Some(IsStaticOrFn) => {}
@@ -332,12 +314,36 @@ pub fn intern_const_alloc_recursive<
     intern_kind: InternKind,
     ret: &MPlaceTy<'tcx>,
 ) -> Result<(), ErrorGuaranteed> {
-    let tcx = ecx.tcx;
     let base_intern_mode = match intern_kind {
         InternKind::Static(mutbl) => InternMode::Static(mutbl),
         // `Constant` includes array lengths.
         InternKind::Constant | InternKind::Promoted => InternMode::Const,
     };
+    // We are interning recursively, and for mutability we are distinguishing the "root" allocation
+    // that we are starting in, and all other allocations that we are encountering recursively.
+    let (base_mutability, inner_mutability) = match intern_kind {
+        InternKind::Constant | InternKind::Promoted => {
+            // Completely immutable.
+            (Mutability::Not, Mutability::Not)
+        }
+        InternKind::Static(Mutability::Not) => {
+            (
+                // Outermost allocation is mutable if `!Freeze`.
+                if ret.layout.ty.is_freeze(*ecx.tcx, ecx.param_env) {
+                    Mutability::Not
+                } else {
+                    Mutability::Mut
+                },
+                // Inner allocations are never mutable.
+                Mutability::Not,
+            )
+        }
+        InternKind::Static(Mutability::Mut) => {
+            // Just make everything mutable. We accept code like
+            // `static mut X = &mut 42`, so even inner allocations need to be mutable.
+            (Mutability::Mut, Mutability::Mut)
+        }
+    };
 
     // Type based interning.
     // `ref_tracking` tracks typed references we have already interned and still need to crawl for
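This `match` is the heart of the change: mutability now depends only on the kind of interning plus the root type's `Freeze`-ness, not on the types encountered during the walk. A self-contained sketch of the same decision table (stand-in `InternKind`/`Mutability` types, with `is_freeze` reduced to a boolean parameter):

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Mutability { Not, Mut }

#[derive(Copy, Clone)]
enum InternKind { Constant, Promoted, Static(Mutability) }

// Returns (mutability of the root allocation, mutability of inner allocations).
fn intern_mutabilities(kind: InternKind, root_is_freeze: bool) -> (Mutability, Mutability) {
    match kind {
        // `const` and promoteds are completely immutable.
        InternKind::Constant | InternKind::Promoted => (Mutability::Not, Mutability::Not),
        // `static`: the root is mutable iff its type has interior mutability
        // (`!Freeze`); everything it points to stays immutable.
        InternKind::Static(Mutability::Not) => (
            if root_is_freeze { Mutability::Not } else { Mutability::Mut },
            Mutability::Not,
        ),
        // `static mut`: everything is mutable, since `static mut X = &mut 42`
        // must allow mutation through the inner allocation too.
        InternKind::Static(Mutability::Mut) => (Mutability::Mut, Mutability::Mut),
    }
}

fn main() {
    assert_eq!(intern_mutabilities(InternKind::Constant, true), (Mutability::Not, Mutability::Not));
    assert_eq!(intern_mutabilities(InternKind::Promoted, true), (Mutability::Not, Mutability::Not));
    assert_eq!(intern_mutabilities(InternKind::Static(Mutability::Not), false), (Mutability::Mut, Mutability::Not));
    assert_eq!(intern_mutabilities(InternKind::Static(Mutability::Mut), true), (Mutability::Mut, Mutability::Mut));
}
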
@@ -354,18 +360,22 @@ pub fn intern_const_alloc_recursive<
         // The outermost allocation must exist, because we allocated it with
         // `Memory::allocate`.
         ret.ptr().provenance.unwrap(),
-        base_intern_mode,
-        Some(ret.layout.ty),
+        base_mutability,
     );
 
     ref_tracking.track((ret.clone(), base_intern_mode), || ());
 
+    // We do a type-based traversal to find more allocations to intern. The interner is currently
+    // mid-refactoring; eventually the type-based traversal will be replaced by a simple traversal
+    // of all provenance we see in the allocations, but for now we avoid changing rustc error
+    // messages or accepting extra programs by keeping the old type-based interner around.
     while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
         let res = InternVisitor {
             ref_tracking: &mut ref_tracking,
             ecx,
             mode,
             leftover_allocations,
+            intern_mutability: inner_mutability,
             inside_unsafe_cell: false,
         }
         .visit_value(&mplace);
@@ -394,9 +404,9 @@ pub fn intern_const_alloc_recursive<
     debug!("dead_alloc_map: {:#?}", ecx.memory.dead_alloc_map);
     while let Some(alloc_id) = todo.pop() {
         if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
-            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
-            // references and a `leftover_allocations` set (where we only have a todo-list here).
-            // So we hand-roll the interning logic here again.
+            // This still needs to be interned. We keep the old logic around here to avoid changing
+            // rustc behavior; eventually this should all be removed in favor of ignoring all types
+            // and interning everything with a simple `intern_shallow` loop.
             match intern_kind {
                 // Statics may point to mutable allocations.
                 // Even for immutable statics it would be ok to have mutable allocations behind
@@ -410,11 +420,7 @@ pub fn intern_const_alloc_recursive<
                 // mutability), mutating through them would be UB.
                 // There's no way we can check whether the user is using raw pointers correctly,
                 // so all we can do is mark this as immutable here.
-                InternKind::Promoted => {
-                    // See const_eval::machine::MemoryExtra::can_access_statics for why
-                    // immutability is so important.
-                    alloc.mutability = Mutability::Not;
-                }
+                InternKind::Promoted => {}
                 // If it's a constant, we should not have any "leftovers" as everything
                 // is tracked by const-checking.
                 // FIXME: downgrade this to a warning? It rejects some legitimate consts,
@@ -425,12 +431,19 @@ pub fn intern_const_alloc_recursive<
                 // drop glue, such as the example above.
                 InternKind::Constant => {
                     ecx.tcx.sess.emit_err(UnsupportedUntypedPointer { span: ecx.tcx.span });
-                    // For better errors later, mark the allocation as immutable.
+                }
+            }
+            match inner_mutability {
+                Mutability::Not => {
                     alloc.mutability = Mutability::Not;
                 }
+                Mutability::Mut => {
+                    // This must be already mutable, we won't "un-freeze" allocations ever.
+                    assert_eq!(alloc.mutability, Mutability::Mut);
+                }
             }
-            let alloc = tcx.mk_const_alloc(alloc);
-            tcx.set_alloc_id_memory(alloc_id, alloc);
+            let alloc = ecx.tcx.mk_const_alloc(alloc);
+            ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
             for &(_, alloc_id) in alloc.inner().provenance().ptrs().iter() {
                 if leftover_allocations.insert(alloc_id) {
                     todo.push(alloc_id);
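The comments in this last hunk describe where the refactoring is headed: drop the type-based walk entirely and intern by following provenance alone. A minimal, self-contained sketch of that eventual worklist shape (toy `AllocId`/`Alloc` types and an invented `intern_all` helper; this is a reading of the comments, not code from the commit):

use std::collections::HashSet;

type AllocId = u32;

struct Alloc {
    // Stand-in for the allocation's provenance: the allocations it points to.
    pointees: Vec<AllocId>,
}

// Intern every allocation reachable from `root`, one shallow step at a time.
// The `interned` set keeps cycles (e.g. self-referential statics) from looping.
fn intern_all(root: AllocId, get: impl Fn(AllocId) -> Alloc) -> HashSet<AllocId> {
    let mut interned = HashSet::new();
    let mut todo = vec![root];
    while let Some(id) = todo.pop() {
        if interned.insert(id) {
            // An `intern_shallow` step would move the allocation into `tcx`
            // here; then we enqueue everything it points to.
            todo.extend(get(id).pointees);
        }
    }
    interned
}

fn main() {
    // Tiny allocation graph with a cycle: 0 -> 1 -> {0, 2}.
    let graph = vec![vec![1], vec![0, 2], vec![]];
    let interned = intern_all(0, |id| Alloc { pointees: graph[id as usize].clone() });
    assert_eq!(interned.len(), 3);
}
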

0 commit comments
