Commit 8f1443a

Auto merge of #2856 - RalfJung:rustup, r=RalfJung
Rustup
2 parents e46eb2e + 331154b

8 files changed: +289, -22 lines


rust-version

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-f65615f02d22b85e9205f2716ab36182d34bab2b
+70540d51275086ce1a4cb12e9d96a97134df792e

src/concurrency/data_race.rs

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@ use std::{

 use rustc_ast::Mutability;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_index::vec::{Idx, IndexVec};
+use rustc_index::{Idx, IndexVec};
 use rustc_middle::mir;
 use rustc_span::Span;
 use rustc_target::abi::{Align, Size};

src/concurrency/init_once.rs

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 use std::collections::VecDeque;
 use std::num::NonZeroU32;

-use rustc_index::vec::Idx;
+use rustc_index::Idx;

 use super::sync::EvalContextExtPriv as _;
 use super::thread::MachineCallback;

src/concurrency/sync.rs

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ use std::ops::Not;
 use log::trace;

 use rustc_data_structures::fx::FxHashMap;
-use rustc_index::vec::{Idx, IndexVec};
+use rustc_index::{Idx, IndexVec};

 use super::init_once::InitOnce;
 use super::vector_clock::VClock;

src/concurrency/thread.rs

Lines changed: 4 additions & 3 deletions
@@ -10,7 +10,7 @@ use log::trace;

 use rustc_data_structures::fx::FxHashMap;
 use rustc_hir::def_id::DefId;
-use rustc_index::vec::{Idx, IndexVec};
+use rustc_index::{Idx, IndexVec};
 use rustc_middle::mir::Mutability;
 use rustc_middle::ty::layout::TyAndLayout;
 use rustc_span::Span;

@@ -603,10 +603,11 @@ impl<'mir, 'tcx: 'mir> ThreadManager<'mir, 'tcx> {
         // this allows us to have a deterministic scheduler.
         for thread in self.threads.indices() {
             match self.timeout_callbacks.entry(thread) {
-                Entry::Occupied(entry) =>
+                Entry::Occupied(entry) => {
                     if entry.get().call_time.get_wait_time(clock) == Duration::new(0, 0) {
                         return Some((thread, entry.remove().callback));
-                    },
+                    }
+                }
                 Entry::Vacant(_) => {}
             }
         }
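Aside: the second thread.rs hunk only wraps the `Entry::Occupied` arm body in braces, presumably to satisfy the updated toolchain's formatting or lint expectations (the commit itself gives no reason). A minimal, self-contained sketch of the same arm shape, using `std::collections::hash_map::Entry` with invented names (`take_expired`, `deadlines`) rather than Miri's types:

use std::collections::hash_map::{Entry, HashMap};

// Hypothetical standalone example, not Miri code: an `if` without `else`
// as a match-arm body, wrapped in a braced arm as in the hunk above.
fn take_expired(deadlines: &mut HashMap<u32, u64>, id: u32, now: u64) -> Option<u64> {
    match deadlines.entry(id) {
        Entry::Occupied(entry) => {
            if *entry.get() <= now {
                // Remove and return the entry once its deadline has passed.
                return Some(entry.remove());
            }
        }
        Entry::Vacant(_) => {}
    }
    None
}

fn main() {
    let mut deadlines = HashMap::new();
    deadlines.insert(1u32, 5u64);
    assert_eq!(take_expired(&mut deadlines, 1, 10), Some(5));
    assert_eq!(take_expired(&mut deadlines, 1, 10), None);
}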

src/concurrency/vector_clock.rs

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-use rustc_index::vec::Idx;
+use rustc_index::Idx;
 use rustc_span::{Span, SpanData, DUMMY_SP};
 use smallvec::SmallVec;
 use std::{

tests/pass/tree-borrows/read-only-from-mut.rs

Lines changed: 0 additions & 14 deletions
This file was deleted.

(new file; name not shown)

Lines changed: 280 additions & 0 deletions

@@ -0,0 +1,280 @@
//@compile-flags: -Zmiri-tree-borrows
#![feature(allocator_api)]

use std::mem;
use std::ptr;

fn main() {
    aliasing_read_only_mutable_refs();
    string_as_mut_ptr();

    // Stacked Borrows tests
    read_does_not_invalidate1();
    read_does_not_invalidate2();
    mut_raw_then_mut_shr();
    mut_shr_then_mut_raw();
    mut_raw_mut();
    partially_invalidate_mut();
    drop_after_sharing();
    direct_mut_to_const_raw();
    two_raw();
    shr_and_raw();
    disjoint_mutable_subborrows();
    raw_ref_to_part();
    array_casts();
    mut_below_shr();
    wide_raw_ptr_in_tuple();
    not_unpin_not_protected();
}

// Tree Borrows has no issue with several mutable references existing
// at the same time, as long as they are used only immutably.
// I.e. multiple Reserved can coexist.
pub fn aliasing_read_only_mutable_refs() {
    unsafe {
        let base = &mut 42u64;
        let r1 = &mut *(base as *mut u64);
        let r2 = &mut *(base as *mut u64);
        let _l = *r1;
        let _l = *r2;
    }
}

pub fn string_as_mut_ptr() {
    // This errors in Stacked Borrows since as_mut_ptr restricts the provenance,
    // but with Tree Borrows it should work.
    unsafe {
        let mut s = String::from("hello");
        s.reserve(1); // make the `str` that `s` derefs to not cover the entire `s`.

        // Prevent automatically dropping the String's data
        let mut s = mem::ManuallyDrop::new(s);

        let ptr = s.as_mut_ptr();
        let len = s.len();
        let capacity = s.capacity();

        let s = String::from_raw_parts(ptr, len, capacity);

        assert_eq!(String::from("hello"), s);
    }
}

// ----- The tests below were taken from Stacked Borrows ----

// Make sure that reading from an `&mut` does, like reborrowing to `&`,
// NOT invalidate other reborrows.
fn read_does_not_invalidate1() {
    fn foo(x: &mut (i32, i32)) -> &i32 {
        let xraw = x as *mut (i32, i32);
        let ret = unsafe { &(*xraw).1 };
        let _val = x.1; // we just read, this does NOT invalidate the reborrows.
        ret
    }
    assert_eq!(*foo(&mut (1, 2)), 2);
}
// Same as above, but this time we first create a raw, then read from `&mut`
// and then freeze from the raw.
fn read_does_not_invalidate2() {
    fn foo(x: &mut (i32, i32)) -> &i32 {
        let xraw = x as *mut (i32, i32);
        let _val = x.1; // we just read, this does NOT invalidate the raw reborrow.
        let ret = unsafe { &(*xraw).1 };
        ret
    }
    assert_eq!(*foo(&mut (1, 2)), 2);
}

// Escape a mut to raw, then share the same mut and use the share, then the raw.
// That should work.
fn mut_raw_then_mut_shr() {
    let mut x = 2;
    let xref = &mut x;
    let xraw = &mut *xref as *mut _;
    let xshr = &*xref;
    assert_eq!(*xshr, 2);
    unsafe {
        *xraw = 4;
    }
    assert_eq!(x, 4);
}

// Create first a shared reference and then a raw pointer from a `&mut`
// should permit mutation through that raw pointer.
fn mut_shr_then_mut_raw() {
    let xref = &mut 2;
    let _xshr = &*xref;
    let xraw = xref as *mut _;
    unsafe {
        *xraw = 3;
    }
    assert_eq!(*xref, 3);
}

// Ensure that if we derive from a mut a raw, and then from that a mut,
// and then read through the original mut, that does not invalidate the raw.
// This shows that the read-exception for `&mut` applies even if the `Shr` item
// on the stack is not at the top.
fn mut_raw_mut() {
    let mut x = 2;
    {
        let xref1 = &mut x;
        let xraw = xref1 as *mut _;
        let _xref2 = unsafe { &mut *xraw };
        let _val = *xref1;
        unsafe {
            *xraw = 4;
        }
        // we can now use both xraw and xref1, for reading
        assert_eq!(*xref1, 4);
        assert_eq!(unsafe { *xraw }, 4);
        assert_eq!(*xref1, 4);
        assert_eq!(unsafe { *xraw }, 4);
        // we cannot use xref2; see `compile-fail/stacked-borrows/illegal_read4.rs`
    }
    assert_eq!(x, 4);
}

fn partially_invalidate_mut() {
    let data = &mut (0u8, 0u8);
    let reborrow = &mut *data as *mut (u8, u8);
    let shard = unsafe { &mut (*reborrow).0 };
    data.1 += 1; // the deref overlaps with `shard`, but that is ok; the access does not overlap.
    *shard += 1; // so we can still use `shard`.
    assert_eq!(*data, (1, 1));
}

// Make sure that we can handle the situation where a location is frozen when being dropped.
fn drop_after_sharing() {
    let x = String::from("hello!");
    let _len = x.len();
}

// Make sure that coercing &mut T to *const T produces a writeable pointer.
fn direct_mut_to_const_raw() {
    // TODO: This is currently disabled, waiting on a decision on <https://github.com/rust-lang/rust/issues/56604>
    /*let x = &mut 0;
    let y: *const i32 = x;
    unsafe { *(y as *mut i32) = 1; }
    assert_eq!(*x, 1);
    */
}

// Make sure that we can create two raw pointers from a mutable reference and use them both.
fn two_raw() {
    unsafe {
        let x = &mut 0;
        let y1 = x as *mut _;
        let y2 = x as *mut _;
        *y1 += 2;
        *y2 += 1;
    }
}

// Make sure that creating a *mut does not invalidate existing shared references.
fn shr_and_raw() {
    unsafe {
        let x = &mut 0;
        let y1: &i32 = mem::transmute(&*x); // launder lifetimes
        let y2 = x as *mut _;
        let _val = *y1;
        *y2 += 1;
    }
}

fn disjoint_mutable_subborrows() {
    struct Foo {
        a: String,
        b: Vec<u32>,
    }

    unsafe fn borrow_field_a<'a>(this: *mut Foo) -> &'a mut String {
        &mut (*this).a
    }

    unsafe fn borrow_field_b<'a>(this: *mut Foo) -> &'a mut Vec<u32> {
        &mut (*this).b
    }

    let mut foo = Foo { a: "hello".into(), b: vec![0, 1, 2] };

    let ptr = &mut foo as *mut Foo;

    let a = unsafe { borrow_field_a(ptr) };
    let b = unsafe { borrow_field_b(ptr) };
    b.push(4);
    a.push_str(" world");
    assert_eq!(format!("{:?} {:?}", a, b), r#""hello world" [0, 1, 2, 4]"#);
}

fn raw_ref_to_part() {
    struct Part {
        _lame: i32,
    }

    #[repr(C)]
    struct Whole {
        part: Part,
        extra: i32,
    }

    let it = Box::new(Whole { part: Part { _lame: 0 }, extra: 42 });
    let whole = ptr::addr_of_mut!(*Box::leak(it));
    let part = unsafe { ptr::addr_of_mut!((*whole).part) };
    let typed = unsafe { &mut *(part as *mut Whole) };
    assert!(typed.extra == 42);
    drop(unsafe { Box::from_raw(whole) });
}

/// When casting an array reference to a raw element ptr, that should cover the whole array.
fn array_casts() {
    let mut x: [usize; 2] = [0, 0];
    let p = &mut x as *mut usize;
    unsafe {
        *p.add(1) = 1;
    }

    let x: [usize; 2] = [0, 1];
    let p = &x as *const usize;
    assert_eq!(unsafe { *p.add(1) }, 1);
}

/// Transmuting &&i32 to &&mut i32 is fine.
fn mut_below_shr() {
    let x = 0;
    let y = &x;
    let p = unsafe { core::mem::transmute::<&&i32, &&mut i32>(&y) };
    let r = &**p;
    let _val = *r;
}

fn wide_raw_ptr_in_tuple() {
    let mut x: Box<dyn std::any::Any> = Box::new("ouch");
    let r = &mut *x as *mut dyn std::any::Any;
    // This triggers the visitor-based recursive retagging. It is *not* supposed to retag raw
    // pointers, but then the visitor might recurse into the "fields" of a wide raw pointer and
    // finds a reference (to a vtable) there that it wants to retag... and that would be Wrong.
    let pair = (r, &0);
    let r = unsafe { &mut *pair.0 };
    // Make sure the fn ptr part of the vtable is still fine.
    r.type_id();
}

fn not_unpin_not_protected() {
    // `&mut !Unpin`, at least for now, does not get `noalias` nor `dereferenceable`, so we also
    // don't add protectors. (We could, but until we have a better idea for where we want to go with
    // the self-referential-generator situation, it does not seem worth the potential trouble.)
    use std::marker::PhantomPinned;

    pub struct NotUnpin(i32, PhantomPinned);

    fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) {
        // `f` may mutate, but it may not deallocate!
        f(x)
    }

    inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| {
        let raw = x as *mut _;
        drop(unsafe { Box::from_raw(raw) });
    });
}
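For contrast with `aliasing_read_only_mutable_refs` above: the comment there says the aliasing `&mut` reborrows are fine "as long as they are used only immutably". The sketch below is illustrative only and not part of this commit; it shows the variant that Tree Borrows would be expected to reject once a write happens through one of the aliases. The offending write is left commented out, so the snippet as written compiles, runs, and contains no UB.

// Illustrative sketch, not from the commit. Run under Miri with
// -Zmiri-tree-borrows to experiment.
fn main() {
    unsafe {
        let base = &mut 42u64;
        let r1 = &mut *(base as *mut u64);
        let r2 = &mut *(base as *mut u64);
        let _l = *r1; // reads through both aliases are fine (as in the test above)
        let _l = *r2;
        // *r1 = 1;   // assumption: this write would disable the sibling `r2`...
        let _l = *r2; // ...and this read would then be reported as UB by Miri.
    }
}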
