Skip to content

Commit 5cbbfe2

Browse files
committed
Auto merge of rust-lang#119440 - Nilstrieb:overflow-checks, r=<try>
[perf experiment] Enable overflow checks for not-std r? `@ghost`
2 parents: e51e98d + 80ea420 — commit 5cbbfe2

File tree

9 files changed

+39
-38
lines changed

9 files changed

+39
-38
lines changed

compiler/rustc_data_structures/src/sip128.rs

+16-16
Original file line number | Diff line number | Diff line change
@@ -102,20 +102,20 @@ unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize)
102102
return;
103103
}
104104

105-
let mut i = 0;
106-
if i + 3 < count {
105+
let mut i = 0_usize;
106+
if i.wrapping_add(3) < count {
107107
ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4);
108-
i += 4;
108+
i = i.wrapping_add(4);
109109
}
110110

111111
if i + 1 < count {
112112
ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2);
113-
i += 2
113+
i = i.wrapping_add(2);
114114
}
115115

116116
if i < count {
117117
*dst.add(i) = *src.add(i);
118-
i += 1;
118+
i = i.wrapping_add(1);
119119
}
120120

121121
debug_assert_eq!(i, count);
@@ -211,14 +211,14 @@ impl SipHasher128 {
211211
debug_assert!(nbuf < BUFFER_SIZE);
212212
debug_assert!(nbuf + LEN < BUFFER_WITH_SPILL_SIZE);
213213

214-
if nbuf + LEN < BUFFER_SIZE {
214+
if nbuf.wrapping_add(LEN) < BUFFER_SIZE {
215215
unsafe {
216216
// The memcpy call is optimized away because the size is known.
217217
let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf);
218218
ptr::copy_nonoverlapping(bytes.as_ptr(), dst, LEN);
219219
}
220220

221-
self.nbuf = nbuf + LEN;
221+
self.nbuf = nbuf.wrapping_add(LEN);
222222

223223
return;
224224
}
@@ -265,8 +265,8 @@ impl SipHasher128 {
265265
// This function should only be called when the write fills the buffer.
266266
// Therefore, when LEN == 1, the new `self.nbuf` must be zero.
267267
// LEN is statically known, so the branch is optimized away.
268-
self.nbuf = if LEN == 1 { 0 } else { nbuf + LEN - BUFFER_SIZE };
269-
self.processed += BUFFER_SIZE;
268+
self.nbuf = if LEN == 1 { 0 } else { nbuf.wrapping_add(LEN).wrapping_sub(BUFFER_SIZE) };
269+
self.processed = self.processed.wrapping_add(BUFFER_SIZE);
270270
}
271271
}
272272

@@ -277,7 +277,7 @@ impl SipHasher128 {
277277
let nbuf = self.nbuf;
278278
debug_assert!(nbuf < BUFFER_SIZE);
279279

280-
if nbuf + length < BUFFER_SIZE {
280+
if nbuf.wrapping_add(length) < BUFFER_SIZE {
281281
unsafe {
282282
let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf);
283283

@@ -289,7 +289,7 @@ impl SipHasher128 {
289289
}
290290
}
291291

292-
self.nbuf = nbuf + length;
292+
self.nbuf = nbuf.wrapping_add(length);
293293

294294
return;
295295
}
@@ -327,7 +327,7 @@ impl SipHasher128 {
327327
// ELEM_SIZE` to show the compiler that this loop's upper bound is > 0.
328328
// We know that is true, because last step ensured we have a full
329329
// element in the buffer.
330-
let last = nbuf / ELEM_SIZE + 1;
330+
let last = (nbuf / ELEM_SIZE).wrapping_add(1);
331331

332332
for i in 0..last {
333333
let elem = self.buf.get_unchecked(i).assume_init().to_le();
@@ -338,7 +338,7 @@ impl SipHasher128 {
338338

339339
// Process the remaining element-sized chunks of input.
340340
let mut processed = needed_in_elem;
341-
let input_left = length - processed;
341+
let input_left = length.wrapping_sub(processed);
342342
let elems_left = input_left / ELEM_SIZE;
343343
let extra_bytes_left = input_left % ELEM_SIZE;
344344

@@ -347,7 +347,7 @@ impl SipHasher128 {
347347
self.state.v3 ^= elem;
348348
Sip13Rounds::c_rounds(&mut self.state);
349349
self.state.v0 ^= elem;
350-
processed += ELEM_SIZE;
350+
processed = processed.wrapping_add(ELEM_SIZE);
351351
}
352352

353353
// Copy remaining input into start of buffer.
@@ -356,7 +356,7 @@ impl SipHasher128 {
356356
copy_nonoverlapping_small(src, dst, extra_bytes_left);
357357

358358
self.nbuf = extra_bytes_left;
359-
self.processed += nbuf + processed;
359+
self.processed = self.processed.wrapping_add(nbuf).wrapping_add(processed);
360360
}
361361
}
362362

@@ -394,7 +394,7 @@ impl SipHasher128 {
394394
};
395395

396396
// Finalize the hash.
397-
let length = self.processed + self.nbuf;
397+
let length = self.processed.wrapping_add(self.nbuf);
398398
let b: u64 = ((length as u64 & 0xff) << 56) | elem;
399399

400400
state.v3 ^= b;

compiler/rustc_middle/src/mir/interpret/mod.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -285,7 +285,7 @@ impl AllocDecodingState {
285285
let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);
286286

287287
// Make sure this is never zero.
288-
let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();
288+
let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF).wrapping_add(1)).unwrap();
289289

290290
AllocDecodingSession { state: self, session_id }
291291
}

compiler/rustc_query_system/src/dep_graph/serialized.rs

+3-3
Original file line number | Diff line number | Diff line change
@@ -101,14 +101,14 @@ impl SerializedDepGraph {
101101
// edge list, or the end of the array if this is the last edge.
102102
let end = self
103103
.edge_list_indices
104-
.get(source + 1)
104+
.get(source.wrapping_add(1))
105105
.map(|h| h.start())
106-
.unwrap_or_else(|| self.edge_list_data.len() - DEP_NODE_PAD);
106+
.unwrap_or_else(|| self.edge_list_data.len().wrapping_sub(DEP_NODE_PAD));
107107

108108
// The number of edges for this node is implicitly stored in the combination of the byte
109109
// width and the length.
110110
let bytes_per_index = header.bytes_per_index();
111-
let len = (end - header.start()) / bytes_per_index;
111+
let len = (end.wrapping_sub(header.start())) / bytes_per_index;
112112

113113
// LLVM doesn't hoist EdgeHeader::mask so we do it ourselves.
114114
let mask = header.mask();

compiler/rustc_serialize/src/leb128.rs

+4-4
Original file line number | Diff line number | Diff line change
@@ -24,15 +24,15 @@ macro_rules! impl_write_unsigned_leb128 {
2424
*out.get_unchecked_mut(i) = value as u8;
2525
}
2626

27-
i += 1;
27+
i = i.wrapping_add(1);
2828
break;
2929
} else {
3030
unsafe {
3131
*out.get_unchecked_mut(i) = ((value & 0x7f) | 0x80) as u8;
3232
}
3333

3434
value >>= 7;
35-
i += 1;
35+
i = i.wrapping_add(1);
3636
}
3737
}
3838

@@ -60,7 +60,7 @@ macro_rules! impl_read_unsigned_leb128 {
6060
return byte as $int_ty;
6161
}
6262
let mut result = (byte & 0x7F) as $int_ty;
63-
let mut shift = 7;
63+
let mut shift = 7_usize;
6464
loop {
6565
let byte = decoder.read_u8();
6666
if (byte & 0x80) == 0 {
@@ -69,7 +69,7 @@ macro_rules! impl_read_unsigned_leb128 {
6969
} else {
7070
result |= ((byte & 0x7F) as $int_ty) << shift;
7171
}
72-
shift += 7;
72+
shift = shift.wrapping_add(7);
7373
}
7474
}
7575
};

compiler/rustc_serialize/src/opaque.rs

+3-3
Original file line number | Diff line number | Diff line change
@@ -65,7 +65,7 @@ impl FileEncoder {
6565
// Tracking position this way instead of having a `self.position` field
6666
// means that we only need to update `self.buffered` on a write call,
6767
// as opposed to updating `self.position` and `self.buffered`.
68-
self.flushed + self.buffered
68+
self.flushed.wrapping_add(self.buffered)
6969
}
7070

7171
#[cold]
@@ -119,7 +119,7 @@ impl FileEncoder {
119119
}
120120
if let Some(dest) = self.buffer_empty().get_mut(..buf.len()) {
121121
dest.copy_from_slice(buf);
122-
self.buffered += buf.len();
122+
self.buffered = self.buffered.wrapping_add(buf.len());
123123
} else {
124124
self.write_all_cold_path(buf);
125125
}
@@ -158,7 +158,7 @@ impl FileEncoder {
158158
if written > N {
159159
Self::panic_invalid_write::<N>(written);
160160
}
161-
self.buffered += written;
161+
self.buffered = self.buffered.wrapping_add(written);
162162
}
163163

164164
#[cold]

compiler/rustc_span/src/caching_source_map_view.rs

+3-3
Original file line number | Diff line number | Diff line change
@@ -40,7 +40,7 @@ impl CacheEntry {
4040
let pos = self.file.relative_position(pos);
4141
let line_index = self.file.lookup_line(pos).unwrap();
4242
let line_bounds = self.file.line_bounds(line_index);
43-
self.line_number = line_index + 1;
43+
self.line_number = line_index.wrapping_add(1);
4444
self.line = line_bounds;
4545
self.touch(time_stamp);
4646
}
@@ -81,15 +81,15 @@ impl<'sm> CachingSourceMapView<'sm> {
8181
&mut self,
8282
pos: BytePos,
8383
) -> Option<(Lrc<SourceFile>, usize, RelativeBytePos)> {
84-
self.time_stamp += 1;
84+
self.time_stamp = self.time_stamp.wrapping_add(1);
8585

8686
// Check if the position is in one of the cached lines
8787
let cache_idx = self.cache_entry_index(pos);
8888
if cache_idx != -1 {
8989
let cache_entry = &mut self.line_cache[cache_idx as usize];
9090
cache_entry.touch(self.time_stamp);
9191

92-
let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
92+
let col = RelativeBytePos(pos.to_u32().wrapping_sub(cache_entry.line.start.to_u32()));
9393
return Some((cache_entry.file.clone(), cache_entry.line_number, col));
9494
}
9595

compiler/rustc_span/src/lib.rs

+6-5
Original file line number | Diff line number | Diff line change
@@ -1746,7 +1746,7 @@ impl SourceFile {
17461746

17471747
#[inline]
17481748
pub fn relative_position(&self, pos: BytePos) -> RelativeBytePos {
1749-
RelativeBytePos::from_u32(pos.to_u32() - self.start_pos.to_u32())
1749+
RelativeBytePos::from_u32(pos.to_u32().wrapping_sub(self.start_pos.to_u32()))
17501750
}
17511751

17521752
#[inline]
@@ -1769,10 +1769,11 @@ impl SourceFile {
17691769

17701770
let lines = self.lines();
17711771
assert!(line_index < lines.len());
1772-
if line_index == (lines.len() - 1) {
1772+
if line_index == (lines.len().wrapping_sub(1)) {
17731773
self.absolute_position(lines[line_index])..self.end_position()
17741774
} else {
1775-
self.absolute_position(lines[line_index])..self.absolute_position(lines[line_index + 1])
1775+
self.absolute_position(lines[line_index])
1776+
..self.absolute_position(lines[line_index.wrapping_add(1)])
17761777
}
17771778
}
17781779

@@ -2039,7 +2040,7 @@ macro_rules! impl_pos {
20392040

20402041
#[inline(always)]
20412042
fn add(self, rhs: $ident) -> $ident {
2042-
$ident(self.0 + rhs.0)
2043+
$ident(self.0.wrapping_add(rhs.0))
20432044
}
20442045
}
20452046

@@ -2048,7 +2049,7 @@ macro_rules! impl_pos {
20482049

20492050
#[inline(always)]
20502051
fn sub(self, rhs: $ident) -> $ident {
2051-
$ident(self.0 - rhs.0)
2052+
$ident(self.0.wrapping_sub(rhs.0))
20522053
}
20532054
}
20542055
)*

compiler/rustc_span/src/span_encoding.rs

+2-2
Original file line number | Diff line number | Diff line change
@@ -166,7 +166,7 @@ impl Span {
166166
debug_assert!(len <= MAX_LEN);
167167
SpanData {
168168
lo: BytePos(self.lo_or_index),
169-
hi: BytePos(self.lo_or_index + len),
169+
hi: BytePos(self.lo_or_index.wrapping_add(len)),
170170
ctxt: SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32),
171171
parent: None,
172172
}
@@ -179,7 +179,7 @@ impl Span {
179179
};
180180
SpanData {
181181
lo: BytePos(self.lo_or_index),
182-
hi: BytePos(self.lo_or_index + len),
182+
hi: BytePos(self.lo_or_index.wrapping_add(len)),
183183
ctxt: SyntaxContext::root(),
184184
parent: Some(parent),
185185
}

src/bootstrap/src/core/builder.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -1749,7 +1749,7 @@ impl<'a> Builder<'a> {
17491749
if mode == Mode::Std {
17501750
self.config.rust_overflow_checks_std.to_string()
17511751
} else {
1752-
self.config.rust_overflow_checks.to_string()
1752+
"true".into()
17531753
},
17541754
);
17551755

0 commit comments

Comments (0)