Commit f0904ee

Also generate undef scalars and scalar pairs
1 parent 3ff1b64 commit f0904ee

File tree: 3 files changed (+38 -25 lines)
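
In short: when building a constant operand from a const allocation, the `Scalar` and `ScalarPair` fast paths no longer require `abi::Scalar::Initialized`. A read that fails only because the whole range is uninitialized (and carries no provenance) is now emitted as an `undef` immediate; partially initialized or provenance-carrying values still fall back to materializing the allocation and loading from it.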

compiler/rustc_codegen_ssa/src/mir/operand.rs (+37 -22)

@@ -204,14 +204,30 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         let alloc_align = alloc.inner().align;
         assert!(alloc_align >= layout.align.abi);
 
+        // Returns `None` when the value is partially undefined or any byte of it has provenance.
+        // Otherwise returns the value or (if the entire value is undef) returns an undef.
         let read_scalar = |start, size, s: abi::Scalar, ty| {
+            let range = alloc_range(start, size);
             match alloc.0.read_scalar(
                 bx,
-                alloc_range(start, size),
+                range,
                 /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
             ) {
-                Ok(val) => bx.scalar_to_backend(val, s, ty),
-                Err(_) => bx.const_poison(ty),
+                Ok(val) => Some(bx.scalar_to_backend(val, s, ty)),
+                Err(_) => {
+                    // We may have failed due to partial provenance or unexpected provenance,
+                    // continue down the normal code path if so.
+                    if alloc.0.provenance().range_empty(range, &bx.tcx())
+                        // Since `read_scalar` failed, but there were no relocations involved, the
+                        // bytes must be partially or fully uninitialized. Thus we can now unwrap the
+                        // information about the range of uninit bytes and check if it's the full range.
+                        && alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range
+                    {
+                        Some(bx.const_undef(ty))
+                    } else {
+                        None
+                    }
+                }
             }
         };
 
@@ -222,16 +238,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         // check that walks over the type of `mplace` to make sure it is truly correct to treat this
         // like a `Scalar` (or `ScalarPair`).
         match layout.backend_repr {
-            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
+            BackendRepr::Scalar(s) => {
                 let size = s.size(bx);
                 assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
-                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
-                OperandRef { val: OperandValue::Immediate(val), layout }
+                if let Some(val) = read_scalar(offset, size, s, bx.immediate_backend_type(layout)) {
+                    return OperandRef { val: OperandValue::Immediate(val), layout };
+                }
             }
-            BackendRepr::ScalarPair(
-                a @ abi::Scalar::Initialized { .. },
-                b @ abi::Scalar::Initialized { .. },
-            ) => {
+            BackendRepr::ScalarPair(a, b) => {
                 let (a_size, b_size) = (a.size(bx), b.size(bx));
                 let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                 assert!(b_offset.bytes() > 0);
@@ -247,20 +261,21 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                     b,
                     bx.scalar_pair_element_backend_type(layout, 1, true),
                 );
-                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
-            }
-            _ if layout.is_zst() => OperandRef::zero_sized(layout),
-            _ => {
-                // Neither a scalar nor scalar pair. Load from a place
-                // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
-                // same `ConstAllocation`?
-                let init = bx.const_data_from_alloc(alloc);
-                let base_addr = bx.static_addr_of(init, alloc_align, None);
-
-                let llval = bx.const_ptr_byte_offset(base_addr, offset);
-                bx.load_operand(PlaceRef::new_sized(llval, layout))
+                if let (Some(a_val), Some(b_val)) = (a_val, b_val) {
+                    return OperandRef { val: OperandValue::Pair(a_val, b_val), layout };
+                }
             }
+            _ if layout.is_zst() => return OperandRef::zero_sized(layout),
+            _ => {}
         }
+        // Neither a scalar nor scalar pair. Load from a place
+        // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
+        // same `ConstAllocation`?
+        let init = bx.const_data_from_alloc(alloc);
+        let base_addr = bx.static_addr_of(init, alloc_align, None);
+
+        let llval = bx.const_ptr_byte_offset(base_addr, offset);
+        bx.load_operand(PlaceRef::new_sized(llval, layout))
     }
 
     /// Asserts that this operand refers to a scalar and returns
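
As a hedged illustration of what the new path enables (the `Partial` type and `PARTIAL` constant below are hypothetical, not taken from the commit or its tests, and it is assumed here that the pair gets a `ScalarPair` layout whose second element is not `Scalar::Initialized`): a constant whose bytes for one element are entirely uninitialized and carry no provenance previously took the load-from-place fallback, but can now be emitted with an `undef` immediate for that element.

use std::mem::MaybeUninit;

// Hypothetical pair type: the second half is never written.
#[derive(Clone, Copy)]
pub struct Partial(pub u32, pub MaybeUninit<u32>);

// The const allocation holds defined bytes for the `u32` and fully
// uninitialized, provenance-free bytes for the `MaybeUninit<u32>`.
pub const PARTIAL: Partial = Partial(7, MaybeUninit::uninit());

pub fn first() -> u32 {
    // When `PARTIAL` is materialized as an immediate pair, the first element is
    // the constant 7; the second element's bytes are fully uninitialized, which
    // `read_scalar` can now represent as an `undef` scalar instead of forcing
    // a fallback load from a materialized allocation.
    PARTIAL.0
}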

compiler/rustc_middle/src/mir/interpret/allocation.rs (+1 -1)

@@ -222,7 +222,7 @@ impl AllocError {
 }
 
 /// The information that makes up a memory access: offset and size.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, PartialEq)]
 pub struct AllocRange {
     pub start: Size,
     pub size: Size,
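
The `PartialEq` derive is what lets the new check in operand.rs compile: `alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range` compares two `AllocRange` values to test whether the uninitialized sub-range reported by the init mask covers the entire range being read.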

tests/codegen/overaligned-constant.rs (+0 -2)

@@ -17,8 +17,6 @@ pub fn overaligned_constant() {
     // CHECK-LABEL: @overaligned_constant
     // CHECK: [[full:%_.*]] = alloca [32 x i8], align 8
     // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[full]], ptr align 8 @0, i64 32, i1 false)
-    // CHECK: %b.0 = load i32, ptr @0, align 4
-    // CHECK: %b.1 = load i32, ptr getelementptr inbounds ({{.*}}), align 4
     let mut s = S(1);
 
     s.0 = 3;
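
With the scalar-pair path above no longer restricted to `Scalar::Initialized`, the constant pair is produced as immediates (with `undef` for its uninitialized part) rather than loaded back element-by-element from the materialized constant `@0`, so the `%b.0`/`%b.1` load checks are dropped from the test.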
