
Commit 70b9968

Rollup merge of rust-lang#137894 - compiler-errors:no-scalar-pair-opt, r=oli-obk
Revert "store ScalarPair via memset when one side is undef and the other side can be memset" cc rust-lang#137892 reverts rust-lang#135335 r? oli-obk
2 parents a4b6181 + 5f575bc commit 70b9968
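
For context, the reverted optimization let a `ScalarPair` repeat element with one undef half be written with a single `memset` of the defined half's bytes. A minimal sketch of the kind of code it affected, adapted from the `half_uninit` regression test added below (`N` here is a stand-in for the test file's array-length constant, which this diff does not show):

```rust
use std::mem::MaybeUninit;

const N: usize = 16; // stand-in for the constant used in tests/codegen/slice-init.rs

// One half of the pair is a defined zero, the other half is undef.
// Under rust-lang#135335 such an array was initialized via memset; after
// this revert (prompted by rust-lang#137892) it codegens as an ordinary
// repeat loop again.
pub fn half_uninit() -> [(u128, MaybeUninit<u128>); N] {
    [const { (0, MaybeUninit::uninit()) }; N]
}
```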

File tree

8 files changed (+33 -116 lines):

- compiler/rustc_codegen_gcc/src/common.rs
- compiler/rustc_codegen_llvm/src/common.rs
- compiler/rustc_codegen_llvm/src/llvm/ffi.rs
- compiler/rustc_codegen_ssa/src/mir/operand.rs
- compiler/rustc_codegen_ssa/src/mir/rvalue.rs
- compiler/rustc_codegen_ssa/src/traits/consts.rs
- compiler/rustc_middle/src/mir/interpret/allocation.rs
- tests/codegen/slice-init.rs

compiler/rustc_codegen_gcc/src/common.rs (-5)

```diff
@@ -64,11 +64,6 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
         if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) }
     }
 
-    fn is_undef(&self, _val: RValue<'gcc>) -> bool {
-        // FIXME: actually check for undef
-        false
-    }
-
     fn const_undef(&self, typ: Type<'gcc>) -> RValue<'gcc> {
         let local = self.current_func.borrow().expect("func").new_local(None, typ, "undefined");
         if typ.is_struct().is_some() {
```
compiler/rustc_codegen_llvm/src/common.rs (-4)

```diff
@@ -127,10 +127,6 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
         unsafe { llvm::LLVMGetUndef(t) }
     }
 
-    fn is_undef(&self, v: &'ll Value) -> bool {
-        unsafe { llvm::LLVMIsUndef(v) == True }
-    }
-
     fn const_poison(&self, t: &'ll Type) -> &'ll Value {
         unsafe { llvm::LLVMGetPoison(t) }
     }
```

compiler/rustc_codegen_llvm/src/llvm/ffi.rs (-1)

```diff
@@ -1046,7 +1046,6 @@ unsafe extern "C" {
     pub(crate) fn LLVMMetadataTypeInContext(C: &Context) -> &Type;
 
     // Operations on all values
-    pub(crate) fn LLVMIsUndef(Val: &Value) -> Bool;
     pub(crate) fn LLVMTypeOf(Val: &Value) -> &Type;
     pub(crate) fn LLVMGetValueName2(Val: &Value, Length: *mut size_t) -> *const c_char;
     pub(crate) fn LLVMSetValueName2(Val: &Value, Name: *const c_char, NameLen: size_t);
```

compiler/rustc_codegen_ssa/src/mir/operand.rs (+22 -37)

```diff
@@ -203,30 +203,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         let alloc_align = alloc.inner().align;
         assert!(alloc_align >= layout.align.abi);
 
-        // Returns `None` when the value is partially undefined or any byte of it has provenance.
-        // Otherwise returns the value or (if the entire value is undef) returns an undef.
         let read_scalar = |start, size, s: abi::Scalar, ty| {
-            let range = alloc_range(start, size);
             match alloc.0.read_scalar(
                 bx,
-                range,
+                alloc_range(start, size),
                 /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
             ) {
-                Ok(val) => Some(bx.scalar_to_backend(val, s, ty)),
-                Err(_) => {
-                    // We may have failed due to partial provenance or unexpected provenance,
-                    // continue down the normal code path if so.
-                    if alloc.0.provenance().range_empty(range, &bx.tcx())
-                        // Since `read_scalar` failed, but there were no relocations involved, the
-                        // bytes must be partially or fully uninitialized. Thus we can now unwrap the
-                        // information about the range of uninit bytes and check if it's the full range.
-                        && alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range
-                    {
-                        Some(bx.const_undef(ty))
-                    } else {
-                        None
-                    }
-                }
+                Ok(val) => bx.scalar_to_backend(val, s, ty),
+                Err(_) => bx.const_poison(ty),
             }
         };
 
@@ -237,14 +221,16 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         // check that walks over the type of `mplace` to make sure it is truly correct to treat this
         // like a `Scalar` (or `ScalarPair`).
         match layout.backend_repr {
-            BackendRepr::Scalar(s) => {
+            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                 let size = s.size(bx);
                 assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
-                if let Some(val) = read_scalar(offset, size, s, bx.immediate_backend_type(layout)) {
-                    return OperandRef { val: OperandValue::Immediate(val), layout };
-                }
+                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
+                OperandRef { val: OperandValue::Immediate(val), layout }
             }
-            BackendRepr::ScalarPair(a, b) => {
+            BackendRepr::ScalarPair(
+                a @ abi::Scalar::Initialized { .. },
+                b @ abi::Scalar::Initialized { .. },
+            ) => {
                 let (a_size, b_size) = (a.size(bx), b.size(bx));
                 let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                 assert!(b_offset.bytes() > 0);
@@ -260,21 +246,20 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                     b,
                     bx.scalar_pair_element_backend_type(layout, 1, true),
                 );
-                if let (Some(a_val), Some(b_val)) = (a_val, b_val) {
-                    return OperandRef { val: OperandValue::Pair(a_val, b_val), layout };
-                }
+                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
+            }
+            _ if layout.is_zst() => OperandRef::zero_sized(layout),
+            _ => {
+                // Neither a scalar nor scalar pair. Load from a place
+                // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
+                // same `ConstAllocation`?
+                let init = bx.const_data_from_alloc(alloc);
+                let base_addr = bx.static_addr_of(init, alloc_align, None);
+
+                let llval = bx.const_ptr_byte_offset(base_addr, offset);
+                bx.load_operand(PlaceRef::new_sized(llval, layout))
             }
-            _ if layout.is_zst() => return OperandRef::zero_sized(layout),
-            _ => {}
         }
-
-        // Neither a scalar nor scalar pair. Load from a place
-        // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
-        // same `ConstAllocation`?
-        let init = bx.const_data_from_alloc(alloc);
-        let base_addr = bx.static_addr_of(init, alloc_align, None);
-
-        let llval = bx.const_ptr_byte_offset(base_addr, offset);
-        bx.load_operand(PlaceRef::new_sized(llval, layout))
     }
 
     /// Asserts that this operand refers to a scalar and returns
```
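
The pair arithmetic in the context lines above places the second scalar after the first, rounded up to the second scalar's alignment. A self-contained sketch of that rounding step, using plain integers rather than rustc's `Size`/`Align` types:

```rust
/// Round `offset` up to the next multiple of `align` (a power of two),
/// mirroring `(offset + a_size).align_to(b.align(bx).abi)` in the hunk.
fn align_to(offset: u64, align: u64) -> u64 {
    (offset + align - 1) & !(align - 1)
}

fn main() {
    // E.g. a one-byte first scalar followed by a 4-byte-aligned second
    // scalar: `b` lands at offset 4, so `b_offset.bytes() > 0` holds.
    assert_eq!(align_to(0 + 1, 4), 4);
    // An already-aligned offset is left unchanged.
    assert_eq!(align_to(8, 4), 8);
}
```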

compiler/rustc_codegen_ssa/src/mir/rvalue.rs (+2 -24)

```diff
@@ -8,7 +8,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use rustc_middle::{bug, mir, span_bug};
 use rustc_session::config::OptLevel;
 use rustc_span::{DUMMY_SP, Span};
-use tracing::{debug, instrument, trace};
+use tracing::{debug, instrument};
 
 use super::operand::{OperandRef, OperandValue};
 use super::place::PlaceRef;
@@ -93,8 +93,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             return;
         }
 
-        // If `v` is an integer constant whose value is just a single byte repeated N times,
-        // emit a `memset` filling the entire `dest` with that byte.
         let try_init_all_same = |bx: &mut Bx, v| {
             let start = dest.val.llval;
             let size = bx.const_usize(dest.layout.size.bytes());
@@ -119,33 +117,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             false
         };
 
-        trace!(?cg_elem.val);
         match cg_elem.val {
             OperandValue::Immediate(v) => {
                 if try_init_all_same(bx, v) {
                     return;
                 }
             }
-            OperandValue::Pair(a, b) => {
-                let a_is_undef = bx.cx().is_undef(a);
-                match (a_is_undef, bx.cx().is_undef(b)) {
-                    // Can happen for uninit unions
-                    (true, true) => {
-                        // FIXME: can we produce better output here?
-                    }
-                    (false, true) | (true, false) => {
-                        let val = if a_is_undef { b } else { a };
-                        if try_init_all_same(bx, val) {
-                            return;
-                        }
-                    }
-                    (false, false) => {
-                        // FIXME: if both are the same value, use try_init_all_same
-                    }
-                }
-            }
-            OperandValue::ZeroSized => unreachable!("checked above"),
-            OperandValue::Ref(..) => {}
+            _ => (),
         }
 
         let count = self
```
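
The `try_init_all_same` closure kept by this hunk asks whether the element is an integer constant whose bytes are all identical, in which case the whole destination can be filled with one `memset`. A standalone sketch of that byte check (illustrative only; the real code inspects backend immediates):

```rust
/// Returns the fill byte if every byte of the representation is the
/// same, i.e. the repeat can be lowered to a single memset.
fn repeated_byte(bytes: &[u8]) -> Option<u8> {
    let (&first, rest) = bytes.split_first()?;
    rest.iter().all(|&b| b == first).then_some(first)
}

fn main() {
    // `u16::from_be_bytes([1, 1])` from the test file: both bytes are 0x01.
    assert_eq!(repeated_byte(&u16::from_be_bytes([1, 1]).to_ne_bytes()), Some(1));
    // 0x0102 mixes two byte values, so no memset is possible.
    assert_eq!(repeated_byte(&0x0102u16.to_ne_bytes()), None);
}
```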

compiler/rustc_codegen_ssa/src/traits/consts.rs (-1)

```diff
@@ -9,7 +9,6 @@ pub trait ConstCodegenMethods<'tcx>: BackendTypes {
     /// Generate an uninitialized value (matching uninitialized memory in MIR).
     /// Whether memory is initialized or not is tracked byte-for-byte.
     fn const_undef(&self, t: Self::Type) -> Self::Value;
-    fn is_undef(&self, v: Self::Value) -> bool;
     /// Generate a fake value. Poison always affects the entire value, even if just a single byte is
     /// poison. This can only be used in codepaths that are already UB, i.e., UB-free Rust code
     /// (including code that e.g. copies uninit memory with `MaybeUninit`) can never encounter a
```
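
With `is_undef` gone, the trait keeps only constructors for the two flavors of "uninitialized" value, and the doc comments above draw the key distinction: undef is tracked byte-for-byte, while poison contaminates the entire value. A self-contained paraphrase of the remaining surface (with `BackendTypes` stubbed out here; the real trait lives in `rustc_codegen_ssa::traits`):

```rust
// Stub standing in for rustc_codegen_ssa's BackendTypes trait.
pub trait BackendTypes {
    type Type;
    type Value;
}

pub trait ConstCodegenMethods: BackendTypes {
    /// An uninitialized value; initialization is tracked byte-for-byte,
    /// so a value may be only partially undef.
    fn const_undef(&self, t: Self::Type) -> Self::Value;
    /// A poison value; poison always affects the entire value and is
    /// only legal on codepaths that are already UB.
    fn const_poison(&self, t: Self::Type) -> Self::Value;
}

fn main() {}
```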

compiler/rustc_middle/src/mir/interpret/allocation.rs (+1 -1)

```diff
@@ -222,7 +222,7 @@ impl AllocError {
 }
 
 /// The information that makes up a memory access: offset and size.
-#[derive(Copy, Clone, PartialEq)]
+#[derive(Copy, Clone)]
 pub struct AllocRange {
     pub start: Size,
     pub size: Size,
```

tests/codegen/slice-init.rs (+8 -43)

```diff
@@ -2,8 +2,6 @@
 
 #![crate_type = "lib"]
 
-use std::mem::MaybeUninit;
-
 // CHECK-LABEL: @zero_sized_elem
 #[no_mangle]
 pub fn zero_sized_elem() {
@@ -78,64 +76,31 @@ pub fn u16_init_one_bytes() -> [u16; N] {
     [const { u16::from_be_bytes([1, 1]) }; N]
 }
 
+// FIXME: undef bytes can just be initialized with the same value as the
+// defined bytes, if the defines bytes are all the same.
 // CHECK-LABEL: @option_none_init
 #[no_mangle]
 pub fn option_none_init() -> [Option<u8>; N] {
-    // CHECK-NOT: select
-    // CHECK-NOT: br
-    // CHECK-NOT: switch
-    // CHECK-NOT: icmp
-    // CHECK: call void @llvm.memset.p0
-    [const { None }; N]
-}
-
-// If there is partial provenance or some bytes are initialized and some are not,
-// we can't really do better than initialize bytes or groups of bytes together.
-// CHECK-LABEL: @option_maybe_uninit_init
-#[no_mangle]
-pub fn option_maybe_uninit_init() -> [MaybeUninit<u16>; N] {
     // CHECK-NOT: select
     // CHECK: br label %repeat_loop_header{{.*}}
     // CHECK-NOT: switch
     // CHECK: icmp
     // CHECK-NOT: call void @llvm.memset.p0
-    [const {
-        let mut val: MaybeUninit<u16> = MaybeUninit::uninit();
-        let ptr = val.as_mut_ptr() as *mut u8;
-        unsafe {
-            ptr.write(0);
-        }
-        val
-    }; N]
+    [None; N]
 }
 
-#[repr(packed)]
-struct Packed {
-    start: u8,
-    ptr: &'static (),
-    rest: u16,
-    rest2: u8,
-}
+use std::mem::MaybeUninit;
 
-// If there is partial provenance or some bytes are initialized and some are not,
-// we can't really do better than initialize bytes or groups of bytes together.
-// CHECK-LABEL: @option_maybe_uninit_provenance
+// FIXME: This could be optimized into a memset.
+// Regression test for <https://github.com/rust-lang/rust/issues/137892>.
 #[no_mangle]
-pub fn option_maybe_uninit_provenance() -> [MaybeUninit<Packed>; N] {
+pub fn half_uninit() -> [(u128, MaybeUninit<u128>); N] {
     // CHECK-NOT: select
     // CHECK: br label %repeat_loop_header{{.*}}
     // CHECK-NOT: switch
     // CHECK: icmp
     // CHECK-NOT: call void @llvm.memset.p0
-    [const {
-        let mut val: MaybeUninit<Packed> = MaybeUninit::uninit();
-        unsafe {
-            let ptr = &raw mut (*val.as_mut_ptr()).ptr;
-            static HAS_ADDR: () = ();
-            ptr.write_unaligned(&HAS_ADDR);
-        }
-        val
-    }; N]
+    [const { (0, MaybeUninit::uninit()) }; N]
 }
 
 // Use an opaque function to prevent rustc from removing useless drops.
```
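
To reproduce these FileCheck assertions locally, codegen tests like this one are normally driven through the compiletest harness from a rust-lang/rust checkout, e.g. `./x test tests/codegen/slice-init.rs` (shown as a typical invocation, not a command taken from this commit).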
