Commit 4560efe

Pass type when creating load
This makes load generation compatible with opaque pointers. The generation of nontemporal copies still accesses the pointer element type, as fixing that would require moving more code around.
1 parent 33e9a6b · commit 4560efe
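
The signature change at the heart of this commit: the builder's load and volatile_load methods now take the type being loaded as an explicit argument (mirroring LLVM's LLVMBuildLoad2) instead of deriving it from the pointer's pointee type, which opaque pointers no longer carry. Below is a minimal, self-contained sketch of that shape change only; ToyBuilder, Type, and Value are made-up stand-ins for illustration, not the actual rustc_codegen_ssa or rustc_codegen_llvm types.

// Hypothetical sketch only: these types and this builder are invented for
// illustration and are not the rustc codegen APIs touched by this commit.
#[derive(Clone, Copy)]
struct Type(&'static str);

#[derive(Clone, Copy)]
struct Value(&'static str);

struct ToyBuilder;

impl ToyBuilder {
    // Old shape (before this commit): fn load(&mut self, ptr: Value, align: u64) -> Value,
    // where the loaded type was recovered from the pointer's pointee type.
    // New shape: the type to load travels with the call, as LLVMBuildLoad2 expects.
    fn load(&mut self, ty: Type, ptr: Value, align: u64) -> Value {
        println!("load {} from {} with align {}", ty.0, ptr.0, align);
        Value("%loaded")
    }
}

fn main() {
    let mut bx = ToyBuilder;
    // The caller now has to know which type it is loading, e.g. from the layout.
    let _val = bx.load(Type("i32"), Value("%ptr"), 4);
}

In the diffs below, callers obtain the type to pass from the value's layout (for example bx.backend_type(op.layout) or place.layout.llvm_type(self)) or from the ABI cast type, rather than from the pointer itself.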

10 files changed: +65 −52 lines

compiler/rustc_codegen_llvm/src/builder.rs

+9-20
@@ -410,17 +410,17 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         }
     }
 
-    fn load(&mut self, ptr: &'ll Value, align: Align) -> &'ll Value {
+    fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
         unsafe {
-            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
+            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
             llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
             load
         }
     }
 
-    fn volatile_load(&mut self, ptr: &'ll Value) -> &'ll Value {
+    fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
         unsafe {
-            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
+            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
             llvm::LLVMSetVolatile(load, llvm::True);
             load
         }
@@ -488,7 +488,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 }
             }
             let llval = const_llval.unwrap_or_else(|| {
-                let load = self.load(place.llval, place.align);
+                let load = self.load(place.layout.llvm_type(self), place.llval, place.align);
                 if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
                     scalar_load_metadata(self, load, scalar);
                 }
@@ -500,7 +500,8 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
 
             let mut load = |i, scalar: &abi::Scalar, align| {
                 let llptr = self.struct_gep(place.llval, i as u64);
-                let load = self.load(llptr, align);
+                let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
+                let load = self.load(llty, llptr, align);
                 scalar_load_metadata(self, load, scalar);
                 self.to_immediate_scalar(load, scalar)
             };
@@ -817,13 +818,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         size: &'ll Value,
         flags: MemFlags,
     ) {
-        if flags.contains(MemFlags::NONTEMPORAL) {
-            // HACK(nox): This is inefficient but there is no nontemporal memcpy.
-            let val = self.load(src, src_align);
-            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
-            self.store_with_flags(val, ptr, dst_align, flags);
-            return;
-        }
+        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memcpy not supported");
         let size = self.intcast(size, self.type_isize(), false);
         let is_volatile = flags.contains(MemFlags::VOLATILE);
         let dst = self.pointercast(dst, self.type_i8p());
@@ -850,13 +845,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         size: &'ll Value,
         flags: MemFlags,
     ) {
-        if flags.contains(MemFlags::NONTEMPORAL) {
-            // HACK(nox): This is inefficient but there is no nontemporal memmove.
-            let val = self.load(src, src_align);
-            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
-            self.store_with_flags(val, ptr, dst_align, flags);
-            return;
-        }
+        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memmove not supported");
         let size = self.intcast(size, self.type_isize(), false);
         let is_volatile = flags.contains(MemFlags::VOLATILE);
         let dst = self.pointercast(dst, self.type_i8p());

compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs

+1-1
@@ -20,7 +20,7 @@ pub fn insert_reference_to_gdb_debug_scripts_section_global(bx: &mut Builder<'_,
     // LLVM to keep around the reference to the global.
     let indices = [bx.const_i32(0), bx.const_i32(0)];
     let element = bx.inbounds_gep(gdb_debug_scripts_section, &indices);
-    let volative_load_instruction = bx.volatile_load(element);
+    let volative_load_instruction = bx.volatile_load(bx.type_i8(), element);
     unsafe {
         llvm::LLVMSetAlignment(volative_load_instruction, 1);
     }

compiler/rustc_codegen_llvm/src/intrinsic.rs

+11-8
@@ -162,11 +162,14 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
 
             sym::volatile_load | sym::unaligned_volatile_load => {
                 let tp_ty = substs.type_at(0);
-                let mut ptr = args[0].immediate();
-                if let PassMode::Cast(ty) = fn_abi.ret.mode {
-                    ptr = self.pointercast(ptr, self.type_ptr_to(ty.llvm_type(self)));
-                }
-                let load = self.volatile_load(ptr);
+                let ptr = args[0].immediate();
+                let load = if let PassMode::Cast(ty) = fn_abi.ret.mode {
+                    let llty = ty.llvm_type(self);
+                    let ptr = self.pointercast(ptr, self.type_ptr_to(llty));
+                    self.volatile_load(llty, ptr)
+                } else {
+                    self.volatile_load(self.layout_of(tp_ty).llvm_type(self), ptr)
+                };
                 let align = if name == sym::unaligned_volatile_load {
                     1
                 } else {
@@ -319,9 +322,9 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                     let integer_ty = self.type_ix(layout.size.bits());
                     let ptr_ty = self.type_ptr_to(integer_ty);
                     let a_ptr = self.bitcast(a, ptr_ty);
-                    let a_val = self.load(a_ptr, layout.align.abi);
+                    let a_val = self.load(integer_ty, a_ptr, layout.align.abi);
                     let b_ptr = self.bitcast(b, ptr_ty);
-                    let b_val = self.load(b_ptr, layout.align.abi);
+                    let b_val = self.load(integer_ty, b_ptr, layout.align.abi);
                     self.icmp(IntPredicate::IntEQ, a_val, b_val)
                 } else {
                     let i8p_ty = self.type_i8p();
@@ -540,7 +543,7 @@ fn codegen_msvc_try(
         // Source: MicrosoftCXXABI::getAddrOfCXXCatchHandlerType in clang
         let flags = bx.const_i32(8);
         let funclet = catchpad_rust.catch_pad(cs, &[tydesc, flags, slot]);
-        let ptr = catchpad_rust.load(slot, ptr_align);
+        let ptr = catchpad_rust.load(bx.type_i8p(), slot, ptr_align);
         catchpad_rust.call(catch_func, &[data, ptr], Some(&funclet));
         catchpad_rust.catch_ret(&funclet, caught.llbb());

compiler/rustc_codegen_llvm/src/llvm/ffi.rs

+6-1
@@ -1385,7 +1385,12 @@ extern "C" {
         Val: &'a Value,
         Name: *const c_char,
     ) -> &'a Value;
-    pub fn LLVMBuildLoad(B: &Builder<'a>, PointerVal: &'a Value, Name: *const c_char) -> &'a Value;
+    pub fn LLVMBuildLoad2(
+        B: &Builder<'a>,
+        Ty: &'a Type,
+        PointerVal: &'a Value,
+        Name: *const c_char,
+    ) -> &'a Value;
 
     pub fn LLVMBuildStore(B: &Builder<'a>, Val: &'a Value, Ptr: &'a Value) -> &'a Value;

compiler/rustc_codegen_llvm/src/va_arg.rs

+12-9
@@ -32,14 +32,15 @@ fn emit_direct_ptr_va_arg(
     slot_size: Align,
     allow_higher_align: bool,
 ) -> (&'ll Value, Align) {
-    let va_list_ptr_ty = bx.cx().type_ptr_to(bx.cx.type_i8p());
+    let va_list_ty = bx.type_i8p();
+    let va_list_ptr_ty = bx.type_ptr_to(va_list_ty);
     let va_list_addr = if list.layout.llvm_type(bx.cx) != va_list_ptr_ty {
         bx.bitcast(list.immediate(), va_list_ptr_ty)
     } else {
         list.immediate()
     };
 
-    let ptr = bx.load(va_list_addr, bx.tcx().data_layout.pointer_align.abi);
+    let ptr = bx.load(va_list_ty, va_list_addr, bx.tcx().data_layout.pointer_align.abi);
 
     let (addr, addr_align) = if allow_higher_align && align > slot_size {
         (round_pointer_up_to_alignment(bx, ptr, align, bx.cx().type_i8p()), align)
@@ -82,10 +83,10 @@ fn emit_ptr_va_arg(
     let (addr, addr_align) =
         emit_direct_ptr_va_arg(bx, list, llty, size, align.abi, slot_size, allow_higher_align);
     if indirect {
-        let tmp_ret = bx.load(addr, addr_align);
-        bx.load(tmp_ret, align.abi)
+        let tmp_ret = bx.load(llty, addr, addr_align);
+        bx.load(bx.cx.layout_of(target_ty).llvm_type(bx.cx), tmp_ret, align.abi)
     } else {
-        bx.load(addr, addr_align)
+        bx.load(llty, addr, addr_align)
     }
 }
 
@@ -118,7 +119,7 @@ fn emit_aapcs_va_arg(
     };
 
     // if the offset >= 0 then the value will be on the stack
-    let mut reg_off_v = bx.load(reg_off, offset_align);
+    let mut reg_off_v = bx.load(bx.type_i32(), reg_off, offset_align);
     let use_stack = bx.icmp(IntPredicate::IntSGE, reg_off_v, zero);
     bx.cond_br(use_stack, &on_stack.llbb(), &maybe_reg.llbb());
 
@@ -139,8 +140,9 @@ fn emit_aapcs_va_arg(
     let use_stack = maybe_reg.icmp(IntPredicate::IntSGT, new_reg_off_v, zero);
     maybe_reg.cond_br(use_stack, &on_stack.llbb(), &in_reg.llbb());
 
+    let top_type = bx.type_i8p();
     let top = in_reg.struct_gep(va_list_addr, reg_top_index);
-    let top = in_reg.load(top, bx.tcx().data_layout.pointer_align.abi);
+    let top = in_reg.load(top_type, top, bx.tcx().data_layout.pointer_align.abi);
 
     // reg_value = *(@top + reg_off_v);
     let mut reg_addr = in_reg.gep(top, &[reg_off_v]);
@@ -149,8 +151,9 @@ fn emit_aapcs_va_arg(
         let offset = bx.const_i32((slot_size - layout.size.bytes()) as i32);
         reg_addr = in_reg.gep(reg_addr, &[offset]);
     }
-    let reg_addr = in_reg.bitcast(reg_addr, bx.cx.type_ptr_to(layout.llvm_type(bx)));
-    let reg_value = in_reg.load(reg_addr, layout.align.abi);
+    let reg_type = layout.llvm_type(bx);
+    let reg_addr = in_reg.bitcast(reg_addr, bx.cx.type_ptr_to(reg_type));
+    let reg_value = in_reg.load(reg_type, reg_addr, layout.align.abi);
     in_reg.br(&end.llbb());
 
     // On Stack block

compiler/rustc_codegen_ssa/src/meth.rs

+6-4
@@ -20,10 +20,11 @@ impl<'a, 'tcx> VirtualIndex {
         // Load the data pointer from the object.
         debug!("get_fn({:?}, {:?})", llvtable, self);
 
-        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(bx.fn_ptr_backend_type(fn_abi)));
+        let llty = bx.fn_ptr_backend_type(fn_abi);
+        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty));
         let ptr_align = bx.tcx().data_layout.pointer_align.abi;
         let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
-        let ptr = bx.load(gep, ptr_align);
+        let ptr = bx.load(llty, gep, ptr_align);
         bx.nonnull_metadata(ptr);
         // Vtable loads are invariant.
         bx.set_invariant_load(ptr);
@@ -38,10 +39,11 @@ impl<'a, 'tcx> VirtualIndex {
         // Load the data pointer from the object.
         debug!("get_int({:?}, {:?})", llvtable, self);
 
-        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(bx.type_isize()));
+        let llty = bx.type_isize();
+        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty));
         let usize_align = bx.tcx().data_layout.pointer_align.abi;
         let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
-        let ptr = bx.load(gep, usize_align);
+        let ptr = bx.load(llty, gep, usize_align);
         // Vtable loads are invariant.
         bx.set_invariant_load(ptr);
         ptr

compiler/rustc_codegen_ssa/src/mir/block.rs

+8-6
@@ -260,7 +260,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             PassMode::Direct(_) | PassMode::Pair(..) => {
                 let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
                 if let Ref(llval, _, align) = op.val {
-                    bx.load(llval, align)
+                    bx.load(bx.backend_type(op.layout), llval, align)
                 } else {
                     op.immediate_or_packed_pair(&mut bx)
                 }
@@ -287,8 +287,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         llval
                     }
                 };
-                let addr = bx.pointercast(llslot, bx.type_ptr_to(bx.cast_backend_type(&cast_ty)));
-                bx.load(addr, self.fn_abi.ret.layout.align.abi)
+                let ty = bx.cast_backend_type(&cast_ty);
+                let addr = bx.pointercast(llslot, bx.type_ptr_to(ty));
+                bx.load(ty, addr, self.fn_abi.ret.layout.align.abi)
             }
         };
         bx.ret(llval);
@@ -1086,15 +1087,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         if by_ref && !arg.is_indirect() {
             // Have to load the argument, maybe while casting it.
             if let PassMode::Cast(ty) = arg.mode {
-                let addr = bx.pointercast(llval, bx.type_ptr_to(bx.cast_backend_type(&ty)));
-                llval = bx.load(addr, align.min(arg.layout.align.abi));
+                let llty = bx.cast_backend_type(&ty);
+                let addr = bx.pointercast(llval, bx.type_ptr_to(llty));
+                llval = bx.load(llty, addr, align.min(arg.layout.align.abi));
             } else {
                 // We can't use `PlaceRef::load` here because the argument
                 // may have a type we don't treat as immediate, but the ABI
                 // used for this call is passing it by-value. In that case,
                 // the load would just produce `OperandValue::Ref` instead
                 // of the `OperandValue::Immediate` we need for the call.
-                llval = bx.load(llval, align);
+                llval = bx.load(bx.backend_type(arg.layout), llval, align);
                 if let abi::Abi::Scalar(ref scalar) = arg.layout.abi {
                     if scalar.is_bool() {
                         bx.range_metadata(llval, 0..2);

compiler/rustc_codegen_ssa/src/mir/operand.rs

+9
@@ -289,6 +289,15 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
         }
         match self {
             OperandValue::Ref(r, None, source_align) => {
+                if flags.contains(MemFlags::NONTEMPORAL) {
+                    // HACK(nox): This is inefficient but there is no nontemporal memcpy.
+                    // FIXME: Don't access pointer element type.
+                    let ty = bx.element_type(bx.val_ty(r));
+                    let val = bx.load(ty, r, source_align);
+                    let ptr = bx.pointercast(dest.llval, bx.type_ptr_to(ty));
+                    bx.store_with_flags(val, ptr, dest.align, flags);
+                    return;
+                }
                 base::memcpy_ty(bx, dest.llval, dest.align, r, source_align, dest.layout, flags)
             }
             OperandValue::Ref(_, Some(_), _) => {

compiler/rustc_codegen_ssa/src/mir/place.rs

+1-1
@@ -407,7 +407,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         let layout = bx.layout_of(target_ty.ty);
 
         PlaceRef {
-            llval: bx.load(self.llval, self.align),
+            llval: bx.load(bx.backend_type(layout), self.llval, self.align),
             llextra: None,
             layout,
             align: layout.align.abi,

compiler/rustc_codegen_ssa/src/traits/builder.rs

+2-2
@@ -137,8 +137,8 @@ pub trait BuilderMethods<'a, 'tcx>:
     fn dynamic_alloca(&mut self, ty: Self::Type, align: Align) -> Self::Value;
     fn array_alloca(&mut self, ty: Self::Type, len: Self::Value, align: Align) -> Self::Value;
 
-    fn load(&mut self, ptr: Self::Value, align: Align) -> Self::Value;
-    fn volatile_load(&mut self, ptr: Self::Value) -> Self::Value;
+    fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
+    fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
     fn atomic_load(
         &mut self,
         ty: Self::Type,
