@@ -1,15 +1,14 @@
-use std::assert_matches::assert_matches;
 use std::fmt;
 
 use arrayvec::ArrayVec;
 use either::Either;
 use rustc_abi as abi;
 use rustc_abi::{Align, BackendRepr, Size};
-use rustc_middle::bug;
 use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
 use rustc_middle::mir::{self, ConstValue};
 use rustc_middle::ty::Ty;
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
+use rustc_middle::{bug, span_bug};
 use tracing::debug;
 
 use super::place::{PlaceRef, PlaceValue};
|
@@ -352,79 +351,83 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
 
     pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
         &self,
+        fx: &mut FunctionCx<'a, 'tcx, Bx>,
         bx: &mut Bx,
         i: usize,
     ) -> Self {
         let field = self.layout.field(bx.cx(), i);
         let offset = self.layout.fields.offset(i);
 
-        let mut val = match (self.val, self.layout.backend_repr) {
-            // If the field is ZST, it has no data.
-            _ if field.is_zst() => OperandValue::ZeroSized,
-
-            // Newtype of a scalar, scalar pair or vector.
-            (OperandValue::Immediate(_) | OperandValue::Pair(..), _)
-                if field.size == self.layout.size =>
-            {
-                assert_eq!(offset.bytes(), 0);
-                self.val
+        let val = if field.is_zst() {
+            OperandValue::ZeroSized
+        } else if field.size == self.layout.size {
+            assert_eq!(offset.bytes(), 0);
+            if let Some(field_val) = fx.codegen_transmute_operand(bx, *self, field) {
+                field_val
+            } else {
+                // We have to go through memory for things like a newtype
+                // vector of array, e.g. `#[repr(simd)] struct S([i32; 4]);`.
+                let place = PlaceRef::alloca(bx, field);
+                self.val.store(bx, place.val.with_type(self.layout));
+                bx.load_operand(place).val
             }
-
-            // Extract a scalar component from a pair.
-            (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
-                if offset.bytes() == 0 {
-                    assert_eq!(field.size, a.size(bx.cx()));
-                    OperandValue::Immediate(a_llval)
-                } else {
-                    assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
-                    assert_eq!(field.size, b.size(bx.cx()));
-                    OperandValue::Immediate(b_llval)
+        } else {
+            let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
+                // Extract a scalar component from a pair.
+                (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
+                    if offset.bytes() == 0 {
+                        assert_eq!(field.size, a.size(bx.cx()));
+                        (Some(a), a_llval)
+                    } else {
+                        assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
+                        assert_eq!(field.size, b.size(bx.cx()));
+                        (Some(b), b_llval)
+                    }
                 }
-            }
 
-            // `#[repr(simd)]` types are also immediate.
-            (OperandValue::Immediate(llval), BackendRepr::Vector { .. }) => {
-                OperandValue::Immediate(bx.extract_element(llval, bx.cx().const_usize(i as u64)))
-            }
+                // `#[repr(simd)]` types are also immediate.
+                (OperandValue::Immediate(llval), BackendRepr::Vector { .. }) => {
+                    (None, bx.extract_element(llval, bx.cx().const_usize(i as u64)))
+                }
 
-            _ => bug!("OperandRef::extract_field({:?}): not applicable", self),
+                _ => {
+                    span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
+                }
+            };
+            OperandValue::Immediate(match field.backend_repr {
+                BackendRepr::Vector { .. } => imm,
+                BackendRepr::Scalar(out_scalar) => {
+                    let Some(in_scalar) = in_scalar else {
+                        span_bug!(
+                            fx.mir.span,
+                            "OperandRef::extract_field({:?}): missing input scalar for output scalar",
+                            self
+                        )
+                    };
+                    if in_scalar != out_scalar {
+                        // If the backend and backend_immediate types might differ,
+                        // flip back to the backend type, then to the new immediate.
+                        // This avoids nop truncations, but still handles cases like
+                        // bools in union fields that need to be truncated.
+                        let backend = bx.from_immediate(imm);
+                        bx.to_immediate_scalar(backend, out_scalar)
+                    } else {
+                        imm
+                    }
+                }
+                BackendRepr::Memory { sized: true } => {
+                    span_bug!(
+                        fx.mir.span,
+                        "Projecting into a simd type with padding doesn't work; \
+                         see <https://github.com/rust-lang/rust/issues/137108>",
+                    );
+                }
+                BackendRepr::Uninhabited
+                | BackendRepr::ScalarPair(_, _)
+                | BackendRepr::Memory { sized: false } => bug!(),
+            })
         };
 
-        match (&mut val, field.backend_repr) {
-            (OperandValue::ZeroSized, _) => {}
-            (
-                OperandValue::Immediate(llval),
-                BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) | BackendRepr::Vector { .. },
-            ) => {
-                // Bools in union fields needs to be truncated.
-                *llval = bx.to_immediate(*llval, field);
-            }
-            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(a_abi, b_abi)) => {
-                // Bools in union fields needs to be truncated.
-                *a = bx.to_immediate_scalar(*a, a_abi);
-                *b = bx.to_immediate_scalar(*b, b_abi);
-            }
-            // Newtype vector of array, e.g. #[repr(simd)] struct S([i32; 4]);
-            (OperandValue::Immediate(llval), BackendRepr::Memory { sized: true }) => {
-                assert_matches!(self.layout.backend_repr, BackendRepr::Vector { .. });
-
-                let llfield_ty = bx.cx().backend_type(field);
-
-                // Can't bitcast an aggregate, so round trip through memory.
-                let llptr = bx.alloca(field.size, field.align.abi);
-                bx.store(*llval, llptr, field.align.abi);
-                *llval = bx.load(llfield_ty, llptr, field.align.abi);
-            }
-            (
-                OperandValue::Immediate(_),
-                BackendRepr::Uninhabited | BackendRepr::Memory { sized: false },
-            ) => {
-                bug!()
-            }
-            (OperandValue::Pair(..), _) => bug!(),
-            (OperandValue::Ref(..), _) => bug!(),
-        }
-
         OperandRef { val, layout: field }
     }
 }
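
For context on the new through-memory fallback: below is a minimal sketch (not part of this commit; `repr(simd)` is a perma-unstable nightly-only feature, and the struct name `S` is taken from the comment in the diff) of the newtype-vector-of-array case that `codegen_transmute_operand` declines and the alloca round-trip handles:

```rust
#![feature(repr_simd)] // nightly-only illustration

// The single array field has the same size as the whole vector type, so
// `extract_field` takes the `field.size == self.layout.size` branch. A SIMD
// vector immediate can't be rewritten as an array value in-register, which
// is why codegen stores it to a temporary and reloads it with the array type.
#[repr(simd)]
struct S([i32; 4]);

fn lanes(s: S) -> [i32; 4] {
    // Projecting out the array field is the `extract_field` call in MIR codegen.
    s.0
}

fn main() {
    assert_eq!(lanes(S([1, 2, 3, 4])), [1, 2, 3, 4]);
}
```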
|
@@ -587,7 +590,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
                          but tried to access field {f:?} of pointer {o:?}",
                     );
-                    o = o.extract_field(bx, f.index());
+                    o = o.extract_field(self, bx, f.index());
                 }
                 mir::ProjectionElem::Index(_)
                 | mir::ProjectionElem::ConstantIndex { .. } => {
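
The `in_scalar != out_scalar` truncation path corresponds to the classic bool-in-union case the comments mention. A stable-Rust sketch (illustration only; the union `U` is hypothetical, not from this commit):

```rust
// LLVM stores a `bool` in memory as `i8`, but its immediate form is `i1`.
// Reading the `bool` field out of a union is an `extract_field` whose input
// scalar (the union's unconstrained byte-sized storage) differs from the
// output scalar, so the value is truncated back to `i1` via
// `to_immediate_scalar` in the `BackendRepr::Scalar` arm above.
union U {
    b: bool,
    x: u8,
}

fn read_flag(u: U) -> bool {
    // `u.b` projects a field out of the union operand.
    unsafe { u.b }
}

fn main() {
    assert!(read_flag(U { b: true }));
}
```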
|
|