Commit

Auto merge of rust-lang#132371 - workingjubilee:rollup-aqd86tm, r=workingjubilee

Rollup of 5 pull requests

Successful merges:

 - rust-lang#129383 (Remap impl-trait lifetimes on HIR instead of AST lowering)
 - rust-lang#132210 (rustdoc: make doctest span tweak a 2024 edition change)
 - rust-lang#132246 (Rename `rustc_abi::Abi` to `BackendRepr`)
 - rust-lang#132267 (force-recompile library changes on download-rustc="if-unchanged")
 - rust-lang#132344 (Merge `HostPolarity` and `BoundConstness`)

Failed merges:

 - rust-lang#132347 (Remove `ValueAnalysis` and `ValueAnalysisWrapper`.)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Oct 30, 2024
2 parents 759e07f + 7b19508 commit 4add5e4
Showing 164 changed files with 1,580 additions and 1,506 deletions.
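The bulk of those changes come from rust-lang#132246, which is mechanical: `rustc_abi::Abi` becomes `BackendRepr`, and its `Aggregate { sized }` variant becomes `Memory { sized }`. As orientation before the diffs below, here is a minimal, hypothetical sketch of the renamed enum's shape as it appears in the hunks; `Payload` is an illustrative stand-in, not the real `rustc_abi::Scalar`:

```rust
// Hypothetical, heavily simplified mirror of the renamed type. The real
// `rustc_abi::BackendRepr` carries `Scalar` payloads and niche information.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum Payload {
    Int,
    Float,
    Pointer,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum BackendRepr {
    Uninhabited,
    Scalar(Payload),
    ScalarPair(Payload, Payload),
    Vector { element: Payload, count: u64 },
    Memory { sized: bool }, // previously `Abi::Aggregate { sized }`
}

fn main() {
    // Call sites migrate mechanically: `layout.abi` becomes
    // `layout.backend_repr`, and `Abi::Aggregate { .. }` patterns
    // become `BackendRepr::Memory { .. }`.
    let repr = BackendRepr::Memory { sized: true };
    assert!(matches!(repr, BackendRepr::Memory { sized: true }));
    println!("{repr:?}");
}
```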
16 changes: 8 additions & 8 deletions compiler/rustc_abi/src/callconv.rs
@@ -6,9 +6,9 @@ mod abi {
 #[cfg(feature = "nightly")]
 use rustc_macros::HashStable_Generic;
 
-#[cfg(feature = "nightly")]
-use crate::{Abi, FieldsShape, TyAbiInterface, TyAndLayout};
 use crate::{Align, HasDataLayout, Size};
+#[cfg(feature = "nightly")]
+use crate::{BackendRepr, FieldsShape, TyAbiInterface, TyAndLayout};
 
 #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@@ -128,27 +128,27 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
     where
         Ty: TyAbiInterface<'a, C> + Copy,
     {
-        match self.abi {
-            Abi::Uninhabited => Err(Heterogeneous),
+        match self.backend_repr {
+            BackendRepr::Uninhabited => Err(Heterogeneous),
 
             // The primitive for this algorithm.
-            Abi::Scalar(scalar) => {
+            BackendRepr::Scalar(scalar) => {
                 let kind = match scalar.primitive() {
                     abi::Int(..) | abi::Pointer(_) => RegKind::Integer,
                     abi::Float(_) => RegKind::Float,
                 };
                 Ok(HomogeneousAggregate::Homogeneous(Reg { kind, size: self.size }))
             }
 
-            Abi::Vector { .. } => {
+            BackendRepr::Vector { .. } => {
                 assert!(!self.is_zst());
                 Ok(HomogeneousAggregate::Homogeneous(Reg {
                     kind: RegKind::Vector,
                     size: self.size,
                 }))
             }
 
-            Abi::ScalarPair(..) | Abi::Aggregate { sized: true } => {
+            BackendRepr::ScalarPair(..) | BackendRepr::Memory { sized: true } => {
                 // Helper for computing `homogeneous_aggregate`, allowing a custom
                 // starting offset (used below for handling variants).
                 let from_fields_at =
@@ -246,7 +246,7 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
                     Ok(result)
                 }
             }
-            Abi::Aggregate { sized: false } => Err(Heterogeneous),
+            BackendRepr::Memory { sized: false } => Err(Heterogeneous),
         }
     }
 }
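The hunks above rename the match inside `homogeneous_aggregate`, which maps a layout's representation to a register class for call-convention purposes. A self-contained sketch of that classification under simplified stand-in types follows; the real code also recurses into `ScalarPair` and sized `Memory` fields, which this sketch omits:

```rust
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum Primitive { Int, Float, Pointer }

// Simplified stand-in for `rustc_abi::BackendRepr` (ScalarPair omitted).
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum BackendRepr {
    Uninhabited,
    Scalar(Primitive),
    Vector { element: Primitive, count: u64 },
    Memory { sized: bool },
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum RegKind { Integer, Float, Vector }

#[derive(Copy, Clone, Debug)]
struct Reg { kind: RegKind, size_bytes: u64 }

struct Heterogeneous;

// Scalars and vectors classify directly into one register kind; anything
// that needs per-field recursion is treated as heterogeneous here.
fn classify(repr: BackendRepr, size_bytes: u64) -> Result<Reg, Heterogeneous> {
    match repr {
        BackendRepr::Uninhabited => Err(Heterogeneous),
        BackendRepr::Scalar(p) => {
            let kind = match p {
                Primitive::Int | Primitive::Pointer => RegKind::Integer,
                Primitive::Float => RegKind::Float,
            };
            Ok(Reg { kind, size_bytes })
        }
        BackendRepr::Vector { .. } => Ok(Reg { kind: RegKind::Vector, size_bytes }),
        BackendRepr::Memory { .. } => Err(Heterogeneous),
    }
}

fn main() {
    let reg = classify(BackendRepr::Scalar(Primitive::Float), 8);
    assert!(matches!(reg, Ok(Reg { kind: RegKind::Float, .. })));
}
```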
104 changes: 53 additions & 51 deletions compiler/rustc_abi/src/layout.rs
@@ -6,7 +6,7 @@ use rustc_index::Idx;
 use tracing::debug;
 
 use crate::{
-    Abi, AbiAndPrefAlign, Align, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
+    AbiAndPrefAlign, Align, BackendRepr, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
     LayoutData, Niche, NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding,
     Variants, WrappingRange,
 };
@@ -125,7 +125,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 offsets: [Size::ZERO, b_offset].into(),
                 memory_index: [0, 1].into(),
             },
-            abi: Abi::ScalarPair(a, b),
+            backend_repr: BackendRepr::ScalarPair(a, b),
             largest_niche,
             align,
             size,
@@ -216,7 +216,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         LayoutData {
             variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Primitive,
-            abi: Abi::Uninhabited,
+            backend_repr: BackendRepr::Uninhabited,
             largest_niche: None,
             align: dl.i8_align,
             size: Size::ZERO,
@@ -331,7 +331,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
 
             if let Ok(common) = common_non_zst_abi_and_align {
                 // Discard valid range information and allow undef
-                let field_abi = field.abi.to_union();
+                let field_abi = field.backend_repr.to_union();
 
                 if let Some((common_abi, common_align)) = common {
                     if common_abi != field_abi {
@@ -340,7 +340,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                     } else {
                         // Fields with the same non-Aggregate ABI should also
                         // have the same alignment
-                        if !matches!(common_abi, Abi::Aggregate { .. }) {
+                        if !matches!(common_abi, BackendRepr::Memory { .. }) {
                             assert_eq!(
                                 common_align, field.align.abi,
                                 "non-Aggregate field with matching ABI but differing alignment"
@@ -369,11 +369,11 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         // If all non-ZST fields have the same ABI, we may forward that ABI
         // for the union as a whole, unless otherwise inhibited.
         let abi = match common_non_zst_abi_and_align {
-            Err(AbiMismatch) | Ok(None) => Abi::Aggregate { sized: true },
+            Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
             Ok(Some((abi, _))) => {
                 if abi.inherent_align(dl).map(|a| a.abi) != Some(align.abi) {
                     // Mismatched alignment (e.g. union is #[repr(packed)]): disable opt
-                    Abi::Aggregate { sized: true }
+                    BackendRepr::Memory { sized: true }
                 } else {
                     abi
                 }
@@ -387,7 +387,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         Ok(LayoutData {
             variants: Variants::Single { index: only_variant_idx },
             fields: FieldsShape::Union(union_field_count),
-            abi,
+            backend_repr: abi,
             largest_niche: None,
             align,
             size: size.align_to(align.abi),
@@ -434,23 +434,23 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 // Already doesn't have any niches
                 Scalar::Union { .. } => {}
             };
-            match &mut st.abi {
-                Abi::Uninhabited => {}
-                Abi::Scalar(scalar) => hide_niches(scalar),
-                Abi::ScalarPair(a, b) => {
+            match &mut st.backend_repr {
+                BackendRepr::Uninhabited => {}
+                BackendRepr::Scalar(scalar) => hide_niches(scalar),
+                BackendRepr::ScalarPair(a, b) => {
                     hide_niches(a);
                     hide_niches(b);
                 }
-                Abi::Vector { element, count: _ } => hide_niches(element),
-                Abi::Aggregate { sized: _ } => {}
+                BackendRepr::Vector { element, count: _ } => hide_niches(element),
+                BackendRepr::Memory { sized: _ } => {}
             }
             st.largest_niche = None;
             return Ok(st);
         }
 
         let (start, end) = scalar_valid_range;
-        match st.abi {
-            Abi::Scalar(ref mut scalar) | Abi::ScalarPair(ref mut scalar, _) => {
+        match st.backend_repr {
+            BackendRepr::Scalar(ref mut scalar) | BackendRepr::ScalarPair(ref mut scalar, _) => {
                 // Enlarging validity ranges would result in missed
                 // optimizations, *not* wrongly assuming the inner
                 // value is valid. e.g. unions already enlarge validity ranges,
@@ -607,8 +607,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
             }
 
             // It can't be a Scalar or ScalarPair because the offset isn't 0.
-            if !layout.abi.is_uninhabited() {
-                layout.abi = Abi::Aggregate { sized: true };
+            if !layout.is_uninhabited() {
+                layout.backend_repr = BackendRepr::Memory { sized: true };
             }
             layout.size += this_offset;
 
@@ -627,26 +627,26 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         let same_size = size == variant_layouts[largest_variant_index].size;
         let same_align = align == variant_layouts[largest_variant_index].align;
 
-        let abi = if variant_layouts.iter().all(|v| v.abi.is_uninhabited()) {
-            Abi::Uninhabited
+        let abi = if variant_layouts.iter().all(|v| v.is_uninhabited()) {
+            BackendRepr::Uninhabited
         } else if same_size && same_align && others_zst {
-            match variant_layouts[largest_variant_index].abi {
+            match variant_layouts[largest_variant_index].backend_repr {
                 // When the total alignment and size match, we can use the
                 // same ABI as the scalar variant with the reserved niche.
-                Abi::Scalar(_) => Abi::Scalar(niche_scalar),
-                Abi::ScalarPair(first, second) => {
+                BackendRepr::Scalar(_) => BackendRepr::Scalar(niche_scalar),
+                BackendRepr::ScalarPair(first, second) => {
                     // Only the niche is guaranteed to be initialised,
                     // so use union layouts for the other primitive.
                     if niche_offset == Size::ZERO {
-                        Abi::ScalarPair(niche_scalar, second.to_union())
+                        BackendRepr::ScalarPair(niche_scalar, second.to_union())
                     } else {
-                        Abi::ScalarPair(first.to_union(), niche_scalar)
+                        BackendRepr::ScalarPair(first.to_union(), niche_scalar)
                     }
                 }
-                _ => Abi::Aggregate { sized: true },
+                _ => BackendRepr::Memory { sized: true },
             }
         } else {
-            Abi::Aggregate { sized: true }
+            BackendRepr::Memory { sized: true }
        };
 
         let layout = LayoutData {
Expand All @@ -664,7 +664,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
offsets: [niche_offset].into(),
memory_index: [0].into(),
},
abi,
backend_repr: abi,
largest_niche,
size,
align,
@@ -833,14 +833,14 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 end: (max as u128 & tag_mask),
             },
         };
-        let mut abi = Abi::Aggregate { sized: true };
+        let mut abi = BackendRepr::Memory { sized: true };
 
-        if layout_variants.iter().all(|v| v.abi.is_uninhabited()) {
-            abi = Abi::Uninhabited;
+        if layout_variants.iter().all(|v| v.is_uninhabited()) {
+            abi = BackendRepr::Uninhabited;
         } else if tag.size(dl) == size {
             // Make sure we only use scalar layout when the enum is entirely its
             // own tag (i.e. it has no padding nor any non-ZST variant fields).
-            abi = Abi::Scalar(tag);
+            abi = BackendRepr::Scalar(tag);
         } else {
             // Try to use a ScalarPair for all tagged enums.
             // That's possible only if we can find a common primitive type for all variants.
@@ -864,8 +864,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                     break;
                 }
             };
-            let prim = match field.abi {
-                Abi::Scalar(scalar) => {
+            let prim = match field.backend_repr {
+                BackendRepr::Scalar(scalar) => {
                     common_prim_initialized_in_all_variants &=
                         matches!(scalar, Scalar::Initialized { .. });
                     scalar.primitive()
@@ -934,20 +934,22 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 {
                     // We can use `ScalarPair` only when it matches our
                     // already computed layout (including `#[repr(C)]`).
-                    abi = pair.abi;
+                    abi = pair.backend_repr;
                 }
             }
         }
 
         // If we pick a "clever" (by-value) ABI, we might have to adjust the ABI of the
         // variants to ensure they are consistent. This is because a downcast is
         // semantically a NOP, and thus should not affect layout.
-        if matches!(abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
+        if matches!(abi, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
             for variant in &mut layout_variants {
                 // We only do this for variants with fields; the others are not accessed anyway.
                 // Also do not overwrite any already existing "clever" ABIs.
-                if variant.fields.count() > 0 && matches!(variant.abi, Abi::Aggregate { .. }) {
-                    variant.abi = abi;
+                if variant.fields.count() > 0
+                    && matches!(variant.backend_repr, BackendRepr::Memory { .. })
+                {
+                    variant.backend_repr = abi;
                     // Also need to bump up the size and alignment, so that the entire value fits
                     // in here.
                     variant.size = cmp::max(variant.size, size);
Expand All @@ -970,7 +972,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
memory_index: [0].into(),
},
largest_niche,
abi,
backend_repr: abi,
align,
size,
max_repr_align,
@@ -1252,7 +1254,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         }
         let mut layout_of_single_non_zst_field = None;
         let sized = unsized_field.is_none();
-        let mut abi = Abi::Aggregate { sized };
+        let mut abi = BackendRepr::Memory { sized };
 
         let optimize_abi = !repr.inhibit_newtype_abi_optimization();
 
@@ -1270,16 +1272,16 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                     // Field fills the struct and it has a scalar or scalar pair ABI.
                     if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
                     {
-                        match field.abi {
+                        match field.backend_repr {
                             // For plain scalars, or vectors of them, we can't unpack
                             // newtypes for `#[repr(C)]`, as that affects C ABIs.
-                            Abi::Scalar(_) | Abi::Vector { .. } if optimize_abi => {
-                                abi = field.abi;
+                            BackendRepr::Scalar(_) | BackendRepr::Vector { .. } if optimize_abi => {
+                                abi = field.backend_repr;
                             }
                             // But scalar pairs are Rust-specific and get
                             // treated as aggregates by C ABIs anyway.
-                            Abi::ScalarPair(..) => {
-                                abi = field.abi;
+                            BackendRepr::ScalarPair(..) => {
+                                abi = field.backend_repr;
                             }
                             _ => {}
                         }
@@ -1288,8 +1290,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
 
                 // Two non-ZST fields, and they're both scalars.
                 (Some((i, a)), Some((j, b)), None) => {
-                    match (a.abi, b.abi) {
-                        (Abi::Scalar(a), Abi::Scalar(b)) => {
+                    match (a.backend_repr, b.backend_repr) {
+                        (BackendRepr::Scalar(a), BackendRepr::Scalar(b)) => {
                             // Order by the memory placement, not source order.
                             let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
                                 ((i, a), (j, b))
@@ -1315,7 +1317,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                             {
                                 // We can use `ScalarPair` only when it matches our
                                 // already computed layout (including `#[repr(C)]`).
-                                abi = pair.abi;
+                                abi = pair.backend_repr;
                             }
                         }
                         _ => {}
@@ -1325,8 +1327,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 _ => {}
             }
         }
-        if fields.iter().any(|f| f.abi.is_uninhabited()) {
-            abi = Abi::Uninhabited;
+        if fields.iter().any(|f| f.is_uninhabited()) {
+            abi = BackendRepr::Uninhabited;
         }
 
         let unadjusted_abi_align = if repr.transparent() {
@@ -1344,7 +1346,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         Ok(LayoutData {
             variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Arbitrary { offsets, memory_index },
-            abi,
+            backend_repr: abi,
             largest_niche,
             align,
             size,
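Several of the layout.rs hunks above touch the union path: when every non-ZST field shares the same scalar/vector representation, that representation is forwarded to the union as a whole, and otherwise the union falls back to `Memory { sized: true }`. The following is a rough, self-contained approximation of that decision only; the alignment check, `to_union()` validity-range handling, and ZST skipping from the real code are omitted:

```rust
// Simplified stand-in for `rustc_abi::BackendRepr`; the real scalar and
// vector variants carry payloads, collapsed to unit variants here.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum BackendRepr {
    Scalar,
    ScalarPair,
    Vector,
    Memory { sized: bool },
}

// Forward a common non-Memory repr to the union; any mismatch, or any
// field that is itself Memory, disables the optimization.
fn union_repr(field_reprs: &[BackendRepr]) -> BackendRepr {
    let mut common: Option<BackendRepr> = None;
    for &repr in field_reprs {
        if matches!(repr, BackendRepr::Memory { .. }) {
            return BackendRepr::Memory { sized: true };
        }
        match common {
            None => common = Some(repr),
            Some(c) if c == repr => {}
            Some(_) => return BackendRepr::Memory { sized: true },
        }
    }
    common.unwrap_or(BackendRepr::Memory { sized: true })
}

fn main() {
    assert_eq!(
        union_repr(&[BackendRepr::Scalar, BackendRepr::Scalar]),
        BackendRepr::Scalar
    );
    assert_eq!(
        union_repr(&[BackendRepr::Scalar, BackendRepr::Vector]),
        BackendRepr::Memory { sized: true }
    );
}
```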
12 changes: 6 additions & 6 deletions compiler/rustc_abi/src/layout/ty.rs
@@ -83,8 +83,8 @@ impl<'a> Layout<'a> {
         &self.0.0.variants
     }
 
-    pub fn abi(self) -> Abi {
-        self.0.0.abi
+    pub fn backend_repr(self) -> BackendRepr {
+        self.0.0.backend_repr
     }
 
     pub fn largest_niche(self) -> Option<Niche> {
@@ -114,7 +114,7 @@ impl<'a> Layout<'a> {
     pub fn is_pointer_like(self, data_layout: &TargetDataLayout) -> bool {
         self.size() == data_layout.pointer_size
             && self.align().abi == data_layout.pointer_align.abi
-            && matches!(self.abi(), Abi::Scalar(Scalar::Initialized { .. }))
+            && matches!(self.backend_repr(), BackendRepr::Scalar(Scalar::Initialized { .. }))
     }
 }
 
@@ -196,9 +196,9 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
         Ty: TyAbiInterface<'a, C>,
         C: HasDataLayout,
     {
-        match self.abi {
-            Abi::Scalar(scalar) => matches!(scalar.primitive(), Float(F32 | F64)),
-            Abi::Aggregate { .. } => {
+        match self.backend_repr {
+            BackendRepr::Scalar(scalar) => matches!(scalar.primitive(), Float(F32 | F64)),
+            BackendRepr::Memory { .. } => {
                 if self.fields.count() == 1 && self.fields.offset(0).bytes() == 0 {
                     self.field(cx, 0).is_single_fp_element(cx)
                 } else {
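The final hunk rewrites `is_single_fp_element`, which treats a layout as a single floating-point element if it is a float scalar, or an aggregate whose only field sits at offset 0 and is itself such an element. A simplified recursive sketch with a stand-in layout tree (not the real `TyAndLayout` machinery, and without the size checks in the elided part of the hunk):

```rust
// Stand-in for a layout: either a scalar or an aggregate ("Memory")
// with fields at known byte offsets.
enum Layout {
    Scalar { is_float: bool },
    Memory { fields: Vec<(u64, Layout)> }, // (offset, field layout)
}

// Mirrors the shape of `is_single_fp_element`: peel single-field
// aggregates at offset 0 until a scalar is reached.
fn is_single_fp_element(layout: &Layout) -> bool {
    match layout {
        Layout::Scalar { is_float } => *is_float,
        Layout::Memory { fields } => {
            fields.len() == 1
                && fields[0].0 == 0
                && is_single_fp_element(&fields[0].1)
        }
    }
}

fn main() {
    // struct Wrapper(Inner) where struct Inner(f64): still a single fp element.
    let inner = Layout::Memory { fields: vec![(0, Layout::Scalar { is_float: true })] };
    let wrapper = Layout::Memory { fields: vec![(0, inner)] };
    assert!(is_single_fp_element(&wrapper));
}
```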
