Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 2824db3

Browse files
committed Apr 6, 2023
Auto merge of #109915 - scottmcm:layout-indexvec, r=oli-obk
Use `FieldIdx` in `FieldsShape`. Finally got to the main motivating example from rust-lang/compiler-team#606 :)
2 parents 7f6edd3 + 21bb8ef commit 2824db3

File tree

7 files changed

+132
-100
lines changed

7 files changed

+132
-100
lines changed
 

‎compiler/rustc_abi/src/layout.rs

Lines changed: 35 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -8,19 +8,6 @@ use rand_xoshiro::Xoshiro128StarStar;
88

99
use tracing::debug;
1010

11-
// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`.
12-
// This is used to go between `memory_index` (source field order to memory order)
13-
// and `inverse_memory_index` (memory order to source field order).
14-
// See also `FieldsShape::Arbitrary::memory_index` for more details.
15-
// FIXME(eddyb) build a better abstraction for permutations, if possible.
16-
fn invert_mapping(map: &[u32]) -> Vec<u32> {
17-
let mut inverse = vec![0; map.len()];
18-
for i in 0..map.len() {
19-
inverse[map[i] as usize] = i as u32;
20-
}
21-
inverse
22-
}
23-
2411
pub trait LayoutCalculator {
2512
type TargetDataLayoutRef: Borrow<TargetDataLayout>;
2613

@@ -45,8 +32,8 @@ pub trait LayoutCalculator {
4532
LayoutS {
4633
variants: Variants::Single { index: FIRST_VARIANT },
4734
fields: FieldsShape::Arbitrary {
48-
offsets: vec![Size::ZERO, b_offset],
49-
memory_index: vec![0, 1],
35+
offsets: [Size::ZERO, b_offset].into(),
36+
memory_index: [0, 1].into(),
5037
},
5138
abi: Abi::ScalarPair(a, b),
5239
largest_niche,
@@ -58,18 +45,18 @@ pub trait LayoutCalculator {
5845
fn univariant(
5946
&self,
6047
dl: &TargetDataLayout,
61-
fields: &[Layout<'_>],
48+
fields: &IndexSlice<FieldIdx, Layout<'_>>,
6249
repr: &ReprOptions,
6350
kind: StructKind,
6451
) -> Option<LayoutS> {
6552
let pack = repr.pack;
6653
let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
67-
let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
54+
let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
6855
let optimize = !repr.inhibit_struct_field_reordering_opt();
6956
if optimize {
7057
let end =
7158
if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
72-
let optimizing = &mut inverse_memory_index[..end];
59+
let optimizing = &mut inverse_memory_index.raw[..end];
7360
let effective_field_align = |layout: Layout<'_>| {
7461
if let Some(pack) = pack {
7562
// return the packed alignment in bytes
@@ -105,7 +92,7 @@ pub trait LayoutCalculator {
10592
// Place ZSTs first to avoid "interesting offsets",
10693
// especially with only one or two non-ZST fields.
10794
// Then place largest alignments first, largest niches within an alignment group last
108-
let f = fields[x as usize];
95+
let f = fields[x];
10996
let niche_size = f.largest_niche().map_or(0, |n| n.available(dl));
11097
(!f.0.is_zst(), cmp::Reverse(effective_field_align(f)), niche_size)
11198
});
@@ -117,7 +104,7 @@ pub trait LayoutCalculator {
117104
// And put the largest niche in an alignment group at the end
118105
// so it can be used as discriminant in jagged enums
119106
optimizing.sort_by_key(|&x| {
120-
let f = fields[x as usize];
107+
let f = fields[x];
121108
let niche_size = f.largest_niche().map_or(0, |n| n.available(dl));
122109
(effective_field_align(f), niche_size)
123110
});
@@ -135,7 +122,7 @@ pub trait LayoutCalculator {
135122
// At the bottom of this function, we invert `inverse_memory_index` to
136123
// produce `memory_index` (see `invert_mapping`).
137124
let mut sized = true;
138-
let mut offsets = vec![Size::ZERO; fields.len()];
125+
let mut offsets = IndexVec::from_elem(Size::ZERO, &fields);
139126
let mut offset = Size::ZERO;
140127
let mut largest_niche = None;
141128
let mut largest_niche_available = 0;
@@ -146,7 +133,7 @@ pub trait LayoutCalculator {
146133
offset = prefix_size.align_to(prefix_align);
147134
}
148135
for &i in &inverse_memory_index {
149-
let field = &fields[i as usize];
136+
let field = &fields[i];
150137
if !sized {
151138
self.delay_bug(&format!(
152139
"univariant: field #{} comes after unsized field",
@@ -168,7 +155,7 @@ pub trait LayoutCalculator {
168155
align = align.max(field_align);
169156

170157
debug!("univariant offset: {:?} field: {:#?}", offset, field);
171-
offsets[i as usize] = offset;
158+
offsets[i] = offset;
172159

173160
if let Some(mut niche) = field.largest_niche() {
174161
let available = niche.available(dl);
@@ -192,14 +179,18 @@ pub trait LayoutCalculator {
192179
// If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
193180
// Field 5 would be the first element, so memory_index is i:
194181
// Note: if we didn't optimize, it's already right.
195-
let memory_index =
196-
if optimize { invert_mapping(&inverse_memory_index) } else { inverse_memory_index };
182+
let memory_index = if optimize {
183+
inverse_memory_index.invert_bijective_mapping()
184+
} else {
185+
debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices()));
186+
inverse_memory_index.into_iter().map(FieldIdx::as_u32).collect()
187+
};
197188
let size = min_size.align_to(align.abi);
198189
let mut abi = Abi::Aggregate { sized };
199190
// Unpack newtype ABIs and find scalar pairs.
200191
if sized && size.bytes() > 0 {
201192
// All other fields must be ZSTs.
202-
let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.0.is_zst());
193+
let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.0.is_zst());
203194

204195
match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
205196
// We have exactly one non-ZST field.
@@ -238,13 +229,13 @@ pub trait LayoutCalculator {
238229
let pair = self.scalar_pair(a, b);
239230
let pair_offsets = match pair.fields {
240231
FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
241-
assert_eq!(memory_index, &[0, 1]);
232+
assert_eq!(memory_index.raw, [0, 1]);
242233
offsets
243234
}
244235
_ => panic!(),
245236
};
246-
if offsets[i] == pair_offsets[0]
247-
&& offsets[j] == pair_offsets[1]
237+
if offsets[i] == pair_offsets[FieldIdx::from_usize(0)]
238+
&& offsets[j] == pair_offsets[FieldIdx::from_usize(1)]
248239
&& align == pair.align
249240
&& size == pair.size
250241
{
@@ -289,7 +280,7 @@ pub trait LayoutCalculator {
289280
fn layout_of_struct_or_enum(
290281
&self,
291282
repr: &ReprOptions,
292-
variants: &IndexSlice<VariantIdx, Vec<Layout<'_>>>,
283+
variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, Layout<'_>>>,
293284
is_enum: bool,
294285
is_unsafe_cell: bool,
295286
scalar_valid_range: (Bound<u128>, Bound<u128>),
@@ -312,7 +303,7 @@ pub trait LayoutCalculator {
312303
// but *not* an encoding of the discriminant (e.g., a tag value).
313304
// See issue #49298 for more details on the need to leave space
314305
// for non-ZST uninhabited data (mostly partial initialization).
315-
let absent = |fields: &[Layout<'_>]| {
306+
let absent = |fields: &IndexSlice<FieldIdx, Layout<'_>>| {
316307
let uninhabited = fields.iter().any(|f| f.abi().is_uninhabited());
317308
let is_zst = fields.iter().all(|f| f.0.is_zst());
318309
uninhabited && is_zst
@@ -510,7 +501,7 @@ pub trait LayoutCalculator {
510501
// It'll fit, but we need to make some adjustments.
511502
match layout.fields {
512503
FieldsShape::Arbitrary { ref mut offsets, .. } => {
513-
for (j, offset) in offsets.iter_mut().enumerate() {
504+
for (j, offset) in offsets.iter_enumerated_mut() {
514505
if !variants[i][j].0.is_zst() {
515506
*offset += this_offset;
516507
}
@@ -577,8 +568,8 @@ pub trait LayoutCalculator {
577568
variants: IndexVec::new(),
578569
},
579570
fields: FieldsShape::Arbitrary {
580-
offsets: vec![niche_offset],
581-
memory_index: vec![0],
571+
offsets: [niche_offset].into(),
572+
memory_index: [0].into(),
582573
},
583574
abi,
584575
largest_niche,
@@ -651,7 +642,8 @@ pub trait LayoutCalculator {
651642
st.variants = Variants::Single { index: i };
652643
// Find the first field we can't move later
653644
// to make room for a larger discriminant.
654-
for field in st.fields.index_by_increasing_offset().map(|j| &field_layouts[j]) {
645+
for field_idx in st.fields.index_by_increasing_offset() {
646+
let field = &field_layouts[FieldIdx::from_usize(field_idx)];
655647
if !field.0.is_zst() || field.align().abi.bytes() != 1 {
656648
start_align = start_align.min(field.align().abi);
657649
break;
@@ -802,13 +794,13 @@ pub trait LayoutCalculator {
802794
let pair = self.scalar_pair(tag, prim_scalar);
803795
let pair_offsets = match pair.fields {
804796
FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
805-
assert_eq!(memory_index, &[0, 1]);
797+
assert_eq!(memory_index.raw, [0, 1]);
806798
offsets
807799
}
808800
_ => panic!(),
809801
};
810-
if pair_offsets[0] == Size::ZERO
811-
&& pair_offsets[1] == *offset
802+
if pair_offsets[FieldIdx::from_u32(0)] == Size::ZERO
803+
&& pair_offsets[FieldIdx::from_u32(1)] == *offset
812804
&& align == pair.align
813805
&& size == pair.size
814806
{
@@ -844,7 +836,10 @@ pub trait LayoutCalculator {
844836
tag_field: 0,
845837
variants: IndexVec::new(),
846838
},
847-
fields: FieldsShape::Arbitrary { offsets: vec![Size::ZERO], memory_index: vec![0] },
839+
fields: FieldsShape::Arbitrary {
840+
offsets: [Size::ZERO].into(),
841+
memory_index: [0].into(),
842+
},
848843
largest_niche,
849844
abi,
850845
align,
@@ -883,7 +878,7 @@ pub trait LayoutCalculator {
883878
fn layout_of_union(
884879
&self,
885880
repr: &ReprOptions,
886-
variants: &IndexSlice<VariantIdx, Vec<Layout<'_>>>,
881+
variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, Layout<'_>>>,
887882
) -> Option<LayoutS> {
888883
let dl = self.current_data_layout();
889884
let dl = dl.borrow();

‎compiler/rustc_abi/src/lib.rs

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1108,7 +1108,7 @@ pub enum FieldsShape {
11081108
/// ordered to match the source definition order.
11091109
/// This vector does not go in increasing order.
11101110
// FIXME(eddyb) use small vector optimization for the common case.
1111-
offsets: Vec<Size>,
1111+
offsets: IndexVec<FieldIdx, Size>,
11121112

11131113
/// Maps source order field indices to memory order indices,
11141114
/// depending on how the fields were reordered (if at all).
@@ -1122,7 +1122,7 @@ pub enum FieldsShape {
11221122
///
11231123
// FIXME(eddyb) build a better abstraction for permutations, if possible.
11241124
// FIXME(camlorn) also consider small vector optimization here.
1125-
memory_index: Vec<u32>,
1125+
memory_index: IndexVec<FieldIdx, u32>,
11261126
},
11271127
}
11281128

@@ -1157,7 +1157,7 @@ impl FieldsShape {
11571157
assert!(i < count);
11581158
stride * i
11591159
}
1160-
FieldsShape::Arbitrary { ref offsets, .. } => offsets[i],
1160+
FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::from_usize(i)],
11611161
}
11621162
}
11631163

@@ -1168,28 +1168,27 @@ impl FieldsShape {
11681168
unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
11691169
}
11701170
FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
1171-
FieldsShape::Arbitrary { ref memory_index, .. } => memory_index[i].try_into().unwrap(),
1171+
FieldsShape::Arbitrary { ref memory_index, .. } => {
1172+
memory_index[FieldIdx::from_usize(i)].try_into().unwrap()
1173+
}
11721174
}
11731175
}
11741176

11751177
/// Gets source indices of the fields by increasing offsets.
11761178
#[inline]
11771179
pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
11781180
let mut inverse_small = [0u8; 64];
1179-
let mut inverse_big = vec![];
1181+
let mut inverse_big = IndexVec::new();
11801182
let use_small = self.count() <= inverse_small.len();
11811183

11821184
// We have to write this logic twice in order to keep the array small.
11831185
if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
11841186
if use_small {
1185-
for i in 0..self.count() {
1186-
inverse_small[memory_index[i] as usize] = i as u8;
1187+
for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
1188+
inverse_small[mem_idx as usize] = field_idx.as_u32() as u8;
11871189
}
11881190
} else {
1189-
inverse_big = vec![0; self.count()];
1190-
for i in 0..self.count() {
1191-
inverse_big[memory_index[i] as usize] = i as u32;
1192-
}
1191+
inverse_big = memory_index.invert_bijective_mapping();
11931192
}
11941193
}
11951194

@@ -1199,7 +1198,7 @@ impl FieldsShape {
11991198
if use_small {
12001199
inverse_small[i] as usize
12011200
} else {
1202-
inverse_big[i] as usize
1201+
inverse_big[i as u32].as_usize()
12031202
}
12041203
}
12051204
})

‎compiler/rustc_index/src/vec.rs

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ pub trait Idx: Copy + 'static + Eq + PartialEq + Debug + Hash {
2424
}
2525

2626
#[inline]
27+
#[must_use = "Use `increment_by` if you wanted to update the index in-place"]
2728
fn plus(self, amount: usize) -> Self {
2829
Self::new(self.index() + amount)
2930
}
@@ -294,6 +295,11 @@ impl<I: Idx, T: Clone> ToOwned for IndexSlice<I, T> {
294295
}
295296

296297
impl<I: Idx, T> IndexSlice<I, T> {
298+
#[inline]
299+
pub fn empty() -> &'static Self {
300+
Default::default()
301+
}
302+
297303
#[inline]
298304
pub fn from_raw(raw: &[T]) -> &Self {
299305
let ptr: *const [T] = raw;
@@ -409,6 +415,36 @@ impl<I: Idx, T> IndexSlice<I, T> {
409415
}
410416
}
411417

418+
impl<I: Idx, J: Idx> IndexSlice<I, J> {
419+
/// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`,
420+
/// assuming the values in `self` are a permutation of `0..self.len()`.
421+
///
422+
/// This is used to go between `memory_index` (source field order to memory order)
423+
/// and `inverse_memory_index` (memory order to source field order).
424+
/// See also `FieldsShape::Arbitrary::memory_index` for more details.
425+
// FIXME(eddyb) build a better abstraction for permutations, if possible.
426+
pub fn invert_bijective_mapping(&self) -> IndexVec<J, I> {
427+
debug_assert_eq!(
428+
self.iter().map(|x| x.index() as u128).sum::<u128>(),
429+
(0..self.len() as u128).sum::<u128>(),
430+
"The values aren't 0..N in input {self:?}",
431+
);
432+
433+
let mut inverse = IndexVec::from_elem_n(Idx::new(0), self.len());
434+
for (i1, &i2) in self.iter_enumerated() {
435+
inverse[i2] = i1;
436+
}
437+
438+
debug_assert_eq!(
439+
inverse.iter().map(|x| x.index() as u128).sum::<u128>(),
440+
(0..inverse.len() as u128).sum::<u128>(),
441+
"The values aren't 0..N in result {self:?}",
442+
);
443+
444+
inverse
445+
}
446+
}
447+
412448
/// `IndexVec` is often used as a map, so it provides some map-like APIs.
413449
impl<I: Idx, T> IndexVec<I, Option<T>> {
414450
#[inline]
@@ -513,6 +549,13 @@ impl<I: Idx, T> FromIterator<T> for IndexVec<I, T> {
513549
}
514550
}
515551

552+
impl<I: Idx, T, const N: usize> From<[T; N]> for IndexVec<I, T> {
553+
#[inline]
554+
fn from(array: [T; N]) -> Self {
555+
IndexVec::from_raw(array.into())
556+
}
557+
}
558+
516559
impl<I: Idx, T> IntoIterator for IndexVec<I, T> {
517560
type Item = T;
518561
type IntoIter = vec::IntoIter<T>;

‎compiler/rustc_middle/src/ty/layout.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ use crate::ty::{self, ReprOptions, Ty, TyCtxt, TypeVisitableExt};
55
use rustc_errors::{DiagnosticBuilder, Handler, IntoDiagnostic};
66
use rustc_hir as hir;
77
use rustc_hir::def_id::DefId;
8+
use rustc_index::vec::IndexVec;
89
use rustc_session::config::OptLevel;
910
use rustc_span::symbol::{sym, Symbol};
1011
use rustc_span::{Span, DUMMY_SP};
@@ -635,7 +636,7 @@ where
635636
variants: Variants::Single { index: variant_index },
636637
fields: match NonZeroUsize::new(fields) {
637638
Some(fields) => FieldsShape::Union(fields),
638-
None => FieldsShape::Arbitrary { offsets: vec![], memory_index: vec![] },
639+
None => FieldsShape::Arbitrary { offsets: IndexVec::new(), memory_index: IndexVec::new() },
639640
},
640641
abi: Abi::Uninhabited,
641642
largest_niche: None,

‎compiler/rustc_mir_transform/src/generator.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -287,7 +287,7 @@ impl<'tcx> TransformVisitor<'tcx> {
287287
statements.push(Statement {
288288
kind: StatementKind::Assign(Box::new((
289289
Place::return_place(),
290-
Rvalue::Aggregate(Box::new(kind), IndexVec::from_iter([val])),
290+
Rvalue::Aggregate(Box::new(kind), [val].into()),
291291
))),
292292
source_info,
293293
});

‎compiler/rustc_ty_utils/src/layout.rs

Lines changed: 39 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
use hir::def_id::DefId;
22
use rustc_hir as hir;
33
use rustc_index::bit_set::BitSet;
4-
use rustc_index::vec::IndexVec;
4+
use rustc_index::vec::{IndexSlice, IndexVec};
55
use rustc_middle::mir::{GeneratorLayout, GeneratorSavedLocal};
66
use rustc_middle::ty::layout::{
77
IntegerExt, LayoutCx, LayoutError, LayoutOf, TyAndLayout, MAX_SIMD_LANES,
@@ -62,23 +62,10 @@ fn layout_of<'tcx>(
6262
Ok(layout)
6363
}
6464

65-
// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`.
66-
// This is used to go between `memory_index` (source field order to memory order)
67-
// and `inverse_memory_index` (memory order to source field order).
68-
// See also `FieldsShape::Arbitrary::memory_index` for more details.
69-
// FIXME(eddyb) build a better abstraction for permutations, if possible.
70-
fn invert_mapping(map: &[u32]) -> Vec<u32> {
71-
let mut inverse = vec![0; map.len()];
72-
for i in 0..map.len() {
73-
inverse[map[i] as usize] = i as u32;
74-
}
75-
inverse
76-
}
77-
7865
fn univariant_uninterned<'tcx>(
7966
cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
8067
ty: Ty<'tcx>,
81-
fields: &[Layout<'_>],
68+
fields: &IndexSlice<FieldIdx, Layout<'_>>,
8269
repr: &ReprOptions,
8370
kind: StructKind,
8471
) -> Result<LayoutS, LayoutError<'tcx>> {
@@ -106,7 +93,7 @@ fn layout_of_uncached<'tcx>(
10693
};
10794
let scalar = |value: Primitive| tcx.mk_layout(LayoutS::scalar(cx, scalar_unit(value)));
10895

109-
let univariant = |fields: &[Layout<'_>], repr: &ReprOptions, kind| {
96+
let univariant = |fields: &IndexSlice<FieldIdx, Layout<'_>>, repr: &ReprOptions, kind| {
11097
Ok(tcx.mk_layout(univariant_uninterned(cx, ty, fields, repr, kind)?))
11198
};
11299
debug_assert!(!ty.has_non_region_infer());
@@ -256,12 +243,14 @@ fn layout_of_uncached<'tcx>(
256243
}),
257244

258245
// Odd unit types.
259-
ty::FnDef(..) => univariant(&[], &ReprOptions::default(), StructKind::AlwaysSized)?,
246+
ty::FnDef(..) => {
247+
univariant(IndexSlice::empty(), &ReprOptions::default(), StructKind::AlwaysSized)?
248+
}
260249
ty::Dynamic(_, _, ty::Dyn) | ty::Foreign(..) => {
261250
let mut unit = univariant_uninterned(
262251
cx,
263252
ty,
264-
&[],
253+
IndexSlice::empty(),
265254
&ReprOptions::default(),
266255
StructKind::AlwaysSized,
267256
)?;
@@ -277,7 +266,7 @@ fn layout_of_uncached<'tcx>(
277266
ty::Closure(_, ref substs) => {
278267
let tys = substs.as_closure().upvar_tys();
279268
univariant(
280-
&tys.map(|ty| Ok(cx.layout_of(ty)?.layout)).collect::<Result<Vec<_>, _>>()?,
269+
&tys.map(|ty| Ok(cx.layout_of(ty)?.layout)).try_collect::<IndexVec<_, _>>()?,
281270
&ReprOptions::default(),
282271
StructKind::AlwaysSized,
283272
)?
@@ -288,7 +277,7 @@ fn layout_of_uncached<'tcx>(
288277
if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
289278

290279
univariant(
291-
&tys.iter().map(|k| Ok(cx.layout_of(k)?.layout)).collect::<Result<Vec<_>, _>>()?,
280+
&tys.iter().map(|k| Ok(cx.layout_of(k)?.layout)).try_collect::<IndexVec<_, _>>()?,
292281
&ReprOptions::default(),
293282
kind,
294283
)?
@@ -393,7 +382,7 @@ fn layout_of_uncached<'tcx>(
393382

394383
// Compute the placement of the vector fields:
395384
let fields = if is_array {
396-
FieldsShape::Arbitrary { offsets: vec![Size::ZERO], memory_index: vec![0] }
385+
FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() }
397386
} else {
398387
FieldsShape::Array { stride: e_ly.size, count: e_len }
399388
};
@@ -418,9 +407,9 @@ fn layout_of_uncached<'tcx>(
418407
v.fields
419408
.iter()
420409
.map(|field| Ok(cx.layout_of(field.ty(tcx, substs))?.layout))
421-
.collect::<Result<Vec<_>, _>>()
410+
.try_collect::<IndexVec<_, _>>()
422411
})
423-
.collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
412+
.try_collect::<IndexVec<VariantIdx, _>>()?;
424413

425414
if def.is_union() {
426415
if def.repr().pack.is_some() && def.repr().align.is_some() {
@@ -492,8 +481,7 @@ fn layout_of_uncached<'tcx>(
492481
enum SavedLocalEligibility {
493482
Unassigned,
494483
Assigned(VariantIdx),
495-
// FIXME: Use newtype_index so we aren't wasting bytes
496-
Ineligible(Option<u32>),
484+
Ineligible(Option<FieldIdx>),
497485
}
498486

499487
// When laying out generators, we divide our saved local fields into two
@@ -605,7 +593,7 @@ fn generator_saved_local_eligibility(
605593
// Write down the order of our locals that will be promoted to the prefix.
606594
{
607595
for (idx, local) in ineligible_locals.iter().enumerate() {
608-
assignments[local] = Ineligible(Some(idx as u32));
596+
assignments[local] = Ineligible(Some(FieldIdx::from_usize(idx)));
609597
}
610598
}
611599
debug!("generator saved local assignments: {:?}", assignments);
@@ -654,7 +642,7 @@ fn generator_layout<'tcx>(
654642
.map(|ty| Ok(cx.layout_of(ty)?.layout))
655643
.chain(iter::once(Ok(tag_layout)))
656644
.chain(promoted_layouts)
657-
.collect::<Result<Vec<_>, _>>()?;
645+
.try_collect::<IndexVec<_, _>>()?;
658646
let prefix = univariant_uninterned(
659647
cx,
660648
ty,
@@ -672,26 +660,28 @@ fn generator_layout<'tcx>(
672660
debug!("prefix = {:#?}", prefix);
673661
let (outer_fields, promoted_offsets, promoted_memory_index) = match prefix.fields {
674662
FieldsShape::Arbitrary { mut offsets, memory_index } => {
675-
let mut inverse_memory_index = invert_mapping(&memory_index);
663+
let mut inverse_memory_index = memory_index.invert_bijective_mapping();
676664

677665
// "a" (`0..b_start`) and "b" (`b_start..`) correspond to
678666
// "outer" and "promoted" fields respectively.
679-
let b_start = (tag_index + 1) as u32;
680-
let offsets_b = offsets.split_off(b_start as usize);
667+
let b_start = FieldIdx::from_usize(tag_index + 1);
668+
let offsets_b = IndexVec::from_raw(offsets.raw.split_off(b_start.as_usize()));
681669
let offsets_a = offsets;
682670

683671
// Disentangle the "a" and "b" components of `inverse_memory_index`
684672
// by preserving the order but keeping only one disjoint "half" each.
685673
// FIXME(eddyb) build a better abstraction for permutations, if possible.
686-
let inverse_memory_index_b: Vec<_> =
687-
inverse_memory_index.iter().filter_map(|&i| i.checked_sub(b_start)).collect();
688-
inverse_memory_index.retain(|&i| i < b_start);
674+
let inverse_memory_index_b: IndexVec<u32, FieldIdx> = inverse_memory_index
675+
.iter()
676+
.filter_map(|&i| i.as_u32().checked_sub(b_start.as_u32()).map(FieldIdx::from_u32))
677+
.collect();
678+
inverse_memory_index.raw.retain(|&i| i < b_start);
689679
let inverse_memory_index_a = inverse_memory_index;
690680

691681
// Since `inverse_memory_index_{a,b}` each only refer to their
692682
// respective fields, they can be safely inverted
693-
let memory_index_a = invert_mapping(&inverse_memory_index_a);
694-
let memory_index_b = invert_mapping(&inverse_memory_index_b);
683+
let memory_index_a = inverse_memory_index_a.invert_bijective_mapping();
684+
let memory_index_b = inverse_memory_index_b.invert_bijective_mapping();
695685

696686
let outer_fields =
697687
FieldsShape::Arbitrary { offsets: offsets_a, memory_index: memory_index_a };
@@ -722,7 +712,7 @@ fn generator_layout<'tcx>(
722712
ty,
723713
&variant_only_tys
724714
.map(|ty| Ok(cx.layout_of(ty)?.layout))
725-
.collect::<Result<Vec<_>, _>>()?,
715+
.try_collect::<IndexVec<_, _>>()?,
726716
&ReprOptions::default(),
727717
StructKind::Prefixed(prefix_size, prefix_align.abi),
728718
)?;
@@ -741,13 +731,16 @@ fn generator_layout<'tcx>(
741731
// promoted fields were being used, but leave the elements not in the
742732
// subset as `INVALID_FIELD_IDX`, which we can filter out later to
743733
// obtain a valid (bijective) mapping.
744-
const INVALID_FIELD_IDX: u32 = !0;
745-
let mut combined_inverse_memory_index =
746-
vec![INVALID_FIELD_IDX; promoted_memory_index.len() + memory_index.len()];
734+
const INVALID_FIELD_IDX: FieldIdx = FieldIdx::MAX;
735+
debug_assert!(variant_fields.next_index() <= INVALID_FIELD_IDX);
736+
737+
let mut combined_inverse_memory_index = IndexVec::from_elem_n(
738+
INVALID_FIELD_IDX,
739+
promoted_memory_index.len() + memory_index.len(),
740+
);
747741
let mut offsets_and_memory_index = iter::zip(offsets, memory_index);
748742
let combined_offsets = variant_fields
749-
.iter()
750-
.enumerate()
743+
.iter_enumerated()
751744
.map(|(i, local)| {
752745
let (offset, memory_index) = match assignments[*local] {
753746
Unassigned => bug!(),
@@ -756,19 +749,19 @@ fn generator_layout<'tcx>(
756749
(offset, promoted_memory_index.len() as u32 + memory_index)
757750
}
758751
Ineligible(field_idx) => {
759-
let field_idx = field_idx.unwrap() as usize;
752+
let field_idx = field_idx.unwrap();
760753
(promoted_offsets[field_idx], promoted_memory_index[field_idx])
761754
}
762755
};
763-
combined_inverse_memory_index[memory_index as usize] = i as u32;
756+
combined_inverse_memory_index[memory_index] = i;
764757
offset
765758
})
766759
.collect();
767760

768761
// Remove the unused slots and invert the mapping to obtain the
769762
// combined `memory_index` (also see previous comment).
770-
combined_inverse_memory_index.retain(|&i| i != INVALID_FIELD_IDX);
771-
let combined_memory_index = invert_mapping(&combined_inverse_memory_index);
763+
combined_inverse_memory_index.raw.retain(|&i| i != INVALID_FIELD_IDX);
764+
let combined_memory_index = combined_inverse_memory_index.invert_bijective_mapping();
772765

773766
variant.fields = FieldsShape::Arbitrary {
774767
offsets: combined_offsets,
@@ -779,7 +772,7 @@ fn generator_layout<'tcx>(
779772
align = align.max(variant.align);
780773
Ok(variant)
781774
})
782-
.collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
775+
.try_collect::<IndexVec<VariantIdx, _>>()?;
783776

784777
size = size.align_to(align.abi);
785778

‎compiler/rustc_ty_utils/src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
//! This API is completely unstable and subject to change.
66
77
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
8+
#![feature(iterator_try_collect)]
89
#![feature(let_chains)]
910
#![feature(never_type)]
1011
#![feature(box_patterns)]

0 commit comments

Comments (0)
Please sign in to comment.