Commit a8c8dea

Replace ConstValue::Slice by ConstValue::ScalarPair.

1 parent d3f8661, commit a8c8dea

File tree: 74 files changed, +417 -442 lines
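The change in one sentence: a slice constant is no longer stored in its own `ConstValue::Slice { data, start, end }` variant but as an ordinary pair of scalars, `ConstValue::ScalarPair(a, b)`, i.e. a (pointer, length) pair. A minimal, self-contained sketch of the two encodings, using stand-in types rather than the real definitions in `rustc_middle::mir::interpret`:

```rust
// Stand-in types only; the real variants live in rustc_middle::mir::interpret.
#[derive(Debug, PartialEq)]
enum OldConstValue {
    Slice { data: Vec<u8>, start: usize, end: usize }, // dedicated slice variant
}

#[derive(Debug, PartialEq)]
enum NewConstValue {
    ScalarPair(u64, u64), // (pointer bits, length) -- no slice-specific variant
}

// Converting the old encoding into the new one: the pointer half is the allocation's
// base address plus `start`; the second half is the length `end - start`.
fn to_scalar_pair(old: &OldConstValue, base_addr: u64) -> NewConstValue {
    match old {
        OldConstValue::Slice { data, start, end } => {
            assert!(*end <= data.len()); // the old variant carried the whole allocation
            NewConstValue::ScalarPair(base_addr + *start as u64, (*end - *start) as u64)
        }
    }
}

fn main() {
    let old = OldConstValue::Slice { data: b"hello world".to_vec(), start: 6, end: 11 };
    assert_eq!(to_scalar_pair(&old, 0x1000), NewConstValue::ScalarPair(0x1006, 5));
}
```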


compiler/rustc_codegen_cranelift/src/constant.rs

+87 -94

@@ -103,6 +103,91 @@ pub(crate) fn codegen_constant_operand<'tcx>(
     codegen_const_value(fx, const_val, ty)
 }
 
+pub(crate) fn codegen_const_scalar<'tcx>(
+    fx: &mut FunctionCx<'_, '_, 'tcx>,
+    scalar: Scalar,
+    layout: TyAndLayout<'tcx>,
+) -> CValue<'tcx> {
+    match scalar {
+        Scalar::Int(int) => {
+            if fx.clif_type(layout.ty).is_some() {
+                return CValue::const_val(fx, layout, int);
+            } else {
+                let raw_val = int.to_bits(int.size()).unwrap();
+                let val = match int.size().bytes() {
+                    1 => fx.bcx.ins().iconst(types::I8, raw_val as i64),
+                    2 => fx.bcx.ins().iconst(types::I16, raw_val as i64),
+                    4 => fx.bcx.ins().iconst(types::I32, raw_val as i64),
+                    8 => fx.bcx.ins().iconst(types::I64, raw_val as i64),
+                    16 => {
+                        let lsb = fx.bcx.ins().iconst(types::I64, raw_val as u64 as i64);
+                        let msb = fx.bcx.ins().iconst(types::I64, (raw_val >> 64) as u64 as i64);
+                        fx.bcx.ins().iconcat(lsb, msb)
+                    }
+                    _ => unreachable!(),
+                };
+
+                // FIXME avoid this extra copy to the stack and directly write to the final
+                // destination
+                let place = CPlace::new_stack_slot(fx, layout);
+                place.to_ptr().store(fx, val, MemFlags::trusted());
+                place.to_cvalue(fx)
+            }
+        }
+        Scalar::Ptr(ptr, _size) => {
+            let (alloc_id, offset) = ptr.into_parts(); // we know the `offset` is relative
+            let base_addr = match fx.tcx.global_alloc(alloc_id) {
+                GlobalAlloc::Memory(alloc) => {
+                    let data_id = data_id_for_alloc_id(
+                        &mut fx.constants_cx,
+                        fx.module,
+                        alloc_id,
+                        alloc.inner().mutability,
+                    );
+                    let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
+                    if fx.clif_comments.enabled() {
+                        fx.add_comment(local_data_id, format!("{:?}", alloc_id));
+                    }
+                    fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
+                }
+                GlobalAlloc::Function(instance) => {
+                    let func_id = crate::abi::import_function(fx.tcx, fx.module, instance);
+                    let local_func_id = fx.module.declare_func_in_func(func_id, &mut fx.bcx.func);
+                    fx.bcx.ins().func_addr(fx.pointer_type, local_func_id)
+                }
+                GlobalAlloc::VTable(ty, trait_ref) => {
+                    let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
+                    let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
+                    // FIXME: factor this common code with the `Memory` arm into a function?
+                    let data_id = data_id_for_alloc_id(
+                        &mut fx.constants_cx,
+                        fx.module,
+                        alloc_id,
+                        alloc.inner().mutability,
+                    );
+                    let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
+                    fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
+                }
+                GlobalAlloc::Static(def_id) => {
+                    assert!(fx.tcx.is_static(def_id));
+                    let data_id = data_id_for_static(fx.tcx, fx.module, def_id, false);
+                    let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
+                    if fx.clif_comments.enabled() {
+                        fx.add_comment(local_data_id, format!("{:?}", def_id));
+                    }
+                    fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
+                }
+            };
+            let val = if offset.bytes() != 0 {
+                fx.bcx.ins().iadd_imm(base_addr, i64::try_from(offset.bytes()).unwrap())
+            } else {
+                base_addr
+            };
+            CValue::by_val(val, layout)
+        }
+    }
+}
+
 pub(crate) fn codegen_const_value<'tcx>(
     fx: &mut FunctionCx<'_, '_, 'tcx>,
     const_val: ConstValue<'tcx>,
@@ -117,105 +202,13 @@ pub(crate) fn codegen_const_value<'tcx>(
 
     match *const_val.kind() {
         ConstValueKind::ZeroSized => unreachable!(), // we already handled ZST above
-        ConstValueKind::Scalar(x) => match x {
-            Scalar::Int(int) => {
-                if fx.clif_type(layout.ty).is_some() {
-                    return CValue::const_val(fx, layout, int);
-                } else {
-                    let raw_val = int.to_bits(int.size()).unwrap();
-                    let val = match int.size().bytes() {
-                        1 => fx.bcx.ins().iconst(types::I8, raw_val as i64),
-                        2 => fx.bcx.ins().iconst(types::I16, raw_val as i64),
-                        4 => fx.bcx.ins().iconst(types::I32, raw_val as i64),
-                        8 => fx.bcx.ins().iconst(types::I64, raw_val as i64),
-                        16 => {
-                            let lsb = fx.bcx.ins().iconst(types::I64, raw_val as u64 as i64);
-                            let msb =
-                                fx.bcx.ins().iconst(types::I64, (raw_val >> 64) as u64 as i64);
-                            fx.bcx.ins().iconcat(lsb, msb)
-                        }
-                        _ => unreachable!(),
-                    };
-
-                    // FIXME avoid this extra copy to the stack and directly write to the final
-                    // destination
-                    let place = CPlace::new_stack_slot(fx, layout);
-                    place.to_ptr().store(fx, val, MemFlags::trusted());
-                    place.to_cvalue(fx)
-                }
-            }
-            Scalar::Ptr(ptr, _size) => {
-                let (alloc_id, offset) = ptr.into_parts(); // we know the `offset` is relative
-                let base_addr = match fx.tcx.global_alloc(alloc_id) {
-                    GlobalAlloc::Memory(alloc) => {
-                        let data_id = data_id_for_alloc_id(
-                            &mut fx.constants_cx,
-                            fx.module,
-                            alloc_id,
-                            alloc.inner().mutability,
-                        );
-                        let local_data_id =
-                            fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
-                        if fx.clif_comments.enabled() {
-                            fx.add_comment(local_data_id, format!("{:?}", alloc_id));
-                        }
-                        fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
-                    }
-                    GlobalAlloc::Function(instance) => {
-                        let func_id = crate::abi::import_function(fx.tcx, fx.module, instance);
-                        let local_func_id =
-                            fx.module.declare_func_in_func(func_id, &mut fx.bcx.func);
-                        fx.bcx.ins().func_addr(fx.pointer_type, local_func_id)
-                    }
-                    GlobalAlloc::VTable(ty, trait_ref) => {
-                        let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
-                        let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
-                        // FIXME: factor this common code with the `Memory` arm into a function?
-                        let data_id = data_id_for_alloc_id(
-                            &mut fx.constants_cx,
-                            fx.module,
-                            alloc_id,
-                            alloc.inner().mutability,
-                        );
-                        let local_data_id =
-                            fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
-                        fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
-                    }
-                    GlobalAlloc::Static(def_id) => {
-                        assert!(fx.tcx.is_static(def_id));
-                        let data_id = data_id_for_static(fx.tcx, fx.module, def_id, false);
-                        let local_data_id =
-                            fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
-                        if fx.clif_comments.enabled() {
-                            fx.add_comment(local_data_id, format!("{:?}", def_id));
-                        }
-                        fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
-                    }
-                };
-                let val = if offset.bytes() != 0 {
-                    fx.bcx.ins().iadd_imm(base_addr, i64::try_from(offset.bytes()).unwrap())
-                } else {
-                    base_addr
-                };
-                CValue::by_val(val, layout)
-            }
-        },
+        ConstValueKind::Scalar(x) => codegen_const_scalar(fx, x, layout),
         ConstValueKind::Indirect { alloc_id, offset } => CValue::by_ref(
             pointer_for_allocation(fx, alloc_id)
                 .offset_i64(fx, i64::try_from(offset.bytes()).unwrap()),
             layout,
         ),
-        ConstValueKind::Slice { data, start, end } => {
-            let alloc_id = fx.tcx.reserve_and_set_memory_alloc(data);
-            let ptr = pointer_for_allocation(fx, alloc_id)
-                .offset_i64(fx, i64::try_from(start).unwrap())
-                .get_addr(fx);
-            let len = fx
-                .bcx
-                .ins()
-                .iconst(fx.pointer_type, i64::try_from(end.checked_sub(start).unwrap()).unwrap());
-            CValue::by_val_pair(ptr, len, layout)
-        }
+        ConstValueKind::ScalarPair(..) => todo!(),
    }
 }
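As an aside on the 16-byte arm above: the constant is materialized as two 64-bit `iconst`s and then joined with `iconcat`. The same split, written as a self-contained check in plain Rust rather than Cranelift IR:

```rust
fn split_u128(raw_val: u128) -> (u64, u64) {
    let lsb = raw_val as u64;         // low 64 bits, as in `raw_val as u64 as i64`
    let msb = (raw_val >> 64) as u64; // high 64 bits, as in `(raw_val >> 64) as u64 as i64`
    (lsb, msb)
}

fn main() {
    let v: u128 = 0x0123_4567_89ab_cdef_0011_2233_4455_6677;
    let (lsb, msb) = split_u128(v);
    assert_eq!(lsb, 0x0011_2233_4455_6677);
    assert_eq!(msb, 0x0123_4567_89ab_cdef);
    // Recombining the halves reproduces the original value, mirroring `iconcat(lsb, msb)`.
    assert_eq!(((msb as u128) << 64) | lsb as u128, v);
}
```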

compiler/rustc_codegen_ssa/src/mir/operand.rs

+9 -12

@@ -7,7 +7,7 @@ use crate::traits::*;
 use crate::MemFlags;
 
 use rustc_middle::mir;
-use rustc_middle::mir::interpret::{alloc_range, ConstValue, ConstValueKind, Pointer, Scalar};
+use rustc_middle::mir::interpret::{alloc_range, ConstValue, ConstValueKind};
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
 use rustc_middle::ty::Ty;
 use rustc_target::abi::{self, Abi, Align, Size};
@@ -100,23 +100,20 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                 OperandValue::Immediate(llval)
             }
             ConstValueKind::ZeroSized => return OperandRef::zero_sized(layout),
-            ConstValueKind::Slice { data, start, end } => {
-                let Abi::ScalarPair(a_scalar, _) = layout.abi else {
-                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
+            ConstValueKind::ScalarPair(a, b) => {
+                let Abi::ScalarPair(a_scalar, b_scalar) = layout.abi else {
+                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                 };
-                let a = Scalar::from_pointer(
-                    Pointer::new(
-                        bx.tcx().reserve_and_set_memory_alloc(data),
-                        Size::from_bytes(start),
-                    ),
-                    &bx.tcx(),
-                );
                 let a_llval = bx.scalar_to_backend(
                     a,
                     a_scalar,
                     bx.scalar_pair_element_backend_type(layout, 0, true),
                 );
-                let b_llval = bx.const_usize((end - start) as u64);
+                let b_llval = bx.scalar_to_backend(
+                    b,
+                    b_scalar,
+                    bx.scalar_pair_element_backend_type(layout, 1, true),
+                );
                 OperandValue::Pair(a_llval, b_llval)
             }
             ConstValueKind::Indirect { alloc_id, offset } => {
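The reason a slice constant fits the `Abi::ScalarPair` shape at all is that a `&[u8]` or `&str` value is exactly a (data pointer, length) pair, so both halves can now go through `scalar_to_backend` like any other pair. A self-contained illustration in plain Rust (not the codegen code):

```rust
fn main() {
    let bytes: &[u8] = b"hello world";
    // The fat pointer decomposes into two independent scalars.
    let (ptr, len): (*const u8, usize) = (bytes.as_ptr(), bytes.len());
    assert_eq!(len, 11);
    assert_eq!(unsafe { *ptr }, b'h');
    println!("ptr = {ptr:p}, len = {len}");
}
```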

compiler/rustc_const_eval/src/const_eval/eval_queries.rs

+1 -16

@@ -151,22 +151,7 @@ pub(super) fn op_to_const<'tcx>(
         // see comment on `let force_as_immediate` above
         Right(imm) => match *imm {
             Immediate::Scalar(x) => ConstValue::from_scalar(tcx, x),
-            Immediate::ScalarPair(a, b) => {
-                debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
-                // FIXME: assert that this has an appropriate type.
-                // Currently we actually get here for non-[u8] slices during valtree construction!
-                let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to actually allocated memory";
-                // We know `offset` is relative to the allocation, so we can use `into_parts`.
-                // We use `ConstValueKind::Slice` so that we don't have to generate an allocation for
-                // `ConstValueKind::Indirect` here.
-                let (alloc_id, offset) = a.to_pointer(ecx).expect(msg).into_parts();
-                let alloc_id = alloc_id.expect(msg);
-                let data = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
-                let start = offset.bytes_usize();
-                let len = b.to_target_usize(ecx).expect(msg);
-                let len: usize = len.try_into().unwrap();
-                ConstValue::from_slice(tcx, data, start, start + len)
-            }
+            Immediate::ScalarPair(a, b) => ConstValue::from_pair(tcx, a, b),
             Immediate::Uninit => bug!("`Uninit` is not a valid value for {}", op.layout.ty),
         },
     }
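The removed arm had to decode the pointer half back into an allocation plus a `start` offset and turn the length into an `end = start + len`; the replacement hands both scalars to `ConstValue::from_pair` unchanged. A small sketch of the two directions, with simplified stand-in types rather than the interpreter's:

```rust
// Old path: rebuild (start, end) from the immediate pair.
fn old_encode(ptr_offset: u64, len: u64) -> (u64, u64) {
    let start = ptr_offset; // offset decoded from the pointer scalar
    (start, start + len)    // `start + len`, as in the removed code
}

// New path: the pair is kept as-is (stand-in for ConstValue::from_pair(tcx, a, b)).
fn new_encode(a: u64, b: u64) -> (u64, u64) {
    (a, b)
}

fn main() {
    assert_eq!(old_encode(6, 5), (6, 11));
    assert_eq!(new_encode(6, 5), (6, 5));
}
```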

compiler/rustc_const_eval/src/interpret/intrinsics.rs

+7 -8

@@ -5,9 +5,7 @@
 use rustc_hir::def_id::DefId;
 use rustc_middle::mir::{
     self,
-    interpret::{
-        Allocation, ConstAllocation, ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar,
-    },
+    interpret::{AllocId, ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar},
     BinOp, NonDivergingIntrinsic,
 };
 use rustc_middle::ty;
@@ -44,10 +44,11 @@ fn numeric_intrinsic<Prov>(name: Symbol, bits: u128, kind: Primitive) -> Scalar<
 }
 
 /// Directly returns an `Allocation` containing an absolute path representation of the given type.
-pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstAllocation<'tcx> {
+pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (AllocId, usize) {
     let path = crate::util::type_name(tcx, ty);
-    let alloc = Allocation::from_bytes_byte_aligned_immutable(path.into_bytes());
-    tcx.mk_const_alloc(alloc)
+    let bytes = path.into_bytes();
+    let len = bytes.len();
+    (tcx.allocate_bytes(bytes), len)
 }
 
 /// The logic for all nullary intrinsics is implemented here. These intrinsics don't get evaluated
@@ -63,8 +62,8 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
     Ok(match name {
         sym::type_name => {
             ensure_monomorphic_enough(tcx, tp_ty)?;
-            let alloc = alloc_type_name(tcx, tp_ty);
-            ConstValue::from_slice(tcx, alloc, 0, alloc.inner().len())
+            let (alloc, len) = alloc_type_name(tcx, tp_ty);
+            ConstValue::from_slice(tcx, Pointer::from(alloc), len)
         }
         sym::needs_drop => {
             ensure_monomorphic_enough(tcx, tp_ty)?;
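`alloc_type_name` now returns the allocation id together with the byte length, because the `type_name` constant is built with `ConstValue::from_slice(tcx, Pointer::from(alloc), len)` rather than from the whole allocation. A self-contained sketch of that length bookkeeping in plain Rust, with `tcx.allocate_bytes` stubbed out:

```rust
fn alloc_type_name_sketch(path: String) -> (Vec<u8>, usize) {
    let bytes = path.into_bytes();
    let len = bytes.len();
    (bytes, len) // stand-in for (tcx.allocate_bytes(bytes), len)
}

fn main() {
    let (bytes, len) = alloc_type_name_sketch("alloc::string::String".to_string());
    assert_eq!(len, bytes.len());
    assert_eq!(len, 21);
    println!("{}", String::from_utf8(bytes).unwrap());
}
```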

compiler/rustc_const_eval/src/interpret/operand.rs

+3 -13

@@ -771,20 +771,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 Operand::Indirect(MemPlace::from_ptr(ptr.into()))
             }
             ConstValueKind::Scalar(x) => Operand::Immediate(adjust_scalar(x)?.into()),
-            ConstValueKind::ZeroSized => Operand::Immediate(Immediate::Uninit),
-            ConstValueKind::Slice { data, start, end } => {
-                // We rely on mutability being set correctly in `data` to prevent writes
-                // where none should happen.
-                let ptr = Pointer::new(
-                    self.tcx.reserve_and_set_memory_alloc(data),
-                    Size::from_bytes(start), // offset: `start`
-                );
-                Operand::Immediate(Immediate::new_slice(
-                    Scalar::from_pointer(self.global_base_pointer(ptr)?, &*self.tcx),
-                    u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start`
-                    self,
-                ))
+            ConstValueKind::ScalarPair(a, b) => {
+                Operand::Immediate(Immediate::ScalarPair(adjust_scalar(a)?, adjust_scalar(b)?))
             }
+            ConstValueKind::ZeroSized => Operand::Immediate(Immediate::Uninit),
         };
         Ok(OpTy { op, layout, align: Some(layout.align.abi) })
     }
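The new arm applies the same per-scalar adjustment to both halves of the pair, where the removed slice-specific code only rebased the pointer half and synthesized the length. A minimal sketch with a hypothetical `adjust_scalar` and simplified types, not the interpreter's:

```rust
fn adjust_scalar(x: u64) -> Result<u64, String> {
    Ok(x) // stand-in for the provenance/base-pointer adjustment
}

fn to_immediate_pair(a: u64, b: u64) -> Result<(u64, u64), String> {
    // Mirrors Immediate::ScalarPair(adjust_scalar(a)?, adjust_scalar(b)?)
    Ok((adjust_scalar(a)?, adjust_scalar(b)?))
}

fn main() {
    assert_eq!(to_immediate_pair(0x1000, 11), Ok((0x1000, 11)));
}
```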
