@@ -15,11 +15,10 @@ use rustc_middle::ty::{self, GenericArgsRef, Ty};
 use rustc_middle::{bug, span_bug};
 use rustc_span::{Span, Symbol, sym};
 use rustc_symbol_mangling::mangle_internal_symbol;
-use rustc_target::callconv::{FnAbi, PassMode};
 use rustc_target::spec::{HasTargetSpec, PanicStrategy};
 use tracing::debug;
 
-use crate::abi::{FnAbiLlvmExt, LlvmType};
+use crate::abi::FnAbiLlvmExt;
 use crate::builder::Builder;
 use crate::context::CodegenCx;
 use crate::llvm::{self, Metadata};
@@ -165,7 +164,6 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
     fn codegen_intrinsic_call(
         &mut self,
         instance: ty::Instance<'tcx>,
-        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
         args: &[OperandRef<'tcx, &'ll Value>],
         result: PlaceRef<'tcx, &'ll Value>,
         span: Span,
@@ -263,7 +261,7 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                 self.call_intrinsic("llvm.va_copy", &[args[0].immediate(), args[1].immediate()])
             }
             sym::va_arg => {
-                match fn_abi.ret.layout.backend_repr {
+                match result.layout.backend_repr {
                     BackendRepr::Scalar(scalar) => {
                         match scalar.primitive() {
                             Primitive::Int(..) => {
@@ -298,18 +296,12 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
             }
 
             sym::volatile_load | sym::unaligned_volatile_load => {
-                let tp_ty = fn_args.type_at(0);
                 let ptr = args[0].immediate();
-                let load = if let PassMode::Cast { cast: ty, pad_i32: _ } = &fn_abi.ret.mode {
-                    let llty = ty.llvm_type(self);
-                    self.volatile_load(llty, ptr)
-                } else {
-                    self.volatile_load(self.layout_of(tp_ty).llvm_type(self), ptr)
-                };
+                let load = self.volatile_load(result.layout.llvm_type(self), ptr);
                 let align = if name == sym::unaligned_volatile_load {
                     1
                 } else {
-                    self.align_of(tp_ty).bytes() as u32
+                    result.layout.align.abi.bytes() as u32
                 };
                 unsafe {
                     llvm::LLVMSetAlignment(load, align);
@@ -628,14 +620,12 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
             }
         };
 
-        if !fn_abi.ret.is_ignore() {
-            if let PassMode::Cast { .. } = &fn_abi.ret.mode {
-                self.store(llval, result.val.llval, result.val.align);
-            } else {
-                OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
-                    .val
-                    .store(self, result);
-            }
+        if result.layout.ty.is_bool() {
+            OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
+                .val
+                .store(self, result);
+        } else if !result.layout.ty.is_unit() {
+            self.store_to_place(llval, result.val);
         }
         Ok(())
     }
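The final hunk makes the result-store path depend only on the destination place's layout: bool results round-trip through the immediate form (i1 in SSA form, i8 in memory), unit results are skipped, and everything else is written straight to the return place. A minimal, self-contained sketch of that dispatch, with stand-in types rather than rustc's real PlaceRef/OperandRef API (Ty, Layout, Place, and store_intrinsic_result below are hypothetical):

    // Stand-in types; rustc's real ones are ty::Ty, TyAndLayout, PlaceRef.
    #[derive(Clone, Copy, PartialEq)]
    enum Ty {
        Bool,
        Unit,
        Other,
    }

    struct Layout {
        ty: Ty,
    }

    struct Place {
        layout: Layout,
    }

    fn store_intrinsic_result(result: &Place, llval: u64) {
        if result.layout.ty == Ty::Bool {
            // Bools are i1 immediates in SSA form but i8 in memory, so
            // they go through the immediate-to-memory conversion before
            // the store (the from_immediate_or_packed_pair path above).
            let in_memory = (llval != 0) as u8;
            println!("store i8 {in_memory} via the immediate path");
        } else if result.layout.ty != Ty::Unit {
            // Everything else is written directly to the return place
            // (the store_to_place path above).
            println!("store {llval} directly to the return place");
        }
        // Unit results need no store at all.
    }

    fn main() {
        store_intrinsic_result(&Place { layout: Layout { ty: Ty::Bool } }, 1);
        store_intrinsic_result(&Place { layout: Layout { ty: Ty::Other } }, 42);
        store_intrinsic_result(&Place { layout: Layout { ty: Ty::Unit } }, 0);
    }

The sketch only models the branch structure; in the real code the i1-to-i8 widening happens when the operand value is stored, not in the branch itself.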