 #include "llvm/ADT/SmallVector.h"
 #include "llvm/ADT/Statistic.h"
 #include "llvm/CodeGen/LivePhysRegs.h"
+#include "llvm/CodeGen/LiveRegUnits.h"
 #include "llvm/CodeGen/MachineBasicBlock.h"
 #include "llvm/CodeGen/MachineFrameInfo.h"
 #include "llvm/CodeGen/MachineFunction.h"
@@ -1011,7 +1012,7 @@ void AArch64FrameLowering::emitZeroCallUsedRegs(BitVector RegsToZero,
   }
 }
 
-static void getLiveRegsForEntryMBB(LivePhysRegs &LiveRegs,
+static void getLiveRegsForEntryMBB(LiveRegUnits &LiveRegs,
                                    const MachineBasicBlock &MBB) {
   const MachineFunction *MF = MBB.getParent();
   LiveRegs.addLiveIns(MBB);
@@ -1044,16 +1045,18 @@ static Register findScratchNonCalleeSaveRegister(MachineBasicBlock *MBB) {
 
   const AArch64Subtarget &Subtarget = MF->getSubtarget<AArch64Subtarget>();
   const AArch64RegisterInfo &TRI = *Subtarget.getRegisterInfo();
-  LivePhysRegs LiveRegs(TRI);
+  LiveRegUnits LiveRegs(TRI);
   getLiveRegsForEntryMBB(LiveRegs, *MBB);
 
   // Prefer X9 since it was historically used for the prologue scratch reg.
-  const MachineRegisterInfo &MRI = MF->getRegInfo();
-  if (LiveRegs.available(MRI, AArch64::X9))
+  if (LiveRegs.available(AArch64::X9))
     return AArch64::X9;
 
-  for (unsigned Reg : AArch64::GPR64RegClass) {
-    if (LiveRegs.available(MRI, Reg))
+  BitVector Allocatable =
+      TRI.getAllocatableSet(*MF, TRI.getRegClass(AArch64::GPR64RegClassID));
+
+  for (unsigned Reg : Allocatable.set_bits()) {
+    if (LiveRegs.available(Reg))
       return Reg;
   }
   return AArch64::NoRegister;
@@ -1069,14 +1072,11 @@ bool AArch64FrameLowering::canUseAsPrologue(
   const AArch64FunctionInfo *AFI = MF->getInfo<AArch64FunctionInfo>();
 
   if (AFI->hasSwiftAsyncContext()) {
-    const AArch64RegisterInfo &TRI = *Subtarget.getRegisterInfo();
-    const MachineRegisterInfo &MRI = MF->getRegInfo();
-    LivePhysRegs LiveRegs(TRI);
+    LiveRegUnits LiveRegs(*RegInfo);
     getLiveRegsForEntryMBB(LiveRegs, MBB);
     // The StoreSwiftAsyncContext clobbers X16 and X17. Make sure they are
     // available.
-    if (!LiveRegs.available(MRI, AArch64::X16) ||
-        !LiveRegs.available(MRI, AArch64::X17))
+    if (!LiveRegs.available(AArch64::X16) || !LiveRegs.available(AArch64::X17))
       return false;
   }
 
@@ -1668,7 +1668,7 @@ static void emitDefineCFAWithFP(MachineFunction &MF, MachineBasicBlock &MBB,
 /// Collect live registers from the end of \p MI's parent up to (including) \p
 /// MI in \p LiveRegs.
 static void getLivePhysRegsUpTo(MachineInstr &MI, const TargetRegisterInfo &TRI,
-                                LivePhysRegs &LiveRegs) {
+                                LiveRegUnits &LiveRegs) {
 
   MachineBasicBlock &MBB = *MI.getParent();
   LiveRegs.addLiveOuts(MBB);
@@ -1706,7 +1706,7 @@ void AArch64FrameLowering::emitPrologue(MachineFunction &MF,
          NonFrameStart->getFlag(MachineInstr::FrameSetup))
     ++NonFrameStart;
 
-  LivePhysRegs LiveRegs(*TRI);
+  LiveRegUnits LiveRegs(*TRI);
   if (NonFrameStart != MBB.end()) {
     getLivePhysRegsUpTo(*NonFrameStart, *TRI, LiveRegs);
     // Ignore registers used for stack management for now.
@@ -1730,7 +1730,7 @@ void AArch64FrameLowering::emitPrologue(MachineFunction &MF,
          make_range(MBB.instr_begin(), NonFrameStart->getIterator())) {
       for (auto &Op : MI.operands())
         if (Op.isReg() && Op.isDef())
-          assert(!LiveRegs.contains(Op.getReg()) &&
+          assert(LiveRegs.available(Op.getReg()) &&
                  "live register clobbered by inserted prologue instructions");
     }
   });
@@ -4324,7 +4324,7 @@ MachineBasicBlock::iterator tryMergeAdjacentSTG(MachineBasicBlock::iterator II,
   // FIXME : This approach of bailing out from merge is conservative in
   // some ways like even if stg loops are not present after merge the
   // insert list, this liveness check is done (which is not needed).
-  LivePhysRegs LiveRegs(*(MBB->getParent()->getSubtarget().getRegisterInfo()));
+  LiveRegUnits LiveRegs(*(MBB->getParent()->getSubtarget().getRegisterInfo()));
   LiveRegs.addLiveOuts(*MBB);
   for (auto I = MBB->rbegin();; ++I) {
     MachineInstr &MI = *I;
@@ -4333,7 +4333,7 @@ MachineBasicBlock::iterator tryMergeAdjacentSTG(MachineBasicBlock::iterator II,
     LiveRegs.stepBackward(*I);
   }
   InsertI++;
-  if (LiveRegs.contains(AArch64::NZCV))
+  if (!LiveRegs.available(AArch64::NZCV))
     return InsertI;
 
   llvm::stable_sort(Instrs,
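
For context on the API migration shown in these hunks: LiveRegUnits tracks liveness per register unit, so its available(Reg) query takes only the register, whereas LivePhysRegs::available() also needs the MachineRegisterInfo and contains(Reg) tests membership rather than availability (hence the contains -> !available flips above). The following is a minimal illustrative sketch of that pattern, not part of the patch; the helper name findFreeRegOnEntry and its surrounding context are invented for illustration, while the calls it makes (LiveRegUnits::addLiveIns, LiveRegUnits::available, TargetRegisterInfo::getAllocatableSet, BitVector::set_bits) are the same ones the patch uses.

// Sketch only: mirrors the pattern the patch adopts in
// findScratchNonCalleeSaveRegister(); names here are illustrative.
#include "llvm/ADT/BitVector.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"

using namespace llvm;

// Return the first allocatable register in RC that is free on entry to MBB,
// or 0 if every candidate is live-in.
static unsigned findFreeRegOnEntry(const MachineBasicBlock &MBB,
                                   const TargetRegisterClass *RC) {
  const MachineFunction &MF = *MBB.getParent();
  const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();

  // Liveness is tracked per register unit, so no MachineRegisterInfo is
  // needed when querying availability (unlike LivePhysRegs::available).
  LiveRegUnits LiveRegs(TRI);
  LiveRegs.addLiveIns(MBB);

  // Only consider registers the allocator could actually hand out.
  BitVector Allocatable = TRI.getAllocatableSet(MF, RC);
  for (unsigned Reg : Allocatable.set_bits())
    if (LiveRegs.available(Reg))
      return Reg;
  return 0;
}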