#define DEBUG_TYPE "ppc-instr-info"

#define GET_INSTRMAP_INFO
#define GET_INSTRINFO_CTOR_DTOR
#include "PPCGenInstrInfo.inc"
STATISTIC(NumStoreSPILLVSRRCAsVec,
          "Number of spillvsrrc spilled to stack as vec");
STATISTIC(NumStoreSPILLVSRRCAsGpr,
          "Number of spillvsrrc spilled to stack as gpr");
STATISTIC(NumGPRtoVSRSpill, "Number of gpr spills to spillvsrrc");
STATISTIC(CmpIselsConverted,
          "Number of ISELs that depend on comparison of constants converted");
STATISTIC(MissedConvertibleImmediateInstrs,
          "Number of compare-immediate instructions fed by constants");
STATISTIC(NumRcRotatesConvertedToRcAnd,
          "Number of record-form rotates converted to record-form andi");
    cl::desc("Disable analysis for CTR loops"));

    cl::desc("Causes the backend to crash instead of generating a nop VSX copy"),

    cl::desc("Use the old (incorrect) instruction latency calculation"));

    cl::desc("register pressure factor for the transformations."));

    cl::desc("enable register pressure reduce in machine combiner pass."));

void PPCInstrInfo::anchor() {}

                          STI.isPPC64() ? PPC::BLR8 : PPC::BLR),
      Subtarget(STI), RI(STI.getTargetMachine()) {}

  static_cast<const PPCSubtarget *>(STI)->getCPUDirective();

  static_cast<const PPCSubtarget *>(STI)->getInstrItineraryData();

                                         unsigned *PredCost) const {
  return PPCGenInstrInfo::getInstrLatency(ItinData, MI, PredCost);

  unsigned DefClass = MI.getDesc().getSchedClass();
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {

                                       unsigned UseIdx) const {
  int Latency = PPCGenInstrInfo::getOperandLatency(ItinData, DefMI, DefIdx,

  if (!DefMI.getParent())

    IsRegCR = PPC::CRRCRegClass.contains(Reg) ||
              PPC::CRBITRCRegClass.contains(Reg);

  if (UseMI.isBranch() && IsRegCR) {

  NewMI1.clearFlag(MachineInstr::MIFlag::NoSWrap);
  NewMI1.clearFlag(MachineInstr::MIFlag::NoUWrap);
  NewMI1.clearFlag(MachineInstr::MIFlag::IsExact);

  NewMI2.clearFlag(MachineInstr::MIFlag::NoSWrap);
  NewMI2.clearFlag(MachineInstr::MIFlag::NoUWrap);
  NewMI2.clearFlag(MachineInstr::MIFlag::IsExact);

  MI.clearFlag(MachineInstr::MIFlag::NoSWrap);
  MI.clearFlag(MachineInstr::MIFlag::NoUWrap);
  MI.clearFlag(MachineInstr::MIFlag::IsExact);

  return Inst.getFlag(MachineInstr::MIFlag::FmReassoc) &&
         Inst.getFlag(MachineInstr::MIFlag::FmNsz);

#define InfoArrayIdxFMAInst 0
#define InfoArrayIdxFAddInst 1
#define InfoArrayIdxFMULInst 2
#define InfoArrayIdxAddOpIdx 3
#define InfoArrayIdxMULOpIdx 4
#define InfoArrayIdxFSubInst 5

    {PPC::XSMADDADP, PPC::XSADDDP, PPC::XSMULDP, 1, 2, PPC::XSSUBDP},
    {PPC::XSMADDASP, PPC::XSADDSP, PPC::XSMULSP, 1, 2, PPC::XSSUBSP},
    {PPC::XVMADDADP, PPC::XVADDDP, PPC::XVMULDP, 1, 2, PPC::XVSUBDP},
    {PPC::XVMADDASP, PPC::XVADDSP, PPC::XVMULSP, 1, 2, PPC::XVSUBSP},
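// Editor's illustrative sketch, not part of the original file: each row of
// the table above is read with the InfoArrayIdx* macros.  Assuming the array
// is named FMAOpIdxInfo (the real name is elided in this fragment), the add
// and first multiplicand operand positions of a reassociable FMA are looked
// up like this:
//
//   int16_t Idx = getFMAOpIdxInfo(Root.getOpcode());
//   unsigned AddOpIdx = FMAOpIdxInfo[Idx][InfoArrayIdxAddOpIdx]; // always 1
//   unsigned MulOpIdx = FMAOpIdxInfo[Idx][InfoArrayIdxMULOpIdx]; // always 2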
int16_t PPCInstrInfo::getFMAOpIdxInfo(unsigned Opcode) const {

                                   bool DoRegPressureReduce) const {

  auto IsAllOpsVirtualReg = [](const MachineInstr &Instr) {
    for (const auto &MO : Instr.explicit_operands())

  auto IsReassociableAddOrSub = [&](const MachineInstr &Instr,

    if (!(Instr.getFlag(MachineInstr::MIFlag::FmReassoc) &&
          Instr.getFlag(MachineInstr::MIFlag::FmNsz)))

    if (!IsAllOpsVirtualReg(Instr))

  auto IsReassociableFMA = [&](const MachineInstr &Instr, int16_t &AddOpIdx,
                               int16_t &MulOpIdx, bool IsLeaf) {
    int16_t Idx = getFMAOpIdxInfo(Instr.getOpcode());

    if (!(Instr.getFlag(MachineInstr::MIFlag::FmReassoc) &&
          Instr.getFlag(MachineInstr::MIFlag::FmNsz)))

    if (!IsAllOpsVirtualReg(Instr))

  int16_t AddOpIdx = -1;
  int16_t MulOpIdx = -1;

  bool IsUsedOnceL = false;
  bool IsUsedOnceR = false;

  auto IsRPReductionCandidate = [&]() {

    if (Opcode != PPC::XSMADDASP && Opcode != PPC::XSMADDADP)

    if (IsReassociableFMA(Root, AddOpIdx, MulOpIdx, true)) {
      assert((MulOpIdx >= 0) && "mul operand index not right!");

      if (!MULRegL && !MULRegR)

      if (MULRegL && !MULRegR) {
      } else if (!MULRegL && MULRegR) {

  if (DoRegPressureReduce && IsRPReductionCandidate()) {
    assert((MULInstrL && MULInstrR) &&
           "wrong register pressure reduction candidate!");
    if (isLoadFromConstantPool(MULInstrL) && IsUsedOnceR &&

    if ((isLoadFromConstantPool(MULInstrR) && IsUsedOnceL &&

  if (!IsReassociableFMA(Root, AddOpIdx, MulOpIdx, false))

  assert((AddOpIdx >= 0) && "add operand index not right!");

  if (!IsReassociableFMA(*Prev, AddOpIdx, MulOpIdx, false))

  assert((AddOpIdx >= 0) && "add operand index not right!");

  if (IsReassociableFMA(*Leaf, AddOpIdx, MulOpIdx, true)) {

  assert(!InsInstrs.empty() && "Instructions set to be inserted is empty!");

  int16_t Idx = getFMAOpIdxInfo(Root.getOpcode());

  const Constant *C = getConstantFromConstantPool(ConstDefInstr);
  assert(isa<llvm::ConstantFP>(C) && "not a valid constant!");

  APFloat F1((dyn_cast<ConstantFP>(C))->getValueAPF());

  for (auto *Inst : InsInstrs) {
      assert(Operand.isReg() && "Invalid instruction in InsInstrs!");
      if (Operand.getReg() == PPC::ZERO8) {
        Placeholder = &Operand;

  assert(Placeholder && "Placeholder does not exist!");

      generateLoadForNewConst(ConstPoolIdx, &Root, C->getType(), InsInstrs);

  Placeholder->setReg(LoadNewConst);

    if (MI.isDebugValue() || MI.isDebugLabel())

    RPTracker.recede(RegOpers);

  return GetMBBPressure(MBB)[PPC::RegisterPressureSets::VSSRC] >

bool PPCInstrInfo::isLoadFromConstantPool(MachineInstr *I) const {
  if (!I->hasOneMemOperand())

  return Op->isLoad() && Op->getPseudoValue() &&

Register PPCInstrInfo::generateLoadForNewConst(
         "Target not supported!\n");

  BuildMI(*MF, MI->getDebugLoc(), get(PPC::ADDIStocHA8), VReg1)

         "Only float and double are supported!");

    LoadOpcode = PPC::DFLOADf32;
    LoadOpcode = PPC::DFLOADf64;

  InsInstrs.insert(InsInstrs.begin(), TOCOffset);

PPCInstrInfo::getConstantFromConstantPool(MachineInstr *I) const {
  assert(I->mayLoad() && "Should be a load instruction.\n");
  for (auto MO : I->uses()) {

        return (MCP->getConstants())[MO2.getIndex()].Val.ConstVal;

                                      bool DoRegPressureReduce) const {
                                        DoRegPressureReduce);

    reassociateFMA(Root, Pattern, InsInstrs, DelInstrs, InstrIdxForVirtReg);
                                           DelInstrs, InstrIdxForVirtReg);

void PPCInstrInfo::reassociateFMA(

  int16_t Idx = getFMAOpIdxInfo(FmaOp);
  assert(Idx >= 0 && "Root must be a FMA instruction");

  bool IsILPReassociate =

  if (IsILPReassociate)

    KillFlag = Operand.isKill();

                             bool &MulOp1KillFlag, bool &MulOp2KillFlag,
                             bool &AddOpKillFlag) {
    GetOperandInfo(Instr.getOperand(FirstMulOpIdx), MulOp1, MulOp1KillFlag);
    GetOperandInfo(Instr.getOperand(FirstMulOpIdx + 1), MulOp2, MulOp2KillFlag);
    GetOperandInfo(Instr.getOperand(AddOpIdx), AddOp, AddOpKillFlag);

  Register RegM11, RegM12, RegX, RegY, RegM21, RegM22, RegM31, RegM32, RegA11,
  bool KillX = false, KillY = false, KillM11 = false, KillM12 = false,
       KillM21 = false, KillM22 = false, KillM31 = false, KillM32 = false,
       KillA11 = false, KillA21 = false, KillB = false;

  GetFMAInstrInfo(Root, RegM31, RegM32, RegB, KillM31, KillM32, KillB);

  if (IsILPReassociate)
    GetFMAInstrInfo(*Prev, RegM21, RegM22, RegA21, KillM21, KillM22, KillA21);

    GetFMAInstrInfo(*Leaf, RegM11, RegM12, RegA11, KillM11, KillM12, KillA11);
    GetOperandInfo(Leaf->getOperand(AddOpIdx), RegX, KillX);

    GetOperandInfo(Leaf->getOperand(1), RegX, KillX);
    GetOperandInfo(Leaf->getOperand(2), RegY, KillY);

    GetOperandInfo(Leaf->getOperand(1), RegX, KillX);
    GetOperandInfo(Leaf->getOperand(2), RegY, KillY);

  InstrIdxForVirtReg.insert(std::make_pair(NewVRA, 0));

  if (IsILPReassociate) {
    InstrIdxForVirtReg.insert(std::make_pair(NewVRB, 1));
    InstrIdxForVirtReg.insert(std::make_pair(NewVRD, 2));

                                Register RegMul2, bool KillRegMul2) {
    MI->getOperand(AddOpIdx).setReg(RegAdd);
    MI->getOperand(AddOpIdx).setIsKill(KillAdd);
    MI->getOperand(FirstMulOpIdx).setReg(RegMul1);
    MI->getOperand(FirstMulOpIdx).setIsKill(KillRegMul1);
    MI->getOperand(FirstMulOpIdx + 1).setReg(RegMul2);
    MI->getOperand(FirstMulOpIdx + 1).setIsKill(KillRegMul2);

    AdjustOperandOrder(MINewB, RegX, KillX, RegM21, KillM21, RegM22, KillM22);
    AdjustOperandOrder(MINewA, RegY, KillY, RegM31, KillM31, RegM32, KillM32);

    InsInstrs.push_back(MINewA);
    InsInstrs.push_back(MINewB);
    InsInstrs.push_back(MINewC);

    assert(NewVRD && "new FMA register not created!");

    AdjustOperandOrder(MINewB, RegX, KillX, RegM21, KillM21, RegM22, KillM22);
    AdjustOperandOrder(MINewD, NewVRA, true, RegM31, KillM31, RegM32,

    InsInstrs.push_back(MINewA);
    InsInstrs.push_back(MINewB);
    InsInstrs.push_back(MINewD);
    InsInstrs.push_back(MINewC);

    bool KillVarReg = false;
      KillVarReg = KillM31;
      KillVarReg = KillM32;

  if (!IsILPReassociate) {
    InsInstrs.push_back(NewARegPressure);
    InsInstrs.push_back(NewCRegPressure);

  assert(!InsInstrs.empty() &&
         "Insertion instructions set should not be empty!");

  DelInstrs.push_back(Leaf);
  if (IsILPReassociate)
    DelInstrs.push_back(Prev);
  DelInstrs.push_back(&Root);

                                         unsigned &SubIdx) const {
  switch (MI.getOpcode()) {
  default: return false;
  case PPC::EXTSW_32_64:
    SrcReg = MI.getOperand(1).getReg();
    DstReg = MI.getOperand(0).getReg();
    SubIdx = PPC::sub_32;

  unsigned Opcode = MI.getOpcode();
  const unsigned *OpcodesForSpill = getLoadOpcodesForSpillArray();

  if (End != std::find(OpcodesForSpill, End, Opcode)) {
    if (MI.getOperand(1).isImm() && !MI.getOperand(1).getImm() &&
        MI.getOperand(2).isFI()) {
      return MI.getOperand(0).getReg();

  switch (MI.getOpcode()) {
  case PPC::ADDIStocHA:
  case PPC::ADDIStocHA8:
  case PPC::LOAD_STACK_GUARD:
  case PPC::XXLXORspz:
  case PPC::XXLXORdpz:
  case PPC::XXLEQVOnes:
  case PPC::XXSPLTIDP:
  case PPC::V_SETALLONESB:
  case PPC::V_SETALLONESH:
  case PPC::V_SETALLONES:
  case PPC::XXSETACCZ:

  unsigned Opcode = MI.getOpcode();
  const unsigned *OpcodesForSpill = getStoreOpcodesForSpillArray();

  if (End != std::find(OpcodesForSpill, End, Opcode)) {
    if (MI.getOperand(1).isImm() && !MI.getOperand(1).getImm() &&
        MI.getOperand(2).isFI()) {
      return MI.getOperand(0).getReg();

                                                   unsigned OpIdx2) const {

  if (MI.getOpcode() != PPC::RLWIMI && MI.getOpcode() != PPC::RLWIMI_rec)

  if (MI.getOperand(3).getImm() != 0)
  assert(((OpIdx1 == 1 && OpIdx2 == 2) || (OpIdx1 == 2 && OpIdx2 == 1)) &&
         "Only the operands 1 and 2 can be swapped in RLWIMI/RLWIMI_rec.");
  unsigned SubReg1 = MI.getOperand(1).getSubReg();
  unsigned SubReg2 = MI.getOperand(2).getSubReg();
  bool Reg1IsKill = MI.getOperand(1).isKill();
  bool Reg2IsKill = MI.getOperand(2).isKill();
  bool ChangeReg0 = false;

         "Expecting a two-address instruction!");
  assert(MI.getOperand(0).getSubReg() == SubReg1 && "Tied subreg mismatch");

  unsigned MB = MI.getOperand(4).getImm();
  unsigned ME = MI.getOperand(5).getImm();

  if (MB == 0 && ME == 31)

    Register Reg0 = ChangeReg0 ? Reg2 : MI.getOperand(0).getReg();
    bool Reg0IsDead = MI.getOperand(0).isDead();
    return BuildMI(MF, MI.getDebugLoc(), MI.getDesc())

    MI.getOperand(0).setReg(Reg2);
    MI.getOperand(0).setSubReg(SubReg2);
  MI.getOperand(2).setReg(Reg1);
  MI.getOperand(1).setReg(Reg2);
  MI.getOperand(2).setSubReg(SubReg1);
  MI.getOperand(1).setSubReg(SubReg2);
  MI.getOperand(2).setIsKill(Reg1IsKill);
  MI.getOperand(1).setIsKill(Reg2IsKill);

  MI.getOperand(4).setImm((ME + 1) & 31);
  MI.getOperand(5).setImm((MB - 1) & 31);
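// Editor's illustrative sketch, not part of the original file: swapping the
// two rotate sources of RLWIMI means the insert mask MB..ME must be replaced
// by its complement, and for a wrapping 32-bit mask that is exactly
// MB' = (ME + 1) & 31, ME' = (MB - 1) & 31, as set above.  Assuming <cstdint>
// is visible, the identity can be checked in isolation:
static constexpr uint32_t rlwimiMask(unsigned MB, unsigned ME) {
  uint32_t HiPart = 0xFFFFFFFFu >> MB;        // IBM bits MB..31 (bit 0 = MSB)
  uint32_t LoPart = 0xFFFFFFFFu << (31 - ME); // IBM bits 0..ME
  return MB <= ME ? (HiPart & LoPart) : (HiPart | LoPart);
}
// Example: for MB = 8, ME = 23 the swapped mask (ME+1, MB-1) is the bitwise
// complement of the original mask.
static_assert(rlwimiMask(24, 7) == ~rlwimiMask(8, 23), "mask swap identity");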
                                          unsigned &SrcOpIdx1,
                                          unsigned &SrcOpIdx2) const {

    return fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2, 2, 3);

  default: Opcode = PPC::NOP; break;

  Nop.setOpcode(PPC::NOP);

                                 bool AllowModify) const {
  bool isPPC64 = Subtarget.isPPC64();

  if (!isUnpredicatedTerminator(*I))

    if (I->getOpcode() == PPC::B &&
      I->eraseFromParent();

      if (I == MBB.end() || !isUnpredicatedTerminator(*I))

  if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  } else if (LastInst.getOpcode() == PPC::BCC) {
  } else if (LastInst.getOpcode() == PPC::BC) {
  } else if (LastInst.getOpcode() == PPC::BCn) {
  } else if (LastInst.getOpcode() == PPC::BDNZ8 ||
  } else if (LastInst.getOpcode() == PPC::BDZ8 ||

  if (I != MBB.begin() && isUnpredicatedTerminator(*--I))

  if (SecondLastInst.getOpcode() == PPC::BCC &&
  } else if (SecondLastInst.getOpcode() == PPC::BC &&
  } else if (SecondLastInst.getOpcode() == PPC::BCn &&
  } else if ((SecondLastInst.getOpcode() == PPC::BDNZ8 ||
  } else if ((SecondLastInst.getOpcode() == PPC::BDZ8 ||

    I->eraseFromParent();

                                    int *BytesRemoved) const {
  assert(!BytesRemoved && "code size not handled");

  if (I->getOpcode() != PPC::B && I->getOpcode() != PPC::BCC &&
      I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
      I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
      I->getOpcode() != PPC::BDZ8 && I->getOpcode() != PPC::BDZ)

  I->eraseFromParent();

  if (I->getOpcode() != PPC::BCC &&
      I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
      I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
      I->getOpcode() != PPC::BDZ8 && I->getOpcode() != PPC::BDZ)

  I->eraseFromParent();

                                    int *BytesAdded) const {
  assert(TBB && "insertBranch must not be told to insert a fallthrough");
         "PPC branch conditions have two components!");
  assert(!BytesAdded && "code size not handled");

  bool isPPC64 = Subtarget.isPPC64();

                                   Register FalseReg, int &CondCycles,
                                   int &TrueCycles, int &FalseCycles) const {
  if (Cond.size() != 2)

  if (!PPC::GPRCRegClass.hasSubClassEq(RC) &&
      !PPC::GPRC_NOR0RegClass.hasSubClassEq(RC) &&
      !PPC::G8RCRegClass.hasSubClassEq(RC) &&
      !PPC::G8RC_NOX0RegClass.hasSubClassEq(RC))

         "PPC branch conditions have two components!");

  assert(RC && "TrueReg and FalseReg must have overlapping register classes");

  bool Is64Bit = PPC::G8RCRegClass.hasSubClassEq(RC) ||
                 PPC::G8RC_NOX0RegClass.hasSubClassEq(RC);
          PPC::GPRCRegClass.hasSubClassEq(RC) ||
          PPC::GPRC_NOR0RegClass.hasSubClassEq(RC)) &&
         "isel is for regular integer GPRs only");

  unsigned OpCode = Is64Bit ? PPC::ISEL8 : PPC::ISEL;

  unsigned SubIdx = 0;
  bool SwapOps = false;
  switch (SelectPred) {
    SubIdx = PPC::sub_eq; SwapOps = false; break;
    SubIdx = PPC::sub_eq; SwapOps = true; break;
    SubIdx = PPC::sub_lt; SwapOps = false; break;
    SubIdx = PPC::sub_lt; SwapOps = true; break;
    SubIdx = PPC::sub_gt; SwapOps = false; break;
    SubIdx = PPC::sub_gt; SwapOps = true; break;
    SubIdx = PPC::sub_un; SwapOps = false; break;
    SubIdx = PPC::sub_un; SwapOps = true; break;

  Register FirstReg = SwapOps ? FalseReg : TrueReg,
           SecondReg = SwapOps ? TrueReg : FalseReg;

                          &PPC::G8RC_NOX0RegClass : &PPC::GPRC_NOR0RegClass;

  if (CRBit == PPC::CR0LT || CRBit == PPC::CR1LT ||
      CRBit == PPC::CR2LT || CRBit == PPC::CR3LT ||
      CRBit == PPC::CR4LT || CRBit == PPC::CR5LT ||
      CRBit == PPC::CR6LT || CRBit == PPC::CR7LT)
  if (CRBit == PPC::CR0GT || CRBit == PPC::CR1GT ||
      CRBit == PPC::CR2GT || CRBit == PPC::CR3GT ||
      CRBit == PPC::CR4GT || CRBit == PPC::CR5GT ||
      CRBit == PPC::CR6GT || CRBit == PPC::CR7GT)
  if (CRBit == PPC::CR0EQ || CRBit == PPC::CR1EQ ||
      CRBit == PPC::CR2EQ || CRBit == PPC::CR3EQ ||
      CRBit == PPC::CR4EQ || CRBit == PPC::CR5EQ ||
      CRBit == PPC::CR6EQ || CRBit == PPC::CR7EQ)
  if (CRBit == PPC::CR0UN || CRBit == PPC::CR1UN ||
      CRBit == PPC::CR2UN || CRBit == PPC::CR3UN ||
      CRBit == PPC::CR4UN || CRBit == PPC::CR5UN ||
      CRBit == PPC::CR6UN || CRBit == PPC::CR7UN)

  assert(Ret != 4 && "Invalid CR bit register");

  if (PPC::F8RCRegClass.contains(DestReg) &&
      PPC::VSRCRegClass.contains(SrcReg)) {
  } else if (PPC::F8RCRegClass.contains(SrcReg) &&
             PPC::VSRCRegClass.contains(DestReg)) {

  if (PPC::CRBITRCRegClass.contains(SrcReg) &&
      PPC::GPRCRegClass.contains(DestReg)) {
  } else if (PPC::CRRCRegClass.contains(SrcReg) &&
             (PPC::G8RCRegClass.contains(DestReg) ||
              PPC::GPRCRegClass.contains(DestReg))) {
    bool Is64Bit = PPC::G8RCRegClass.contains(DestReg);
    unsigned MvCode = Is64Bit ? PPC::MFOCRF8 : PPC::MFOCRF;
    unsigned ShCode = Is64Bit ? PPC::RLWINM8 : PPC::RLWINM;
  } else if (PPC::G8RCRegClass.contains(SrcReg) &&
             PPC::VSFRCRegClass.contains(DestReg)) {
           "Subtarget doesn't support directmove, don't know how to copy.");
  } else if (PPC::VSFRCRegClass.contains(SrcReg) &&
             PPC::G8RCRegClass.contains(DestReg)) {
           "Subtarget doesn't support directmove, don't know how to copy.");
  } else if (PPC::SPERCRegClass.contains(SrcReg) &&
             PPC::GPRCRegClass.contains(DestReg)) {
  } else if (PPC::GPRCRegClass.contains(SrcReg) &&
             PPC::SPERCRegClass.contains(DestReg)) {

  if (PPC::GPRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::G8RCRegClass.contains(DestReg, SrcReg))
  else if (PPC::F4RCRegClass.contains(DestReg, SrcReg))
  else if (PPC::CRRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::VRRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::VSRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::VSFRCRegClass.contains(DestReg, SrcReg) ||
           PPC::VSSRCRegClass.contains(DestReg, SrcReg))
    Opc = (Subtarget.hasP9Vector()) ? PPC::XSCPSGNDP : PPC::XXLORf;
           PPC::VSRpRCRegClass.contains(DestReg, SrcReg)) {
    if (SrcReg > PPC::VSRp15)
      SrcReg = PPC::V0 + (SrcReg - PPC::VSRp16) * 2;
      SrcReg = PPC::VSL0 + (SrcReg - PPC::VSRp0) * 2;
    if (DestReg > PPC::VSRp15)
      DestReg = PPC::V0 + (DestReg - PPC::VSRp16) * 2;
      DestReg = PPC::VSL0 + (DestReg - PPC::VSRp0) * 2;
  else if (PPC::CRBITRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::SPERCRegClass.contains(DestReg, SrcReg))
  else if ((PPC::ACCRCRegClass.contains(DestReg) ||
            PPC::UACCRCRegClass.contains(DestReg)) &&
           (PPC::ACCRCRegClass.contains(SrcReg) ||
            PPC::UACCRCRegClass.contains(SrcReg))) {
    bool DestPrimed = PPC::ACCRCRegClass.contains(DestReg);
    bool SrcPrimed = PPC::ACCRCRegClass.contains(SrcReg);
        PPC::VSL0 + (SrcReg - (SrcPrimed ? PPC::ACC0 : PPC::UACC0)) * 4;
        PPC::VSL0 + (DestReg - (DestPrimed ? PPC::ACC0 : PPC::UACC0)) * 4;
    for (unsigned Idx = 0; Idx < 4; Idx++)
    if (SrcPrimed && !KillSrc)
  } else if (PPC::G8pRCRegClass.contains(DestReg) &&
             PPC::G8pRCRegClass.contains(SrcReg)) {
    unsigned DestRegIdx = DestReg - PPC::G8p0;
    MCRegister DestRegSub0 = PPC::X0 + 2 * DestRegIdx;
    MCRegister DestRegSub1 = PPC::X0 + 2 * DestRegIdx + 1;
    unsigned SrcRegIdx = SrcReg - PPC::G8p0;
    MCRegister SrcRegSub0 = PPC::X0 + 2 * SrcRegIdx;
    MCRegister SrcRegSub1 = PPC::X0 + 2 * SrcRegIdx + 1;

  if (PPC::GPRCRegClass.hasSubClassEq(RC) ||
      PPC::GPRC_NOR0RegClass.hasSubClassEq(RC)) {
  } else if (PPC::G8RCRegClass.hasSubClassEq(RC) ||
             PPC::G8RC_NOX0RegClass.hasSubClassEq(RC)) {
  } else if (PPC::F8RCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::F4RCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::SPERCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::CRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::CRBITRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSFRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSSRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::SPILLTOVSRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::ACCRCRegClass.hasSubClassEq(RC)) {
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::UACCRCRegClass.hasSubClassEq(RC)) {
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::VSRpRCRegClass.hasSubClassEq(RC)) {
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::G8pRCRegClass.hasSubClassEq(RC)) {

  const unsigned *OpcodesForSpill = getStoreOpcodesForSpillArray();
  return OpcodesForSpill[getSpillIndex(RC)];

  const unsigned *OpcodesForSpill = getLoadOpcodesForSpillArray();
  return OpcodesForSpill[getSpillIndex(RC)];

void PPCInstrInfo::StoreRegToStackSlot(

  if (PPC::CRRCRegClass.hasSubClassEq(RC) ||
      PPC::CRBITRCRegClass.hasSubClassEq(RC))

  StoreRegToStackSlot(MF, SrcReg, isKill, FrameIdx, RC, NewMIs);

  for (unsigned i = 0, e = NewMIs.size(); i != e; ++i)

  NewMIs.back()->addMemOperand(MF, MMO);

                                        unsigned DestReg, int FrameIdx,

  if (PPC::CRRCRegClass.hasSubClassEq(RC) ||
      PPC::CRBITRCRegClass.hasSubClassEq(RC))

  LoadRegFromStackSlot(MF, DL, DestReg, FrameIdx, RC, NewMIs);

  for (unsigned i = 0, e = NewMIs.size(); i != e; ++i)

  NewMIs.back()->addMemOperand(MF, MMO);

  assert(Cond.size() == 2 && "Invalid PPC branch opcode!");

  Cond[0].setImm(Cond[0].getImm() == 0 ? 1 : 0);

  unsigned DefOpc = DefMI.getOpcode();
  if (DefOpc != PPC::LI && DefOpc != PPC::LI8)
  if (!DefMI.getOperand(1).isImm())
  if (DefMI.getOperand(1).getImm() != 0)

  for (UseIdx = 0; UseIdx < UseMI.getNumOperands(); ++UseIdx)
    if (UseMI.getOperand(UseIdx).isReg() &&

  assert(UseIdx < UseMI.getNumOperands() && "Cannot find Reg in UseMI");

  if (UseInfo->RegClass != PPC::GPRC_NOR0RegClassID &&
      UseInfo->RegClass != PPC::G8RC_NOX0RegClassID)

    bool isPPC64 = Subtarget.isPPC64();
    ZeroReg = isPPC64 ? PPC::ZERO8 : PPC::ZERO;
    ZeroReg = UseInfo->RegClass == PPC::G8RC_NOX0RegClassID ?
              PPC::ZERO8 : PPC::ZERO;

  UseMI.getOperand(UseIdx).setReg(ZeroReg);

    DefMI.eraseFromParent();

  if (MI.definesRegister(PPC::CTR) || MI.definesRegister(PPC::CTR8))

                    unsigned NumT, unsigned ExtraT,
                    unsigned NumF, unsigned ExtraF,

  if (MI.getOpcode() == PPC::MFFS || MI.getOpcode() == PPC::MTFSF)

  unsigned OpC = MI.getOpcode();
  if (OpC == PPC::BLR || OpC == PPC::BLR8) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR) {
      bool isPPC64 = Subtarget.isPPC64();
      MI.setDesc(get(Pred[0].getImm() ? (isPPC64 ? PPC::BDNZLR8 : PPC::BDNZLR)
                                      : (isPPC64 ? PPC::BDZLR8 : PPC::BDZLR)));

      MI.setDesc(get(PPC::BCLR));
      MI.setDesc(get(PPC::BCLRn));
      MI.setDesc(get(PPC::BCCLR));
          .addImm(Pred[0].getImm())

  } else if (OpC == PPC::B) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR) {
      bool isPPC64 = Subtarget.isPPC64();
      MI.setDesc(get(Pred[0].getImm() ? (isPPC64 ? PPC::BDNZ8 : PPC::BDNZ)
                                      : (isPPC64 ? PPC::BDZ8 : PPC::BDZ)));

      MI.removeOperand(0);
      MI.setDesc(get(PPC::BC));
      MI.removeOperand(0);
      MI.setDesc(get(PPC::BCn));
      MI.removeOperand(0);
      MI.setDesc(get(PPC::BCC));
          .addImm(Pred[0].getImm())

             OpC == PPC::BCTRL8_RM) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR)

    bool setLR = OpC == PPC::BCTRL || OpC == PPC::BCTRL8 ||
    bool isPPC64 = Subtarget.isPPC64();

      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCTRL8 : PPC::BCCTR8)
                             : (setLR ? PPC::BCCTRL : PPC::BCCTR)));
      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCTRL8n : PPC::BCCTR8n)
                             : (setLR ? PPC::BCCTRLn : PPC::BCCTRn)));
      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCCTRL8 : PPC::BCCCTR8)
                             : (setLR ? PPC::BCCCTRL : PPC::BCCCTR)));
          .addImm(Pred[0].getImm())

  assert(Pred1.size() == 2 && "Invalid PPC first predicate");
  assert(Pred2.size() == 2 && "Invalid PPC second predicate");

  if (Pred1[1].getReg() == PPC::CTR8 || Pred1[1].getReg() == PPC::CTR)
  if (Pred2[1].getReg() == PPC::CTR8 || Pred2[1].getReg() == PPC::CTR)

                                  std::vector<MachineOperand> &Pred,
                                  bool SkipDead) const {

      { &PPC::CRRCRegClass, &PPC::CRBITRCRegClass,
        &PPC::CTRRCRegClass, &PPC::CTRRC8RegClass };

      if (MO.isDef() && RC->contains(MO.getReg())) {
      } else if (MO.isRegMask()) {
          if (MO.clobbersPhysReg(R)) {

                                  int64_t &Value) const {
  unsigned Opc = MI.getOpcode();
  default: return false;

    SrcReg = MI.getOperand(1).getReg();
    Value = MI.getOperand(2).getImm();

    SrcReg = MI.getOperand(1).getReg();
    SrcReg2 = MI.getOperand(2).getReg();

  if (OpC == PPC::FCMPUS || OpC == PPC::FCMPUD)

  bool isPPC64 = Subtarget.isPPC64();
  bool is32BitSignedCompare = OpC == PPC::CMPWI || OpC == PPC::CMPW;
  bool is32BitUnsignedCompare = OpC == PPC::CMPLWI || OpC == PPC::CMPLW;
  bool is64BitUnsignedCompare = OpC == PPC::CMPLDI || OpC == PPC::CMPLD;

  if (!MI) return false;

  bool equalityOnly = false;

  if (is32BitSignedCompare) {
  } else if (is32BitUnsignedCompare) {
      equalityOnly = true;
    equalityOnly = is64BitUnsignedCompare;
    equalityOnly = is32BitUnsignedCompare;

      if (SubIdx != PPC::sub_eq)

    bool FoundUse = false;

  else if (Value != 0) {

    int16_t Immed = (int16_t)Value;

      PredsToUpdate.push_back(std::make_pair(&(UseMI->getOperand(0)), Pred));

  for (; I != E && !noSub; --I) {

    if ((OpC == PPC::CMPW || OpC == PPC::CMPLW ||
         OpC == PPC::CMPD || OpC == PPC::CMPLD) &&
        (IOpC == PPC::SUBF || IOpC == PPC::SUBF8) &&

  int MIOpC = MI->getOpcode();
  if (MIOpC == PPC::ANDI_rec || MIOpC == PPC::ANDI8_rec ||
      MIOpC == PPC::ANDIS_rec || MIOpC == PPC::ANDIS8_rec)

    NewOpC = PPC::getRecordFormOpcode(MIOpC);

  if (!equalityOnly && (NewOpC == PPC::SUBF_rec || NewOpC == PPC::SUBF8_rec) &&

  bool ShouldSwap = false;
  if (Sub && Value == 0) {

      ShouldSwap = !ShouldSwap;

           "Invalid predicate for equality-only optimization");

    assert((!equalityOnly || NewSubReg == PPC::sub_eq) &&
           "Invalid CR bit for equality-only optimization");

    if (NewSubReg == PPC::sub_lt)
      NewSubReg = PPC::sub_gt;
    else if (NewSubReg == PPC::sub_gt)
      NewSubReg = PPC::sub_lt;
         "Non-zero immediate support and ShouldSwap "
         "may conflict in updating predicate");
  BuildMI(*MI->getParent(), std::next(MII), MI->getDebugLoc(),
          get(TargetOpcode::COPY), CRReg)

  MI->clearRegisterDeads(PPC::CR0);

  if (MIOpC != NewOpC) {

    if (MIOpC == PPC::RLWINM || MIOpC == PPC::RLWINM8) {
      Register GPRRes = MI->getOperand(0).getReg();
      int64_t SH = MI->getOperand(2).getImm();
      int64_t MB = MI->getOperand(3).getImm();
      int64_t ME = MI->getOperand(4).getImm();

      bool MBInLoHWord = MB >= 16;
      bool MEInLoHWord = ME >= 16;

      if (MB <= ME && MBInLoHWord == MEInLoHWord && SH == 0) {
        Mask = ((1LLU << (32 - MB)) - 1) & ~((1LLU << (31 - ME)) - 1);
        Mask >>= MBInLoHWord ? 0 : 16;
        NewOpC = MIOpC == PPC::RLWINM
                     ? (MBInLoHWord ? PPC::ANDI_rec : PPC::ANDIS_rec)
                     : (MBInLoHWord ? PPC::ANDI8_rec : PPC::ANDIS8_rec);
                 (ME - MB + 1 == SH) && (MB >= 16)) {
        Mask = ((1LLU << 32) - 1) & ~((1LLU << (32 - SH)) - 1);
        NewOpC = MIOpC == PPC::RLWINM ? PPC::ANDIS_rec : PPC::ANDIS8_rec;

      if (Mask != ~0LLU) {
        MI->removeOperand(4);
        MI->removeOperand(3);
        MI->getOperand(2).setImm(Mask);
        NumRcRotatesConvertedToRcAnd++;
    } else if (MIOpC == PPC::RLDICL && MI->getOperand(2).getImm() == 0) {
      int64_t MB = MI->getOperand(3).getImm();
        NewOpC = PPC::ANDI8_rec;
        MI->removeOperand(3);
        MI->getOperand(2).setImm(Mask);
        NumRcRotatesConvertedToRcAnd++;

    MI->setDesc(NewDesc);

                                    *ImpDefs; ++ImpDefs)
        if (!MI->definesRegister(*ImpDefs))
          MI->addOperand(*MI->getParent()->getParent(),
                                    *ImpUses; ++ImpUses)
        if (!MI->readsRegister(*ImpUses))
          MI->addOperand(*MI->getParent()->getParent(),

  assert(MI->definesRegister(PPC::CR0) &&
         "Record-form instruction does not define cr0?");

  for (unsigned i = 0, e = PredsToUpdate.size(); i < e; i++)
    PredsToUpdate[i].first->setImm(PredsToUpdate[i].second);

  for (unsigned i = 0, e = SubRegsToUpdate.size(); i < e; i++)
    SubRegsToUpdate[i].first->setSubReg(SubRegsToUpdate[i].second);

    int64_t &Offset, bool &OffsetIsScalable, unsigned &Width,
  OffsetIsScalable = false;
  BaseOps.push_back(BaseOp);

  case PPC::DFSTOREf64:
    return FirstOpc == SecondOpc;

    return SecondOpc == PPC::STW || SecondOpc == PPC::STW8;

                                   unsigned NumBytes) const {

         "Only base registers and frame indices are supported.");

  unsigned FirstOpc = FirstLdSt.getOpcode();
  unsigned SecondOpc = SecondLdSt.getOpcode();

  int64_t Offset1 = 0, Offset2 = 0;
  unsigned Width1 = 0, Width2 = 0;

  assert(Base1 == &BaseOp1 && Base2 == &BaseOp2 &&
         "getMemOperandWithOffsetWidth return incorrect base op");

  assert(Offset1 <= Offset2 && "Caller should have ordered offsets.");
  return Offset1 + Width1 == Offset2;
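// Editor's illustrative sketch, not part of the original file: the clustering
// check above treats two accesses as adjacent exactly when the first one ends
// where the second one begins.
static constexpr bool accessesAreAdjacent(int64_t Offset1, unsigned Width1,
                                          int64_t Offset2) {
  // e.g. a 4-byte load at offset 8 pairs with a load at offset 12.
  return Offset1 + Width1 == Offset2;
}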
  unsigned Opcode = MI.getOpcode();

    const char *AsmStr = MI.getOperand(0).getSymbolName();
  } else if (Opcode == TargetOpcode::STACKMAP) {
  } else if (Opcode == TargetOpcode::PATCHPOINT) {
  return get(Opcode).getSize();

std::pair<unsigned, unsigned>
  return std::make_pair(TF & Mask, TF & ~Mask);

  using namespace PPCII;
  static const std::pair<unsigned, const char *> TargetFlags[] = {

  using namespace PPCII;
  static const std::pair<unsigned, const char *> TargetFlags[] = {

  unsigned UpperOpcode, LowerOpcode;
  switch (MI.getOpcode()) {
  case PPC::DFLOADf32:
    UpperOpcode = PPC::LXSSP;
    LowerOpcode = PPC::LFS;
  case PPC::DFLOADf64:
    UpperOpcode = PPC::LXSD;
    LowerOpcode = PPC::LFD;
  case PPC::DFSTOREf32:
    UpperOpcode = PPC::STXSSP;
    LowerOpcode = PPC::STFS;
  case PPC::DFSTOREf64:
    UpperOpcode = PPC::STXSD;
    LowerOpcode = PPC::STFD;
  case PPC::XFLOADf32:
    UpperOpcode = PPC::LXSSPX;
    LowerOpcode = PPC::LFSX;
  case PPC::XFLOADf64:
    UpperOpcode = PPC::LXSDX;
    LowerOpcode = PPC::LFDX;
  case PPC::XFSTOREf32:
    UpperOpcode = PPC::STXSSPX;
    LowerOpcode = PPC::STFSX;
  case PPC::XFSTOREf64:
    UpperOpcode = PPC::STXSDX;
    LowerOpcode = PPC::STFDX;
    UpperOpcode = PPC::LXSIWAX;
    UpperOpcode = PPC::LXSIWZX;
    UpperOpcode = PPC::STXSIWX;

  Register TargetReg = MI.getOperand(0).getReg();
  if ((TargetReg >= PPC::F0 && TargetReg <= PPC::F31) ||
      (TargetReg >= PPC::VSL0 && TargetReg <= PPC::VSL31))
    Opcode = LowerOpcode;
    Opcode = UpperOpcode;
  MI.setDesc(get(Opcode));

  auto &MBB = *MI.getParent();
  auto DL = MI.getDebugLoc();

  switch (MI.getOpcode()) {
  case PPC::BUILD_UACC: {
    if (ACC - PPC::ACC0 != UACC - PPC::UACC0) {
      MCRegister SrcVSR = PPC::VSL0 + (UACC - PPC::UACC0) * 4;
      MCRegister DstVSR = PPC::VSL0 + (ACC - PPC::ACC0) * 4;
      for (int VecNo = 0; VecNo < 4; VecNo++)
            .addReg(SrcVSR + VecNo)
  case PPC::KILL_PAIR: {
    MI.setDesc(get(PPC::UNENCODED_NOP));
    MI.removeOperand(1);
    MI.removeOperand(0);
  case TargetOpcode::LOAD_STACK_GUARD: {
           "Only Linux target is expected to contain LOAD_STACK_GUARD");
    const int64_t Offset = Subtarget.isPPC64() ? -0x7010 : -0x7008;
  case PPC::DFLOADf32:
  case PPC::DFLOADf64:
  case PPC::DFSTOREf32:
  case PPC::DFSTOREf64: {
           "Invalid D-Form Pseudo-ops on Pre-P9 target.");
           "D-form op must have register and immediate operands");
  case PPC::XFLOADf32:
  case PPC::XFSTOREf32:
           "Invalid X-Form Pseudo-ops on Pre-P8 target.");
    assert(MI.getOperand(2).isReg() && MI.getOperand(1).isReg() &&
           "X-form op must have register and register operands");
  case PPC::XFLOADf64:
  case PPC::XFSTOREf64: {
           "Invalid X-Form Pseudo-ops on target that has no VSX.");
    assert(MI.getOperand(2).isReg() && MI.getOperand(1).isReg() &&
           "X-form op must have register and register operands");
  case PPC::SPILLTOVSR_LD: {
    Register TargetReg = MI.getOperand(0).getReg();
    if (PPC::VSFRCRegClass.contains(TargetReg)) {
      MI.setDesc(get(PPC::DFLOADf64));
  case PPC::SPILLTOVSR_ST: {
    if (PPC::VSFRCRegClass.contains(SrcReg)) {
      NumStoreSPILLVSRRCAsVec++;
      MI.setDesc(get(PPC::DFSTOREf64));
      NumStoreSPILLVSRRCAsGpr++;
      MI.setDesc(get(PPC::STD));
  case PPC::SPILLTOVSR_LDX: {
    Register TargetReg = MI.getOperand(0).getReg();
    if (PPC::VSFRCRegClass.contains(TargetReg))
      MI.setDesc(get(PPC::LXSDX));
      MI.setDesc(get(PPC::LDX));
  case PPC::SPILLTOVSR_STX: {
    if (PPC::VSFRCRegClass.contains(SrcReg)) {
      NumStoreSPILLVSRRCAsVec++;
      MI.setDesc(get(PPC::STXSDX));
      NumStoreSPILLVSRRCAsGpr++;
      MI.setDesc(get(PPC::STDX));
  case PPC::CFENCE8: {
    auto Val = MI.getOperand(0).getReg();
    MI.setDesc(get(PPC::ISYNC));
    MI.removeOperand(0);

static unsigned selectReg(int64_t Imm1, int64_t Imm2, unsigned CompareOpc,
                          unsigned TrueReg, unsigned FalseReg,
                          unsigned CRSubReg) {
  if (CompareOpc == PPC::CMPWI || CompareOpc == PPC::CMPDI) {
      return Imm1 < Imm2 ? TrueReg : FalseReg;
      return Imm1 > Imm2 ? TrueReg : FalseReg;
      return Imm1 == Imm2 ? TrueReg : FalseReg;
  else if (CompareOpc == PPC::CMPLWI || CompareOpc == PPC::CMPLDI) {
      return Imm1 == Imm2 ? TrueReg : FalseReg;
  return PPC::NoRegister;
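// Editor's illustrative sketch, not part of the original file: selectReg is
// used by the LI -> CMPI -> ISEL peephole further down to evaluate the select
// at compile time.  For a signed compare feeding an ISEL on the LT bit,
//   unsigned Chosen = selectReg(/*Imm1=*/5, /*Imm2=*/7, PPC::CMPWI,
//                               TrueReg, FalseReg, /*CRSubReg=*/PPC::sub_lt);
// would return TrueReg, because 5 < 7; PPC::NoRegister is returned whenever
// the comparison kind or CR bit is not one the helper understands.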
                                                 int64_t Imm) const {
  assert(MI.getOperand(OpNo).isReg() && "Operand must be a REG");

  Register InUseReg = MI.getOperand(OpNo).getReg();
  MI.getOperand(OpNo).ChangeToImmediate(Imm);

  int UseOpIdx = MI.findRegisterUseOperandIdx(InUseReg, false, TRI);
  if (UseOpIdx >= 0) {

    MI.removeOperand(UseOpIdx);

  int OperandToKeep = LII.SetCR ? 1 : 0;
  for (int i = MI.getNumOperands() - 1; i > OperandToKeep; i--)
    MI.removeOperand(i);

    MI.setDesc(get(LII.Is64Bit ? PPC::ANDI8_rec : PPC::ANDI_rec));

                                          bool &SeenIntermediateUse) const {
  assert(!MI.getParent()->getParent()->getRegInfo().isSSA() &&
         "Should be called after register allocation.");

  SeenIntermediateUse = false;
  for (; It != E; ++It) {
    if (It->modifiesRegister(Reg, TRI))
    if (It->readsRegister(Reg, TRI))
      SeenIntermediateUse = true;

                                      int64_t Imm) const {
         "Register should be in non-SSA form after RA");
  bool isPPC64 = Subtarget.isPPC64();

    assert(isPPC64 && "Materializing 64-bit immediate to single register is "
                      "only supported in PPC64");

    if ((Imm >> 32) & 0xFFFF)

                                             unsigned &OpNoForForwarding,
                                             bool &SeenIntermediateUse) const {
  OpNoForForwarding = ~0U;

    for (int i = 1, e = MI.getNumOperands(); i < e; i++) {
      if (!MI.getOperand(i).isReg())

      OpNoForForwarding = i;

  unsigned Opc = MI.getOpcode();
  bool ConvertibleImmForm =
      Opc == PPC::CMPWI || Opc == PPC::CMPLWI || Opc == PPC::CMPDI ||
      Opc == PPC::CMPLDI || Opc == PPC::ADDI || Opc == PPC::ADDI8 ||
      Opc == PPC::ORI || Opc == PPC::ORI8 || Opc == PPC::XORI ||
      Opc == PPC::XORI8 || Opc == PPC::RLDICL || Opc == PPC::RLDICL_rec ||
      Opc == PPC::RLDICL_32 || Opc == PPC::RLDICL_32_64 ||
      Opc == PPC::RLWINM || Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8 ||
      Opc == PPC::RLWINM8_rec;
  bool IsVFReg = (MI.getNumOperands() && MI.getOperand(0).isReg())

  if ((Opc == PPC::OR || Opc == PPC::OR8) &&
      MI.getOperand(1).getReg() == MI.getOperand(2).getReg())

  for (int i = 1, e = MI.getNumOperands(); i < e; i++) {
    SeenIntermediateUse = false;

      OpNoForForwarding = i;

  return OpNoForForwarding == ~0U ? nullptr : DefMI;

unsigned PPCInstrInfo::getSpillTarget() const {
  return IsP10Variant ? 2 : Subtarget.hasP9Vector() ? 1 : 0;

const unsigned *PPCInstrInfo::getStoreOpcodesForSpillArray() const {
  return StoreSpillOpcodesArray[getSpillTarget()];

const unsigned *PPCInstrInfo::getLoadOpcodesForSpillArray() const {
  return LoadSpillOpcodesArray[getSpillTarget()];

                                  unsigned RegNo) const {
         "Instructions are not in same basic block");

  if (!Reads && !Writes) {
           "Must be a virtual register");

  bool IsKillSet = false;

  if (UseIndex != -1) {
      clearOperandKillInfo(*EndMI, i);

  for (; It != E; ++It) {
    if (It->isDebugInstr() || It->isPosition())

    for (int i = 0, e = It->getNumOperands(); i != e; ++i)
      clearOperandKillInfo(*It, i);

    if ((MO = It->findRegisterUseOperand(RegNo, false, &getRegisterInfo()))) {
    } else if ((MO = It->findRegisterDefOperand(RegNo, false, true,
      assert(&*It == StartMI && "No new def between StartMI and EndMI.");

    if ((&*It) == StartMI)

         "RegNo should be killed or dead");

  unsigned ToBeDeletedReg = 0;
  int64_t OffsetImm = 0;
  unsigned XFormOpcode = 0;

  bool OtherIntermediateUse = false;

  if (OtherIntermediateUse || !ADDMI)

  unsigned ScaleRegIdx = 0;
  int64_t OffsetAddi = 0;

  assert(ADDIMI && "There should be ADDIMI for valid ToBeChangedReg.");

  for (auto It = ++Start; It != End; It++)

      (ScaleReg == PPC::R0 || ScaleReg == PPC::X0))

  if (NewDefFor(ToBeChangedReg, *ADDMI, MI) || NewDefFor(ScaleReg, *ADDMI, MI))

  MI.setDesc(get(XFormOpcode));
      .ChangeToRegister(ScaleReg, false, false,
      .ChangeToRegister(ToBeChangedReg, false, false, true);

                                       int64_t &Imm) const {
  if (Opc != PPC::ADDI && Opc != PPC::ADDI8)

  return Opc == PPC::ADD4 || Opc == PPC::ADD8;

                                          unsigned &ToBeDeletedReg,
                                          unsigned &XFormOpcode,
  if (!MI.mayLoadOrStore())

  unsigned Opc = MI.getOpcode();

  if (XFormOpcode == PPC::INSTRUCTION_LIST_END)

  if (!ImmOperand.isImm())

  assert(RegOperand.isReg() && "Instruction format is not right");

  if (!RegOperand.isKill())

  ToBeDeletedReg = RegOperand.getReg();
  OffsetImm = ImmOperand.getImm();

                                       int64_t &OffsetAddi,
                                       int64_t OffsetImm) const {

  bool OtherIntermediateUse = false;

  if (OtherIntermediateUse || !ADDIMI)

  bool SeenIntermediateUse = true;
  unsigned ForwardingOperand = ~0U;
                                      SeenIntermediateUse);

  assert(ForwardingOperand < MI.getNumOperands() &&
         "The forwarding operand needs to be valid at this point");
  bool IsForwardingOperandKilled = MI.getOperand(ForwardingOperand).isKill();
  bool KillFwdDefMI = !SeenIntermediateUse && IsForwardingOperandKilled;
  if (KilledDef && KillFwdDefMI)

          PPC::INSTRUCTION_LIST_END &&
      transformToNewImmFormFedByAdd(MI, *DefMI, ForwardingOperand))

  bool IsVFReg = MI.getOperand(0).isReg()

    transformToImmFormFedByAdd(MI, III, ForwardingOperand, *DefMI,

    transformToImmFormFedByLI(MI, III, ForwardingOperand, *DefMI))

  if (!HasImmForm && simplifyToLI(MI, *DefMI, ForwardingOperand, KilledDef))

  Register FoldingReg = MI.getOperand(1).getReg();

  if (SrcMI->getOpcode() != PPC::RLWINM &&
      SrcMI->getOpcode() != PPC::RLWINM_rec &&

  assert((MI.getOperand(2).isImm() && MI.getOperand(3).isImm() &&
         "Invalid PPC::RLWINM Instruction!");

  assert((MEMI < 32 && MESrc < 32 && MBMI < 32 && MBSrc < 32) &&
         "Invalid PPC::RLWINM Instruction!");

  bool SrcMaskFull = (MBSrc - MESrc == 1) || (MBSrc == 0 && MESrc == 31);

  if ((MBMI > MEMI) && !SrcMaskFull)

  APInt RotatedSrcMask = MaskSrc.rotl(SHMI);
  APInt FinalMask = RotatedSrcMask & MaskMI;

  if (FinalMask.isZero()) {
        (MI.getOpcode() == PPC::RLWINM8 || MI.getOpcode() == PPC::RLWINM8_rec);

    if (MI.getOpcode() == PPC::RLWINM || MI.getOpcode() == PPC::RLWINM8) {
      MI.removeOperand(4);
      MI.removeOperand(3);
      MI.removeOperand(2);
      MI.getOperand(1).ChangeToImmediate(0);
      MI.setDesc(get(Is64Bit ? PPC::LI8 : PPC::LI));

      MI.removeOperand(4);
      MI.removeOperand(3);
      MI.getOperand(2).setImm(0);
      MI.setDesc(get(Is64Bit ? PPC::ANDI8_rec : PPC::ANDI_rec));

      MI.getOperand(1).setIsKill(true);
      MI.getOperand(1).setIsKill(false);

    uint16_t NewSH = (SHSrc + SHMI) % 32;
    MI.getOperand(2).setImm(NewSH);
    MI.getOperand(3).setImm(NewMB);
    MI.getOperand(4).setImm(NewME);

      MI.getOperand(1).setIsKill(true);
      MI.getOperand(1).setIsKill(false);
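// Editor's illustrative sketch, not part of the original file: folding
//   srcReg = RLWINM src, SHSrc, MBSrc, MESrc
//   dstReg = RLWINM srcReg, SHMI, MBMI, MEMI
// composes the rotates modulo 32 and intersects the masks after rotating the
// first one by SHMI, which is what the APInt arithmetic above computes.
// Assuming <cstdint> is visible, a concrete check of that mask algebra:
static constexpr uint32_t rlwMask(unsigned MB, unsigned ME) {
  uint32_t HiPart = 0xFFFFFFFFu >> MB;        // IBM bits MB..31 (bit 0 = MSB)
  uint32_t LoPart = 0xFFFFFFFFu << (31 - ME); // IBM bits 0..ME
  return MB <= ME ? (HiPart & LoPart) : (HiPart | LoPart);
}
static constexpr uint32_t rotl32(uint32_t V, unsigned N) {
  return N == 0 ? V : (V << N) | (V >> (32 - N));
}
// Mask (24..31) rotated left by 8 and intersected with mask (16..31) keeps
// only the second-lowest byte, i.e. 0x0000FF00.
static_assert((rotl32(rlwMask(24, 31), 8) & rlwMask(16, 31)) == 0x0000FF00u,
              "composed RLWINM mask");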
  default: return false;

    III.ImmOpcode = Opc == PPC::ADD4 ? PPC::ADDI : PPC::ADDI8;

    III.ImmOpcode = Opc == PPC::SUBFC ? PPC::SUBFIC : PPC::SUBFIC8;

    III.ImmOpcode = Opc == PPC::CMPW ? PPC::CMPWI : PPC::CMPDI;

    III.ImmOpcode = Opc == PPC::CMPLW ? PPC::CMPLWI : PPC::CMPLDI;

  case PPC::OR8: III.ImmOpcode = PPC::ORI8; break;
  case PPC::XOR8: III.ImmOpcode = PPC::XORI8; break;

  case PPC::RLWNM_rec:
  case PPC::RLWNM8_rec:

    if (Opc == PPC::RLWNM || Opc == PPC::RLWNM8 || Opc == PPC::RLWNM_rec ||
        Opc == PPC::RLWNM8_rec)

    case PPC::RLWNM: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::RLWNM8: III.ImmOpcode = PPC::RLWINM8; break;
    case PPC::RLWNM_rec:
    case PPC::RLWNM8_rec:
    case PPC::SLW: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::SLW8: III.ImmOpcode = PPC::RLWINM8; break;
    case PPC::SRW: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::SRW8: III.ImmOpcode = PPC::RLWINM8; break;

  case PPC::RLDCL_rec:
  case PPC::RLDCR_rec:

    if (Opc == PPC::RLDCL || Opc == PPC::RLDCL_rec || Opc == PPC::RLDCR ||
        Opc == PPC::RLDCR_rec)

    case PPC::RLDCL: III.ImmOpcode = PPC::RLDICL; break;
    case PPC::RLDCL_rec:
    case PPC::RLDCR: III.ImmOpcode = PPC::RLDICR; break;
    case PPC::RLDCR_rec:
    case PPC::SLD: III.ImmOpcode = PPC::RLDICR; break;
    case PPC::SRD: III.ImmOpcode = PPC::RLDICL; break;

    case PPC::LBZX: III.ImmOpcode = PPC::LBZ; break;
    case PPC::LBZX8: III.ImmOpcode = PPC::LBZ8; break;
    case PPC::LHZX: III.ImmOpcode = PPC::LHZ; break;
    case PPC::LHZX8: III.ImmOpcode = PPC::LHZ8; break;
    case PPC::LHAX: III.ImmOpcode = PPC::LHA; break;
    case PPC::LHAX8: III.ImmOpcode = PPC::LHA8; break;
    case PPC::LWZX: III.ImmOpcode = PPC::LWZ; break;
    case PPC::LWZX8: III.ImmOpcode = PPC::LWZ8; break;
    case PPC::LFSX: III.ImmOpcode = PPC::LFS; break;
    case PPC::LFDX: III.ImmOpcode = PPC::LFD; break;
    case PPC::STBX: III.ImmOpcode = PPC::STB; break;
    case PPC::STBX8: III.ImmOpcode = PPC::STB8; break;
    case PPC::STHX: III.ImmOpcode = PPC::STH; break;
    case PPC::STHX8: III.ImmOpcode = PPC::STH8; break;
    case PPC::STWX: III.ImmOpcode = PPC::STW; break;
    case PPC::STWX8: III.ImmOpcode = PPC::STW8; break;
    case PPC::STFSX: III.ImmOpcode = PPC::STFS; break;
    case PPC::STFDX: III.ImmOpcode = PPC::STFD; break;

    case PPC::LBZUX: III.ImmOpcode = PPC::LBZU; break;
    case PPC::LBZUX8: III.ImmOpcode = PPC::LBZU8; break;
    case PPC::LHZUX: III.ImmOpcode = PPC::LHZU; break;
    case PPC::LHZUX8: III.ImmOpcode = PPC::LHZU8; break;
    case PPC::LHAUX: III.ImmOpcode = PPC::LHAU; break;
    case PPC::LHAUX8: III.ImmOpcode = PPC::LHAU8; break;
    case PPC::LWZUX: III.ImmOpcode = PPC::LWZU; break;
    case PPC::LWZUX8: III.ImmOpcode = PPC::LWZU8; break;
    case PPC::LFSUX: III.ImmOpcode = PPC::LFSU; break;
    case PPC::LFDUX: III.ImmOpcode = PPC::LFDU; break;
    case PPC::STBUX: III.ImmOpcode = PPC::STBU; break;
    case PPC::STBUX8: III.ImmOpcode = PPC::STBU8; break;
    case PPC::STHUX: III.ImmOpcode = PPC::STHU; break;
    case PPC::STHUX8: III.ImmOpcode = PPC::STHU8; break;
    case PPC::STWUX: III.ImmOpcode = PPC::STWU; break;
    case PPC::STWUX8: III.ImmOpcode = PPC::STWU8; break;
    case PPC::STFSUX: III.ImmOpcode = PPC::STFSU; break;
    case PPC::STFDUX: III.ImmOpcode = PPC::STFDU; break;

  case PPC::XFLOADf32:
  case PPC::XFLOADf64:
  case PPC::XFSTOREf32:
  case PPC::XFSTOREf64:

    case PPC::XFLOADf32:
    case PPC::XFLOADf64:
    case PPC::XFSTOREf32:
    case PPC::XFSTOREf64:

  assert(Op1 != Op2 && "Cannot swap operand with itself.");

  unsigned MaxOp = std::max(Op1, Op2);
  unsigned MinOp = std::min(Op1, Op2);

  if (MaxOp - MinOp == 1 && MI.getNumOperands() == MinOp) {
    MI.addOperand(MOp2);
    MI.addOperand(MOp1);

  unsigned TotalOps = MI.getNumOperands() + 2;
  for (unsigned i = MI.getNumOperands() - 1; i >= MinOp; i--) {
    MOps.push_back(MI.getOperand(i));
    MI.removeOperand(i);

  MI.addOperand(MOp2);

  for (unsigned i = MI.getNumOperands(); i < TotalOps; i++) {
      MI.addOperand(MOp1);
      MI.addOperand(MOps.back());

                                         unsigned OpNoForForwarding

  unsigned Opc = DefMI.getOpcode();
  if (Opc != PPC::ADDItocL && Opc != PPC::ADDI && Opc != PPC::ADDI8)

         "Add inst must have at least three operands");
  RegMO = &DefMI.getOperand(1);
  ImmMO = &DefMI.getOperand(2);

  if (!RegMO->isReg())

bool PPCInstrInfo::isRegElgibleForForwarding(
    bool &IsFwdFeederRegKilled) const {

  for (; It != E; ++It) {
      IsFwdFeederRegKilled = true;
    if ((&*It) == &DefMI)

bool PPCInstrInfo::isImmElgibleForForwarding(const MachineOperand &ImmMO,
                                             int64_t BaseImm) const {
  if (DefMI.getOpcode() == PPC::ADDItocL) {

  if (ImmMO.isImm()) {
    APInt ActualValue(64, ImmMO.getImm() + BaseImm, true);
    Imm = SignExtend64<16>(ImmMO.getImm() + BaseImm);

                                  unsigned OpNoForForwarding,
  if ((DefMI.getOpcode() != PPC::LI && DefMI.getOpcode() != PPC::LI8) ||
      !DefMI.getOperand(1).isImm())

  int64_t Immediate = DefMI.getOperand(1).getImm();
  int64_t SExtImm = SignExtend64<16>(Immediate);

  bool IsForwardingOperandKilled = MI.getOperand(OpNoForForwarding).isKill();
  Register ForwardingOperandReg = MI.getOperand(OpNoForForwarding).getReg();

  bool ReplaceWithLI = false;
  bool Is64BitLI = false;

  unsigned Opc = MI.getOpcode();

    bool Changed = false;

    int64_t Comparand = MI.getOperand(2).getImm();
    int64_t SExtComparand = ((uint64_t)Comparand & ~0x7FFFuLL) != 0
                                ? (Comparand | 0xFFFFFFFFFFFF0000)

      unsigned UseOpc = CompareUseMI.getOpcode();
      if (UseOpc != PPC::ISEL && UseOpc != PPC::ISEL8)
      unsigned CRSubReg = CompareUseMI.getOperand(3).getSubReg();
      Register TrueReg = CompareUseMI.getOperand(1).getReg();
      Register FalseReg = CompareUseMI.getOperand(2).getReg();
      unsigned RegToCopy =
          selectReg(SExtImm, SExtComparand, Opc, TrueReg, FalseReg, CRSubReg);
      if (RegToCopy == PPC::NoRegister)

      if (RegToCopy == PPC::ZERO || RegToCopy == PPC::ZERO8) {
        CompareUseMI.setDesc(get(UseOpc == PPC::ISEL8 ? PPC::LI8 : PPC::LI));
        CompareUseMI.removeOperand(3);
        CompareUseMI.removeOperand(2);

                 dbgs() << "Found LI -> CMPI -> ISEL, replacing with a copy.\n");

      CompareUseMI.setDesc(get(PPC::COPY));
      CompareUseMI.removeOperand(3);
      CompareUseMI.removeOperand(RegToCopy == TrueReg ? 2 : 1);
      CmpIselsConverted++;

    MissedConvertibleImmediateInstrs++;

    int64_t Addend = MI.getOperand(2).getImm();
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::ADDI8;
      NewImm = Addend + SExtImm;

  case PPC::SUBFIC8: {
    if (MI.getNumOperands() > 3 && !MI.getOperand(3).isDead())
    int64_t Minuend = MI.getOperand(2).getImm();
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::SUBFIC8;
      NewImm = Minuend - SExtImm;

  case PPC::RLDICL_rec:
  case PPC::RLDICL_32:
  case PPC::RLDICL_32_64: {
    int64_t SH = MI.getOperand(2).getImm();
    int64_t MB = MI.getOperand(3).getImm();
    APInt InVal((Opc == PPC::RLDICL || Opc == PPC::RLDICL_rec) ? 64 : 32,
    InVal = InVal.rotl(SH);
    uint64_t Mask = MB == 0 ? -1LLU : (1LLU << (63 - MB + 1)) - 1;

    if (isUInt<15>(InVal.getSExtValue()) ||
        (Opc == PPC::RLDICL_rec && isUInt<16>(InVal.getSExtValue()))) {
      ReplaceWithLI = true;
      Is64BitLI = Opc != PPC::RLDICL_32;
      NewImm = InVal.getSExtValue();
      SetCR = Opc == PPC::RLDICL_rec;

  case PPC::RLWINM_rec:
  case PPC::RLWINM8_rec: {
    int64_t SH = MI.getOperand(2).getImm();
    int64_t MB = MI.getOperand(3).getImm();
    int64_t ME = MI.getOperand(4).getImm();
    APInt InVal(32, SExtImm, true);
    InVal = InVal.rotl(SH);

    bool ValueFits = isUInt<15>(InVal.getSExtValue());
    ValueFits |= ((Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8_rec) &&
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::RLWINM8 || Opc == PPC::RLWINM8_rec;
      NewImm = InVal.getSExtValue();
      SetCR = Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8_rec;

    int64_t LogicalImm = MI.getOperand(2).getImm();
    if (Opc == PPC::ORI || Opc == PPC::ORI8)
      Result = LogicalImm | SExtImm;
      Result = LogicalImm ^ SExtImm;
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::ORI8 || Opc == PPC::XORI8;

  if (ReplaceWithLI) {
    bool ImmChanged = (SExtImm & NewImm) != NewImm;
    if (PostRA && ImmChanged)

        DefMI.getOperand(1).setImm(NewImm);
        assert(Immediate && "Transformation converted zero to non-zero?");
      } else if (ImmChanged)

    if (KilledDef && SetCR)
      *KilledDef = nullptr;

    if (IsForwardingOperandKilled)

bool PPCInstrInfo::transformToNewImmFormFedByAdd(

  if (!MI.mayLoadOrStore())

  assert((XFormOpcode != PPC::INSTRUCTION_LIST_END) &&
         "MI must have x-form opcode");

  bool IsVFReg = MI.getOperand(0).isReg()

  if (!ImmOperandMI.isImm())

  if (!isDefMIElgibleForForwarding(DefMI, III, ImmMO, RegMO))
  assert(ImmMO && RegMO && "Imm and Reg operand must have been set");

  int64_t ImmBase = ImmOperandMI.getImm();
  if (!isImmElgibleForForwarding(*ImmMO, DefMI, III, Imm, ImmBase))

  unsigned ForwardKilledOperandReg = ~0U;

  if (DefMI.getParent() == MI.getParent()) {
    auto IsKilledFor = [&](unsigned Reg) {
      for (; It != E; ++It) {
        if (It->killsRegister(Reg))

  if (ForwardKilledOperandReg != ~0U)

bool PPCInstrInfo::transformToImmFormFedByAdd(

  if (!isUseMIElgibleForForwarding(MI, III, OpNoForForwarding))

  if (!isDefMIElgibleForForwarding(DefMI, III, ImmMO, RegMO))
  assert(ImmMO && RegMO && "Imm and Reg operand must have been set");

  if (!isImmElgibleForForwarding(*ImmMO, DefMI, III, Imm))

  bool IsFwdFeederRegKilled = false;
  if (!isRegElgibleForForwarding(*RegMO, DefMI, MI, KillDefMI,
                                 IsFwdFeederRegKilled))