#define AARCH64_EXPAND_PSEUDO_NAME "AArch64 pseudo instruction expansion pass"
                            unsigned ContiguousOpc, unsigned StridedOpc);
                      unsigned LdarOp, unsigned StlrOp, unsigned CmpOp,
                      unsigned ExtendImm, unsigned ZeroReg,
  struct ConditionalBlocks {
char AArch64ExpandPseudo::ID = 0;
  assert(MO.isReg() && MO.getReg());
  uint64_t RenamableState =
      MI.getOperand(0).isRenamable() ? RegState::Renamable : 0;
  uint64_t Imm = MI.getOperand(1).getImm();
  if (DstReg == AArch64::XZR || DstReg == AArch64::WZR) {
    MI.eraseFromParent();
  SmallVector<MachineInstrBuilder, 4> MIBS;
    bool LastItem = std::next(I) == E;
    case AArch64::ORRWri:
    case AArch64::ORRXri:
          .add(MI.getOperand(0))
          .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::EONXrs:
    case AArch64::EORXrs:
    case AArch64::ORRWrs:
    case AArch64::ORRXrs: {
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::ANDXri:
    case AArch64::EORXri:
          .add(MI.getOperand(0))
          .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::MOVNWi:
    case AArch64::MOVNXi:
    case AArch64::MOVZWi:
    case AArch64::MOVZXi: {
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::MOVKWi:
    case AArch64::MOVKXi: {
      bool DstIsDead = MI.getOperand(0).isDead();
  MI.eraseFromParent();
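// expandCMP_SWAP lowers the word-sized-and-smaller CMP_SWAP_* pseudos into an
// explicit load-exclusive/store-exclusive retry loop (LDAXR*/STLXR*) spread
// across newly created basic blocks.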
bool AArch64ExpandPseudo::expandCMP_SWAP(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, unsigned LdarOp,
    unsigned StlrOp, unsigned CmpOp, unsigned ExtendImm, unsigned ZeroReg,
    MachineBasicBlock::iterator &NextMBBI) {
  const MachineOperand &Dest = MI.getOperand(0);
  Register StatusReg = MI.getOperand(1).getReg();
  bool StatusDead = MI.getOperand(1).isDead();
  assert(!MI.getOperand(2).isUndef() && "cannot handle undef");
  Register DesiredReg = MI.getOperand(3).getReg();
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), DoneBB);
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::MOVZWi), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(CmpOp), ZeroReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::Bcc))
  LoadCmpBB->addSuccessor(DoneBB);
  LoadCmpBB->addSuccessor(StoreBB);
  BuildMI(StoreBB, MIMD, TII->get(StlrOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  LivePhysRegs LiveRegs;
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
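// expandCMP_SWAP_128 handles the 128-bit compare-and-swap pseudos with an
// LDXP/STXP loop (or acquire/release variants); a separate failure block
// store-exclusives the original value back so the exclusive monitor is
// cleared before exiting.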
bool AArch64ExpandPseudo::expandCMP_SWAP_128(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI) {
  MachineOperand &DestLo = MI.getOperand(0);
  MachineOperand &DestHi = MI.getOperand(1);
  Register StatusReg = MI.getOperand(2).getReg();
  bool StatusDead = MI.getOperand(2).isDead();
  assert(!MI.getOperand(3).isUndef() && "cannot handle undef");
  Register DesiredLoReg = MI.getOperand(4).getReg();
  Register DesiredHiReg = MI.getOperand(5).getReg();
  Register NewLoReg = MI.getOperand(6).getReg();
  Register NewHiReg = MI.getOperand(7).getReg();
  unsigned LdxpOp, StxpOp;
  switch (MI.getOpcode()) {
  case AArch64::CMP_SWAP_128_MONOTONIC:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128_RELEASE:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STLXPX;
    break;
  case AArch64::CMP_SWAP_128_ACQUIRE:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STLXPX;
    break;
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), FailBB);
  MF->insert(++FailBB->getIterator(), DoneBB);
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CBNZW))
  LoadCmpBB->addSuccessor(FailBB);
  LoadCmpBB->addSuccessor(StoreBB);
  BuildMI(StoreBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);
  BuildMI(FailBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(FailBB, MIMD, TII->get(AArch64::CBNZW))
  FailBB->addSuccessor(LoadCmpBB);
  FailBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  LivePhysRegs LiveRegs;
  FailBB->clearLiveIns();
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
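// expand_DestructiveOp rewrites an SVE destructive pseudo into its real
// destructive encoding, inserting a MOVPRFX (predicated-zeroing or
// unpredicated) when the destination register does not already equal the
// destructive operand.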
bool AArch64ExpandPseudo::expand_DestructiveOp(
    MachineInstr &MI,
    MachineBasicBlock &MBB,
    MachineBasicBlock::iterator MBBI) {
  bool DstIsDead = MI.getOperand(0).isDead();
  unsigned PredIdx, DOPIdx, SrcIdx, Src2Idx;
    if (DstReg == MI.getOperand(3).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 3, 2);
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 2, 3);
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(2, 3, 3);
    std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 2, 3, 4);
    if (DstReg == MI.getOperand(3).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 3, 4, 2);
    } else if (DstReg == MI.getOperand(4).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 4, 3, 2);
    std::tie(DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 1, 2);
  bool DOPRegIsUnique = false;
    DOPRegIsUnique = DstReg != MI.getOperand(SrcIdx).getReg();
    DOPRegIsUnique =
        DstReg != MI.getOperand(DOPIdx).getReg() ||
        MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg();
    DOPRegIsUnique = true;
    DOPRegIsUnique =
        DstReg != MI.getOperand(DOPIdx).getReg() ||
        (MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg() &&
         MI.getOperand(DOPIdx).getReg() != MI.getOperand(Src2Idx).getReg());
  uint64_t ElementSize = TII->getElementSizeForOpcode(Opcode);
  unsigned MovPrfx, LSLZero, MovPrfxZero;
  switch (ElementSize) {
  case AArch64::ElementSizeNone:
  case AArch64::ElementSizeB:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_B;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_B;
    break;
  case AArch64::ElementSizeH:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_H;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_H;
    break;
  case AArch64::ElementSizeS:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_S;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_S;
    break;
  case AArch64::ElementSizeD:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_D;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_D;
    break;
  MachineInstrBuilder PRFX, DOP;
    assert(DOPRegIsUnique && "The destructive operand should be unique");
    assert(ElementSize != AArch64::ElementSizeNone &&
           "This instruction is unpredicated");
639 .
addReg(
MI.getOperand(PredIdx).getReg())
640 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState);
655 .
add(
MI.getOperand(PredIdx))
659 }
else if (DstReg !=
MI.getOperand(DOPIdx).getReg()) {
660 assert(DOPRegIsUnique &&
"The destructive operand should be unique");
663 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState);
677 DOP.
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
678 .
add(
MI.getOperand(PredIdx))
679 .
add(
MI.getOperand(SrcIdx));
685 DOP.
add(
MI.getOperand(PredIdx))
686 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
687 .
add(
MI.getOperand(SrcIdx));
690 DOP.
add(
MI.getOperand(PredIdx))
691 .
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
692 .
add(
MI.getOperand(SrcIdx))
693 .
add(
MI.getOperand(Src2Idx));
696 DOP.
addReg(
MI.getOperand(DOPIdx).getReg(), DOPRegState)
697 .
add(
MI.getOperand(SrcIdx))
698 .
add(
MI.getOperand(Src2Idx));
703 transferImpOps(
MI, PRFX, DOP);
706 transferImpOps(
MI, DOP, DOP);
708 MI.eraseFromParent();
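// expandSetTagLoop turns the STGloop_wback/STZGloop_wback pseudos into an
// explicit loop of post-indexed ST(Z)2G stores, tagging 32 bytes per
// iteration and counting the size register down to zero.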
bool AArch64ExpandPseudo::expandSetTagLoop(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI) {
  Register AddressReg = MI.getOperand(1).getReg();
  bool ZeroData = MI.getOpcode() == AArch64::STZGloop_wback;
  const unsigned OpCode1 =
      ZeroData ? AArch64::STZGPostIndex : AArch64::STGPostIndex;
  const unsigned OpCode2 =
      ZeroData ? AArch64::STZ2GPostIndex : AArch64::ST2GPostIndex;
  unsigned Size = MI.getOperand(2).getImm();
  if (Size % (16 * 2) != 0) {
  MF->insert(++LoopBB->getIterator(), DoneBB);
  LoopBB->addSuccessor(LoopBB);
  LoopBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  LivePhysRegs LiveRegs;
  LoopBB->clearLiveIns();
  DoneBB->clearLiveIns();
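// expandSVESpillFill splits a multi-register SVE spill/fill pseudo into N
// consecutive single-register LDR/STR instructions at increasing immediate
// offsets from the same base register.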
bool AArch64ExpandPseudo::expandSVESpillFill(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MBBI,
                                             unsigned Opc, unsigned N) {
  assert((Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI ||
          Opc == AArch64::LDR_PXI || Opc == AArch64::STR_PXI) &&
         "Unexpected opcode");
  unsigned RState =
      (Opc == AArch64::LDR_ZXI || Opc == AArch64::LDR_PXI) ? RegState::Define
                                                           : 0;
  unsigned sub0 = (Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI)
                      ? AArch64::zsub0
                      : AArch64::psub0;
  const TargetRegisterInfo *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
    int ImmOffset = MI.getOperand(2).getImm() + Offset;
    bool Kill = (Offset + 1 == N) ? MI.getOperand(1).isKill() : false;
    assert(ImmOffset >= -256 && ImmOffset < 256 &&
           "Immediate spill offset out of range");
  MI.eraseFromParent();
                              unsigned RegMaskStartIdx) {
  while (!MBBI->getOperand(RegMaskStartIdx).isRegMask()) {
    assert(MOP.isReg() && "can only add register operands");
        MOP.getReg(), false, true, false,
    Call->addOperand(MO);
                                 unsigned RegMaskStartIdx) {
  unsigned Opc = CallTarget.isGlobal() ? AArch64::BL : AArch64::BLR;
860 "invalid operand for regular call");
bool AArch64ExpandPseudo::expandCALL_RVMARKER(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) {
  MachineOperand &RVTarget = MI.getOperand(0);
  bool DoEmitMarker = MI.getOperand(1).getImm();
  assert(RVTarget.isGlobal() && "invalid operand for attached call");
  MachineInstr *OriginalCall = nullptr;
  if (MI.getOpcode() == AArch64::BLRA_RVMARKER) {
    const MachineOperand &CallTarget = MI.getOperand(2);
    const MachineOperand &Key = MI.getOperand(3);
    const MachineOperand &IntDisc = MI.getOperand(4);
    const MachineOperand &AddrDisc = MI.getOperand(5);
887 "Invalid auth call key");
    MachineOperand Ops[] = {CallTarget, Key, IntDisc, AddrDisc};
    assert(MI.getOpcode() == AArch64::BLR_RVMARKER && "unknown rvmarker MI");
  if (MI.shouldUpdateAdditionalCallInfo())
  MI.eraseFromParent();
  finalizeBundle(MBB, OriginalCall->getIterator(),
                 std::next(RVCall->getIterator()));
bool AArch64ExpandPseudo::expandCALL_BTI(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MBBI) {
  if (MI.shouldUpdateAdditionalCallInfo())
  MI.eraseFromParent();
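// expandStoreSwiftAsyncContext stores the Swift async context into its frame
// slot; on arm64e the context pointer is first signed (PACDB with an
// address-discriminated modifier) before the store.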
bool AArch64ExpandPseudo::expandStoreSwiftAsyncContext(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) {
  if (STI.getTargetTriple().getArchName() != "arm64e") {
  unsigned Opc = Offset >= 0 ? AArch64::ADDXri : AArch64::SUBXri;
AArch64ExpandPseudo::ConditionalBlocks
AArch64ExpandPseudo::expandConditionalPseudo(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MBBI,
                                             DebugLoc DL,
                                             MachineInstrBuilder &Branch) {
         "Unexpected unreachable in block");
  MachineInstr &PrevMI = *std::prev(MBBI);
  MachineBasicBlock *CondBB = MBB.splitAt(PrevMI, true);
  MachineBasicBlock *EndBB =
      std::next(MBBI) == CondBB->end()
  return {*CondBB, *EndBB};
MachineBasicBlock *
AArch64ExpandPseudo::expandRestoreZASave(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MBBI) {
  MachineInstr &MI = *MBBI;
  MachineInstrBuilder Branch =
  auto [CondBB, EndBB] = expandConditionalPseudo(MBB, MBBI, DL, Branch);
  MachineInstrBuilder MIB =
  for (unsigned I = 2; I < MI.getNumOperands(); ++I)
    MIB.add(MI.getOperand(I));
  MI.eraseFromParent();
  [[maybe_unused]] auto *RI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
  auto [CondBB, EndBB] = expandConditionalPseudo(MBB, MBBI, DL, Branch);
  for (unsigned I = 3; I < MI.getNumOperands(); ++I)
    MIB.add(MI.getOperand(I));
      .addImm(AArch64SysReg::TPIDR2_EL0)
  bool ZeroZA = MI.getOperand(1).getImm() != 0;
  bool ZeroZT0 = MI.getOperand(2).getImm() != 0;
    assert(MI.definesRegister(AArch64::ZAB0, RI) && "should define ZA!");
    assert(MI.definesRegister(AArch64::ZT0, RI) && "should define ZT0!");
  MI.eraseFromParent();
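// expandCondSMToggle expands MSRpstatePseudo: it tests the saved PSTATE.SM
// value with TBZ/TBNZ and conditionally executes the SMSTART/SMSTOP
// (MSRpstatesvcrImm1) in the split-off block, so the streaming-mode toggle
// only happens when the modes actually differ.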
MachineBasicBlock *
AArch64ExpandPseudo::expandCondSMToggle(MachineBasicBlock &MBB,
                                        MachineBasicBlock::iterator MBBI) {
  MachineInstr &MI = *MBBI;
      MI.getParent()->successors().begin() ==
          MI.getParent()->successors().end()) {
    MI.eraseFromParent();
  switch (MI.getOperand(2).getImm()) {
    Opc = AArch64::TBNZW;
    Opc = AArch64::TBZW;
  auto PStateSM = MI.getOperand(3).getReg();
  unsigned SMReg32 = TRI->getSubReg(PStateSM, AArch64::sub_32);
  MachineInstrBuilder Tbx =
  auto [CondBB, EndBB] = expandConditionalPseudo(MBB, MBBI, DL, Tbx);
  MachineInstrBuilder MIB = BuildMI(CondBB, CondBB.back(), MI.getDebugLoc(),
                                    TII->get(AArch64::MSRpstatesvcrImm1));
  MIB.add(MI.getOperand(0));
  MIB.add(MI.getOperand(1));
  for (unsigned i = 4; i < MI.getNumOperands(); ++i)
    MIB.add(MI.getOperand(i));
  MI.eraseFromParent();
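// expandMultiVecPseudo picks between the contiguous and strided forms of an
// SME2 multi-vector load depending on which register class the destination
// tuple was allocated to.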
bool AArch64ExpandPseudo::expandMultiVecPseudo(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    TargetRegisterClass ContiguousClass, TargetRegisterClass StridedClass,
    unsigned ContiguousOp, unsigned StridedOpc) {
  MachineInstr &MI = *MBBI;
1208 .
add(
MI.getOperand(0))
1209 .
add(
MI.getOperand(1))
1210 .
add(
MI.getOperand(2))
1211 .
add(
MI.getOperand(3));
1212 transferImpOps(
MI, MIB, MIB);
1213 MI.eraseFromParent();
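// expandFormTuplePseudo copies each input Z register into the matching
// zsub0+I subregister of the destination tuple, skipping copies whose source
// already equals the target subregister.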
bool AArch64ExpandPseudo::expandFormTuplePseudo(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI, unsigned Size) {
  MachineInstr &MI = *MBBI;
  Register ReturnTuple = MI.getOperand(0).getReg();
  const TargetRegisterInfo *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
  for (unsigned I = 0; I < Size; ++I) {
    Register FormTupleOpReg = MI.getOperand(I + 1).getReg();
    Register ReturnTupleSubReg =
        TRI->getSubReg(ReturnTuple, AArch64::zsub0 + I);
    if (FormTupleOpReg != ReturnTupleSubReg)
  MI.eraseFromParent();
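// expandMI is the central dispatcher: given a single machine instruction, it
// expands the pseudo in place and returns true, or returns false if the
// instruction needs no expansion.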
bool AArch64ExpandPseudo::expandMI(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MBBI,
                                   MachineBasicBlock::iterator &NextMBBI) {
  MachineInstr &MI = *MBBI;
  unsigned Opcode = MI.getOpcode();
  if (OrigInstr != -1) {
    auto &Orig = TII->get(OrigInstr);
      return expand_DestructiveOp(MI, MBB, MBBI);
  case AArch64::BSPv8i8:
  case AArch64::BSPv16i8: {
    if (DstReg == MI.getOperand(3).getReg()) {
              TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BITv8i8
                                                  : AArch64::BITv16i8))
              .add(MI.getOperand(0))
              .add(MI.getOperand(3))
              .add(MI.getOperand(2))
              .add(MI.getOperand(1));
      transferImpOps(MI, I, I);
    } else if (DstReg == MI.getOperand(2).getReg()) {
              TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BIFv8i8
                                                  : AArch64::BIFv16i8))
              .add(MI.getOperand(0))
              .add(MI.getOperand(2))
              .add(MI.getOperand(3))
              .add(MI.getOperand(1));
      transferImpOps(MI, I, I);
      if (DstReg == MI.getOperand(1).getReg()) {
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                    : AArch64::BSLv16i8))
                .add(MI.getOperand(0))
                .add(MI.getOperand(1))
                .add(MI.getOperand(2))
                .add(MI.getOperand(3));
        transferImpOps(MI, I, I);
            MI.getOperand(1).isKill() &&
            MI.getOperand(1).getReg() != MI.getOperand(2).getReg() &&
            MI.getOperand(1).getReg() != MI.getOperand(3).getReg());
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::ORRv8i8
                                                    : AArch64::ORRv16i8))
                .addReg(MI.getOperand(1).getReg(), RegState)
                .addReg(MI.getOperand(1).getReg(), RegState);
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                    : AArch64::BSLv16i8))
                .add(MI.getOperand(0))
                               MI.getOperand(0).isRenamable()))
                .add(MI.getOperand(2))
                .add(MI.getOperand(3));
        transferImpOps(MI, I2, I2);
    MI.eraseFromParent();
  case AArch64::ADDWrr:
  case AArch64::SUBWrr:
  case AArch64::ADDXrr:
  case AArch64::SUBXrr:
  case AArch64::ADDSWrr:
  case AArch64::SUBSWrr:
  case AArch64::ADDSXrr:
  case AArch64::SUBSXrr:
  case AArch64::ANDWrr:
  case AArch64::ANDXrr:
  case AArch64::BICWrr:
  case AArch64::BICXrr:
  case AArch64::ANDSWrr:
  case AArch64::ANDSXrr:
  case AArch64::BICSWrr:
  case AArch64::BICSXrr:
  case AArch64::EONWrr:
  case AArch64::EONXrr:
  case AArch64::EORWrr:
  case AArch64::EORXrr:
  case AArch64::ORNWrr:
  case AArch64::ORNXrr:
  case AArch64::ORRWrr:
  case AArch64::ORRXrr: {
    switch (MI.getOpcode()) {
    case AArch64::ADDWrr:  Opcode = AArch64::ADDWrs;  break;
    case AArch64::SUBWrr:  Opcode = AArch64::SUBWrs;  break;
    case AArch64::ADDXrr:  Opcode = AArch64::ADDXrs;  break;
    case AArch64::SUBXrr:  Opcode = AArch64::SUBXrs;  break;
    case AArch64::ADDSWrr: Opcode = AArch64::ADDSWrs; break;
    case AArch64::SUBSWrr: Opcode = AArch64::SUBSWrs; break;
    case AArch64::ADDSXrr: Opcode = AArch64::ADDSXrs; break;
    case AArch64::SUBSXrr: Opcode = AArch64::SUBSXrs; break;
    case AArch64::ANDWrr:  Opcode = AArch64::ANDWrs;  break;
    case AArch64::ANDXrr:  Opcode = AArch64::ANDXrs;  break;
    case AArch64::BICWrr:  Opcode = AArch64::BICWrs;  break;
    case AArch64::BICXrr:  Opcode = AArch64::BICXrs;  break;
    case AArch64::ANDSWrr: Opcode = AArch64::ANDSWrs; break;
    case AArch64::ANDSXrr: Opcode = AArch64::ANDSXrs; break;
    case AArch64::BICSWrr: Opcode = AArch64::BICSWrs; break;
    case AArch64::BICSXrr: Opcode = AArch64::BICSXrs; break;
    case AArch64::EONWrr:  Opcode = AArch64::EONWrs;  break;
    case AArch64::EONXrr:  Opcode = AArch64::EONXrs;  break;
    case AArch64::EORWrr:  Opcode = AArch64::EORWrs;  break;
    case AArch64::EORXrr:  Opcode = AArch64::EORXrs;  break;
    case AArch64::ORNWrr:  Opcode = AArch64::ORNWrs;  break;
    case AArch64::ORNXrr:  Opcode = AArch64::ORNXrs;  break;
    case AArch64::ORRWrr:  Opcode = AArch64::ORRWrs;  break;
    case AArch64::ORRXrr:  Opcode = AArch64::ORRXrs;  break;
    MachineInstr *NewMI = MF.CreateMachineInstr(
        TII->get(Opcode), MI.getDebugLoc(), true);
    MachineInstrBuilder MIB1(MF, NewMI);
    MIB1->setPCSections(MF, MI.getPCSections());
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB1);
    if (auto DebugNumber = MI.peekDebugInstrNum())
    MI.eraseFromParent();
  case AArch64::LOADgot: {
    const MachineOperand &MO1 = MI.getOperand(1);
                  TII->get(AArch64::LDRXl), DstReg);
             "Only expect globals, externalsymbols, or constant pools");
      MachineFunction &MF = *MI.getParent()->getParent();
      MachineInstrBuilder MIB1 =
      MachineInstrBuilder MIB2;
      if (MF.getSubtarget<AArch64Subtarget>().isTargetILP32()) {
        unsigned Reg32 = TRI->getSubReg(DstReg, AArch64::sub_32);
        unsigned DstFlags = MI.getOperand(0).getTargetFlags();
                   .add(MI.getOperand(0))
               "Only expect globals, externalsymbols, or constant pools");
      if (MI.peekDebugInstrNum() != 0)
      transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
  case AArch64::MOVaddrBA: {
    MachineFunction &MF = *MI.getParent()->getParent();
    if (MF.getSubtarget<AArch64Subtarget>().isTargetMachO()) {
      assert(MI.getOperand(1).getOffset() == 0 && "unexpected offset");
                  TII->get(AArch64::LDRXui), DstReg)
      transferImpOps(MI, MIB1, MIB2);
      MI.eraseFromParent();
  case AArch64::MOVaddr:
  case AArch64::MOVaddrJT:
  case AArch64::MOVaddrCP:
  case AArch64::MOVaddrTLS:
  case AArch64::MOVaddrEXT: {
    assert(DstReg != AArch64::XZR);
    MachineInstrBuilder MIB1 =
            .add(MI.getOperand(1));
      auto Tag = MI.getOperand(1);
      Tag.setOffset(0x100000000);
    MachineInstrBuilder MIB2 =
            .add(MI.getOperand(0))
            .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
  case AArch64::ADDlowTLS:
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    MI.eraseFromParent();
  case AArch64::MOVbaseTLS: {
    auto SysReg = AArch64SysReg::TPIDR_EL0;
    if (MF->getSubtarget<AArch64Subtarget>().useEL3ForTP())
      SysReg = AArch64SysReg::TPIDR_EL3;
    else if (MF->getSubtarget<AArch64Subtarget>().useEL2ForTP())
      SysReg = AArch64SysReg::TPIDR_EL2;
    else if (MF->getSubtarget<AArch64Subtarget>().useEL1ForTP())
      SysReg = AArch64SysReg::TPIDR_EL1;
    else if (MF->getSubtarget<AArch64Subtarget>().useROEL0ForTP())
      SysReg = AArch64SysReg::TPIDRRO_EL0;
    MI.eraseFromParent();
  case AArch64::MOVi32imm:
  case AArch64::MOVi64imm:
  case AArch64::RET_ReallyLR: {
    MachineInstrBuilder MIB =
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::CMP_SWAP_8:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRB, AArch64::STLXRB,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTB, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_16:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRH, AArch64::STLXRH,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTH, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_32:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRW, AArch64::STLXRW,
                          AArch64::SUBSWrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_64:
    return expandCMP_SWAP(MBB, MBBI,
                          AArch64::LDAXRX, AArch64::STLXRX, AArch64::SUBSXrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::XZR, NextMBBI);
  case AArch64::CMP_SWAP_128:
  case AArch64::CMP_SWAP_128_RELEASE:
  case AArch64::CMP_SWAP_128_ACQUIRE:
  case AArch64::CMP_SWAP_128_MONOTONIC:
    return expandCMP_SWAP_128(MBB, MBBI, NextMBBI);
  case AArch64::AESMCrrTied:
  case AArch64::AESIMCrrTied: {
    MachineInstrBuilder MIB =
        BuildMI(MBB, MBBI, MI.getDebugLoc(),
                TII->get(Opcode == AArch64::AESMCrrTied ? AArch64::AESMCrr
                                                        : AArch64::AESIMCrr))
            .add(MI.getOperand(0))
            .add(MI.getOperand(1));
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::IRGstack: {
    const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
    const AArch64FrameLowering *TFI =
        MF.getSubtarget<AArch64Subtarget>().getFrameLowering();
    StackOffset FrameRegOffset = TFI->resolveFrameOffsetReference(
    if (FrameRegOffset) {
      SrcReg = MI.getOperand(0).getReg();
                      FrameRegOffset, TII);
        .add(MI.getOperand(0))
        .add(MI.getOperand(2));
    MI.eraseFromParent();
  case AArch64::TAGPstack: {
    int64_t Offset = MI.getOperand(2).getImm();
            TII->get(Offset >= 0 ? AArch64::ADDG : AArch64::SUBG))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(4));
    MI.eraseFromParent();
  case AArch64::STGloop_wback:
  case AArch64::STZGloop_wback:
    return expandSetTagLoop(MBB, MBBI, NextMBBI);
  case AArch64::STGloop:
  case AArch64::STZGloop:
    report_fatal_error(
        "Non-writeback variants of STGloop / STZGloop should not "
        "survive past PrologEpilogInserter.");
  case AArch64::STR_ZZZZXI:
  case AArch64::STR_ZZZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 4);
  case AArch64::STR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 3);
  case AArch64::STR_ZZXI:
  case AArch64::STR_ZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 2);
  case AArch64::STR_PPXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_PXI, 2);
  case AArch64::LDR_ZZZZXI:
  case AArch64::LDR_ZZZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 4);
  case AArch64::LDR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 3);
  case AArch64::LDR_ZZXI:
  case AArch64::LDR_ZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 2);
  case AArch64::LDR_PPXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_PXI, 2);
  case AArch64::BLR_RVMARKER:
  case AArch64::BLRA_RVMARKER:
    return expandCALL_RVMARKER(MBB, MBBI);
  case AArch64::BLR_BTI:
    return expandCALL_BTI(MBB, MBBI);
  case AArch64::StoreSwiftAsyncContext:
    return expandStoreSwiftAsyncContext(MBB, MBBI);
  case AArch64::RestoreZAPseudo:
  case AArch64::CommitZASavePseudo:
  case AArch64::MSRpstatePseudo: {
    auto *NewMBB = [&] {
      switch (Opcode) {
      case AArch64::RestoreZAPseudo:
        return expandRestoreZASave(MBB, MBBI);
      case AArch64::CommitZASavePseudo:
        return expandCommitZASave(MBB, MBBI);
      case AArch64::MSRpstatePseudo:
        return expandCondSMToggle(MBB, MBBI);
  case AArch64::InOutZAUsePseudo:
  case AArch64::RequiresZASavePseudo:
  case AArch64::RequiresZT0SavePseudo:
  case AArch64::SMEStateAllocPseudo:
  case AArch64::COALESCER_BARRIER_FPR16:
  case AArch64::COALESCER_BARRIER_FPR32:
  case AArch64::COALESCER_BARRIER_FPR64:
  case AArch64::COALESCER_BARRIER_FPR128:
    MI.eraseFromParent();
  case AArch64::LD1B_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1B_2Z_IMM, AArch64::LD1B_2Z_STRIDED_IMM);
  case AArch64::LD1H_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1H_2Z_IMM, AArch64::LD1H_2Z_STRIDED_IMM);
  case AArch64::LD1W_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1W_2Z_IMM, AArch64::LD1W_2Z_STRIDED_IMM);
  case AArch64::LD1D_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1D_2Z_IMM, AArch64::LD1D_2Z_STRIDED_IMM);
  case AArch64::LDNT1B_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1B_2Z_IMM, AArch64::LDNT1B_2Z_STRIDED_IMM);
  case AArch64::LDNT1H_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1H_2Z_IMM, AArch64::LDNT1H_2Z_STRIDED_IMM);
  case AArch64::LDNT1W_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1W_2Z_IMM, AArch64::LDNT1W_2Z_STRIDED_IMM);
  case AArch64::LDNT1D_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1D_2Z_IMM, AArch64::LDNT1D_2Z_STRIDED_IMM);
  case AArch64::LD1B_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1B_2Z,
                                AArch64::LD1B_2Z_STRIDED);
  case AArch64::LD1H_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1H_2Z,
                                AArch64::LD1H_2Z_STRIDED);
  case AArch64::LD1W_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1W_2Z,
                                AArch64::LD1W_2Z_STRIDED);
  case AArch64::LD1D_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1D_2Z,
                                AArch64::LD1D_2Z_STRIDED);
  case AArch64::LDNT1B_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1B_2Z, AArch64::LDNT1B_2Z_STRIDED);
  case AArch64::LDNT1H_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1H_2Z, AArch64::LDNT1H_2Z_STRIDED);
  case AArch64::LDNT1W_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1W_2Z, AArch64::LDNT1W_2Z_STRIDED);
  case AArch64::LDNT1D_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1D_2Z, AArch64::LDNT1D_2Z_STRIDED);
  case AArch64::LD1B_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1B_4Z_IMM, AArch64::LD1B_4Z_STRIDED_IMM);
  case AArch64::LD1H_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1H_4Z_IMM, AArch64::LD1H_4Z_STRIDED_IMM);
  case AArch64::LD1W_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1W_4Z_IMM, AArch64::LD1W_4Z_STRIDED_IMM);
  case AArch64::LD1D_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1D_4Z_IMM, AArch64::LD1D_4Z_STRIDED_IMM);
  case AArch64::LDNT1B_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1B_4Z_IMM, AArch64::LDNT1B_4Z_STRIDED_IMM);
  case AArch64::LDNT1H_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1H_4Z_IMM, AArch64::LDNT1H_4Z_STRIDED_IMM);
  case AArch64::LDNT1W_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1W_4Z_IMM, AArch64::LDNT1W_4Z_STRIDED_IMM);
  case AArch64::LDNT1D_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1D_4Z_IMM, AArch64::LDNT1D_4Z_STRIDED_IMM);
  case AArch64::LD1B_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1B_4Z,
                                AArch64::LD1B_4Z_STRIDED);
  case AArch64::LD1H_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1H_4Z,
                                AArch64::LD1H_4Z_STRIDED);
  case AArch64::LD1W_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1W_4Z,
                                AArch64::LD1W_4Z_STRIDED);
  case AArch64::LD1D_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1D_4Z,
                                AArch64::LD1D_4Z_STRIDED);
  case AArch64::LDNT1B_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1B_4Z, AArch64::LDNT1B_4Z_STRIDED);
  case AArch64::LDNT1H_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1H_4Z, AArch64::LDNT1H_4Z_STRIDED);
  case AArch64::LDNT1W_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1W_4Z, AArch64::LDNT1W_4Z_STRIDED);
  case AArch64::LDNT1D_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1D_4Z, AArch64::LDNT1D_4Z_STRIDED);
  case AArch64::FORM_TRANSPOSED_REG_TUPLE_X2_PSEUDO:
    return expandFormTuplePseudo(MBB, MBBI, NextMBBI, 2);
  case AArch64::FORM_TRANSPOSED_REG_TUPLE_X4_PSEUDO:
    return expandFormTuplePseudo(MBB, MBBI, NextMBBI, 4);
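// expandMBB and runOnMachineFunction drive the pass: each block is scanned
// instruction by instruction and expandMI is applied until no expandable
// pseudo remains.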
bool AArch64ExpandPseudo::expandMBB(MachineBasicBlock &MBB) {
bool AArch64ExpandPseudo::runOnMachineFunction(MachineFunction &MF) {
  for (auto &MBB : MF)
  return new AArch64ExpandPseudo();