#define AARCH64_EXPAND_PSEUDO_NAME "AArch64 pseudo instruction expansion pass"
                      unsigned LdarOp, unsigned StlrOp, unsigned CmpOp,
                      unsigned ExtendImm, unsigned ZeroReg,
char AArch64ExpandPseudo::ID = 0;
  assert(MO.isReg() && MO.getReg());
  if (DstReg == AArch64::XZR || DstReg == AArch64::WZR) {
    MI.eraseFromParent();
  bool LastItem = std::next(I) == E;
  case AArch64::ORRWri:
  case AArch64::ORRXri:
        .add(MI.getOperand(0))
        .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
    bool DstIsDead = MI.getOperand(0).isDead();
  case AArch64::ANDXri:
        .add(MI.getOperand(0))
        .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
    bool DstIsDead = MI.getOperand(0).isDead();
  case AArch64::MOVNWi:
  case AArch64::MOVNXi:
  case AArch64::MOVZWi:
  case AArch64::MOVZXi: {
    bool DstIsDead = MI.getOperand(0).isDead();
  case AArch64::MOVKWi:
  case AArch64::MOVKXi: {
    bool DstIsDead = MI.getOperand(0).isDead();
  MI.eraseFromParent();
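// Illustrative sketch, not part of this file: a wide immediate that needs more
// than one instruction is materialized by AArch64_IMM::expandMOVImm as a
// MOVZ/MOVN plus up to three MOVK chunks, e.g. for 0x123456789ABCDEF0
// (the register is a placeholder):
//     movz x0, #0xdef0
//     movk x0, #0x9abc, lsl #16
//     movk x0, #0x5678, lsl #32
//     movk x0, #0x1234, lsl #48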
bool AArch64ExpandPseudo::expandCMP_SWAP(
    unsigned StlrOp, unsigned CmpOp, unsigned ExtendImm, unsigned ZeroReg,
  Register StatusReg = MI.getOperand(1).getReg();
  bool StatusDead = MI.getOperand(1).isDead();
  assert(!MI.getOperand(2).isUndef() && "cannot handle undef");
  Register DesiredReg = MI.getOperand(3).getReg();
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), DoneBB);
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::MOVZWi), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(CmpOp), ZeroReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::Bcc))
  LoadCmpBB->addSuccessor(DoneBB);
  LoadCmpBB->addSuccessor(StoreBB);
  BuildMI(StoreBB, MIMD, TII->get(StlrOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
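// Illustrative sketch, not part of this file: the byte/half/word/doubleword
// CMP_SWAP pseudos become a load-exclusive/store-exclusive retry loop of
// roughly this shape (register and label names are placeholders):
// .LloadCmp:
//     mov    wStatus, #0
//     ldaxr  wDest, [xAddr]
//     cmp    wDest, wDesired
//     b.ne   .Ldone
// .Lstore:
//     stlxr  wStatus, wNew, [xAddr]
//     cbnz   wStatus, .LloadCmp
// .Ldone: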
bool AArch64ExpandPseudo::expandCMP_SWAP_128(
  Register StatusReg = MI.getOperand(2).getReg();
  bool StatusDead = MI.getOperand(2).isDead();
  assert(!MI.getOperand(3).isUndef() && "cannot handle undef");
  Register DesiredLoReg = MI.getOperand(4).getReg();
  Register DesiredHiReg = MI.getOperand(5).getReg();
  Register NewLoReg = MI.getOperand(6).getReg();
  Register NewHiReg = MI.getOperand(7).getReg();
  unsigned LdxpOp, StxpOp;
  switch (MI.getOpcode()) {
  case AArch64::CMP_SWAP_128_MONOTONIC:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128_RELEASE:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STLXPX;
    break;
  case AArch64::CMP_SWAP_128_ACQUIRE:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STLXPX;
    break;
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), FailBB);
  MF->insert(++FailBB->getIterator(), DoneBB);
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CBNZW))
  LoadCmpBB->addSuccessor(FailBB);
  LoadCmpBB->addSuccessor(StoreBB);
  BuildMI(StoreBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);
  BuildMI(FailBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(FailBB, MIMD, TII->get(AArch64::CBNZW))
  FailBB->addSuccessor(LoadCmpBB);
  FailBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  FailBB->clearLiveIns();
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
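// Illustrative sketch, not part of this file: the 128-bit compare-and-swap uses
// a paired exclusive load/store, and the failure path stores the loaded value
// back so the exclusive monitor is released with a store (names are
// placeholders; the acquire/release variants pick LDXP/LDAXP and STXP/STLXP
// in the switch above):
// .LloadCmp:
//     ldaxp  xDestLo, xDestHi, [xAddr]
//     cmp    xDestLo, xDesiredLo
//     cset   wStatus, ne
//     cmp    xDestHi, xDesiredHi
//     cinc   wStatus, wStatus, ne
//     cbnz   wStatus, .Lfail
// .Lstore:
//     stlxp  wStatus, xNewLo, xNewHi, [xAddr]
//     cbnz   wStatus, .LloadCmp
//     b      .Ldone
// .Lfail:
//     stlxp  wStatus, xDestLo, xDestHi, [xAddr]
//     cbnz   wStatus, .LloadCmp
// .Ldone: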
bool AArch64ExpandPseudo::expand_DestructiveOp(
  bool DstIsDead = MI.getOperand(0).isDead();
  unsigned PredIdx, DOPIdx, SrcIdx, Src2Idx;
  if (DstReg == MI.getOperand(3).getReg()) {
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 3, 2);
  std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 2, 3);
  std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(2, 3, 3);
  std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 2, 3, 4);
  if (DstReg == MI.getOperand(3).getReg()) {
    std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 3, 4, 2);
  } else if (DstReg == MI.getOperand(4).getReg()) {
    std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 4, 3, 2);
  bool DOPRegIsUnique = false;
  DOPRegIsUnique = DstReg != MI.getOperand(SrcIdx).getReg();
  DOPRegIsUnique =
      DstReg != MI.getOperand(DOPIdx).getReg() ||
      MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg();
  DOPRegIsUnique = true;
  DOPRegIsUnique =
      DstReg != MI.getOperand(DOPIdx).getReg() ||
      (MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg() &&
       MI.getOperand(DOPIdx).getReg() != MI.getOperand(Src2Idx).getReg());
  uint64_t ElementSize = TII->getElementSizeForOpcode(Opcode);
  unsigned MovPrfx, LSLZero, MovPrfxZero;
  switch (ElementSize) {
  case AArch64::ElementSizeB:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_B;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_B;
    break;
  case AArch64::ElementSizeH:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_H;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_H;
    break;
  case AArch64::ElementSizeS:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_S;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_S;
    break;
  case AArch64::ElementSizeD:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_D;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_D;
    break;
    assert(DOPRegIsUnique && "The destructive operand should be unique");
    assert(ElementSize != AArch64::ElementSizeNone &&
           "This instruction is unpredicated");
        .addReg(MI.getOperand(PredIdx).getReg())
        .addReg(MI.getOperand(DOPIdx).getReg());
        .add(MI.getOperand(PredIdx))
  } else if (DstReg != MI.getOperand(DOPIdx).getReg()) {
    assert(DOPRegIsUnique && "The destructive operand should be unique");
        .addReg(MI.getOperand(DOPIdx).getReg());
        .add(MI.getOperand(PredIdx))
        .add(MI.getOperand(SrcIdx));
    DOP.add(MI.getOperand(PredIdx))
        .add(MI.getOperand(SrcIdx));
    DOP.add(MI.getOperand(PredIdx))
        .add(MI.getOperand(SrcIdx))
        .add(MI.getOperand(Src2Idx));
    transferImpOps(MI, PRFX, DOP);
    transferImpOps(MI, DOP, DOP);
  MI.eraseFromParent();
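// Illustrative sketch, not part of this file: when the destination of a
// destructive SVE operation is not already the tied source register, a MOVPRFX
// is emitted so the real instruction can still clobber its first source:
//     movprfx z0, z1
//     add     z0.s, p0/m, z0.s, z2.s
// (register names are placeholders; the zeroing form uses
//  "movprfx z0.s, p0/z, z1" via MovPrfxZero instead.)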
bool AArch64ExpandPseudo::expandSetTagLoop(
  Register AddressReg = MI.getOperand(1).getReg();
  bool ZeroData = MI.getOpcode() == AArch64::STZGloop_wback;
  const unsigned OpCode1 =
      ZeroData ? AArch64::STZGPostIndex : AArch64::STGPostIndex;
  const unsigned OpCode2 =
      ZeroData ? AArch64::STZ2GPostIndex : AArch64::ST2GPostIndex;
  unsigned Size = MI.getOperand(2).getImm();
  if (Size % (16 * 2) != 0) {
  MF->insert(++LoopBB->getIterator(), DoneBB);
  LoopBB->addSuccessor(LoopBB);
  LoopBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  LoopBB->clearLiveIns();
  DoneBB->clearLiveIns();
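// Illustrative sketch, not part of this file: the writeback STGloop/STZGloop
// pseudos tag 32 bytes of memory per iteration with a post-indexed st2g/stz2g,
// counting the remaining size down until it reaches zero:
// .Lloop:
//     st2g  xAddr, [xAddr], #32
//     subs  xSize, xSize, #32
//     b.ne  .Lloop
// (register names are placeholders; an odd 16-byte remainder is handled with a
//  single stg outside the loop, per the Size % 32 check above.)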
                                           unsigned Opc, unsigned N) {
  int ImmOffset = MI.getOperand(2).getImm() + Offset;
  bool Kill = (Offset + 1 == N) ? MI.getOperand(1).isKill() : false;
  assert(ImmOffset >= -256 && ImmOffset < 256 &&
         "Immediate spill offset out of range");
      TRI->getSubReg(MI.getOperand(0).getReg(), AArch64::zsub0 + Offset),
  MI.eraseFromParent();
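// Illustrative sketch, not part of this file: a 4-register SVE spill pseudo
// (STR_ZZZZXI) is split into consecutive single-vector stores at adjacent
// vector-length-scaled slots (registers and offsets are placeholders):
//     str z0, [x8, #0, mul vl]
//     str z1, [x8, #1, mul vl]
//     str z2, [x8, #2, mul vl]
//     str z3, [x8, #3, mul vl]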
bool AArch64ExpandPseudo::expandCALL_RVMARKER(
         "invalid operand for regular call");
  assert(RVTarget.isGlobal() && "invalid operand for attached call");
  unsigned Opc = CallTarget.isGlobal() ? AArch64::BL : AArch64::BLR;
  unsigned RegMaskStartIdx = 2;
  while (!MI.getOperand(RegMaskStartIdx).isRegMask()) {
    auto MOP = MI.getOperand(RegMaskStartIdx);
    assert(MOP.isReg() && "can only add register operands");
        MOP.getReg(), /*Def=*/false, /*Implicit=*/true));
  if (MI.shouldUpdateCallSiteInfo())
  MI.eraseFromParent();
                 std::next(RVCall->getIterator()));
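// Illustrative sketch, not part of this file: BLR_RVMARKER expands to the call,
// the `mov x29, x29` marker recognised by the Objective-C runtime, and the
// attached runtime call, finalized into a single bundle so nothing can be
// scheduled between them:
//     bl   <callee>        ; or blr xN for an indirect call
//     mov  x29, x29        ; marker
//     bl   <RVTarget, e.g. objc_retainAutoreleasedReturnValue>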
828 "invalid operand for regular call");
829 unsigned Opc = CallTarget.
isGlobal() ? AArch64::BL : AArch64::BLR;
832 Call->addOperand(CallTarget);
841 if (
MI.shouldUpdateCallSiteInfo())
844 MI.eraseFromParent();
bool AArch64ExpandPseudo::expandStoreSwiftAsyncContext(
  if (STI.getTargetTriple().getArchName() != "arm64e") {
  unsigned Opc = Offset >= 0 ? AArch64::ADDXri : AArch64::SUBXri;
          MI.getParent()->successors().begin() !=
              MI.getParent()->successors().end()) &&
         "Unexpected unreachable in block that restores ZA");
      .add(MI.getOperand(0));
  for (unsigned I = 2; I < MI.getNumOperands(); ++I)
    MIB.add(MI.getOperand(I));
  MI.eraseFromParent();
      MI.getParent()->successors().begin() ==
          MI.getParent()->successors().end()) {
    MI.eraseFromParent();
  auto PStateSM = MI.getOperand(2).getReg();
  bool IsStreamingCallee = MI.getOperand(3).getImm();
  unsigned Opc = IsStreamingCallee ? AArch64::TBZX : AArch64::TBNZX;
                          TII->get(AArch64::MSRpstatesvcrImm1));
  MIB.add(MI.getOperand(0));
  MIB.add(MI.getOperand(1));
  for (unsigned i = 4; i < MI.getNumOperands(); ++i)
    MIB.add(MI.getOperand(i));
  MI.eraseFromParent();
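// Illustrative sketch, not part of this file: the conditional streaming-mode
// toggle tests the PStateSM value and branches over the PSTATE.SM write when
// no mode change is needed (register and label names are placeholders):
//     tbz  xPStateSM, #0, .Lskip   ; or tbnz, depending on the callee's mode
//     smstart sm                   ; the MSRpstatesvcrImm1 built above
// .Lskip: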
  unsigned Opcode = MI.getOpcode();
  if (OrigInstr != -1) {
    auto &Orig = TII->get(OrigInstr);
      return expand_DestructiveOp(MI, MBB, MBBI);
  case AArch64::BSPv8i8:
  case AArch64::BSPv16i8: {
    if (DstReg == MI.getOperand(3).getReg()) {
      // Expand to BIT
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BITv8i8
                                                    : AArch64::BITv16i8))
          .add(MI.getOperand(0))
          .add(MI.getOperand(3))
          .add(MI.getOperand(2))
          .add(MI.getOperand(1));
    } else if (DstReg == MI.getOperand(2).getReg()) {
      // Expand to BIF
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BIFv8i8
                                                    : AArch64::BIFv16i8))
          .add(MI.getOperand(0))
          .add(MI.getOperand(2))
          .add(MI.getOperand(3))
          .add(MI.getOperand(1));
    } else {
      // Expand to BSL, reusing the mask register or copying it first
      if (DstReg == MI.getOperand(1).getReg()) {
                  TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                      : AArch64::BSLv16i8))
            .add(MI.getOperand(0))
            .add(MI.getOperand(1))
            .add(MI.getOperand(2))
            .add(MI.getOperand(3));
      } else {
                  TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::ORRv8i8
                                                      : AArch64::ORRv16i8))
            .add(MI.getOperand(1))
            .add(MI.getOperand(1));
                  TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                      : AArch64::BSLv16i8))
            .add(MI.getOperand(0))
            .add(MI.getOperand(2))
            .add(MI.getOperand(3));
      }
    }
    MI.eraseFromParent();
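// Illustrative summary, not part of this file: BSP computes, per bit,
//   dst = (op1 & op2) | (~op1 & op3)
// and is lowered to whichever real instruction can reuse its tied operand:
//   dst == op3   ->  BIT dst, op2, op1
//   dst == op2   ->  BIF dst, op3, op1
//   dst == op1   ->  BSL dst, op2, op3
//   otherwise    ->  ORR dst, op1, op1 (copy the mask), then BSL dst, op2, op3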
  case AArch64::ADDWrr:
  case AArch64::SUBWrr:
  case AArch64::ADDXrr:
  case AArch64::SUBXrr:
  case AArch64::ADDSWrr:
  case AArch64::SUBSWrr:
  case AArch64::ADDSXrr:
  case AArch64::SUBSXrr:
  case AArch64::ANDWrr:
  case AArch64::ANDXrr:
  case AArch64::BICWrr:
  case AArch64::BICXrr:
  case AArch64::ANDSWrr:
  case AArch64::ANDSXrr:
  case AArch64::BICSWrr:
  case AArch64::BICSXrr:
  case AArch64::EONWrr:
  case AArch64::EONXrr:
  case AArch64::EORWrr:
  case AArch64::EORXrr:
  case AArch64::ORNWrr:
  case AArch64::ORNXrr:
  case AArch64::ORRWrr:
  case AArch64::ORRXrr: {
    switch (MI.getOpcode()) {
    case AArch64::ADDWrr:  Opcode = AArch64::ADDWrs;  break;
    case AArch64::SUBWrr:  Opcode = AArch64::SUBWrs;  break;
    case AArch64::ADDXrr:  Opcode = AArch64::ADDXrs;  break;
    case AArch64::SUBXrr:  Opcode = AArch64::SUBXrs;  break;
    case AArch64::ADDSWrr: Opcode = AArch64::ADDSWrs; break;
    case AArch64::SUBSWrr: Opcode = AArch64::SUBSWrs; break;
    case AArch64::ADDSXrr: Opcode = AArch64::ADDSXrs; break;
    case AArch64::SUBSXrr: Opcode = AArch64::SUBSXrs; break;
    case AArch64::ANDWrr:  Opcode = AArch64::ANDWrs;  break;
    case AArch64::ANDXrr:  Opcode = AArch64::ANDXrs;  break;
    case AArch64::BICWrr:  Opcode = AArch64::BICWrs;  break;
    case AArch64::BICXrr:  Opcode = AArch64::BICXrs;  break;
    case AArch64::ANDSWrr: Opcode = AArch64::ANDSWrs; break;
    case AArch64::ANDSXrr: Opcode = AArch64::ANDSXrs; break;
    case AArch64::BICSWrr: Opcode = AArch64::BICSWrs; break;
    case AArch64::BICSXrr: Opcode = AArch64::BICSXrs; break;
    case AArch64::EONWrr:  Opcode = AArch64::EONWrs;  break;
    case AArch64::EONXrr:  Opcode = AArch64::EONXrs;  break;
    case AArch64::EORWrr:  Opcode = AArch64::EORWrs;  break;
    case AArch64::EORXrr:  Opcode = AArch64::EORXrs;  break;
    case AArch64::ORNWrr:  Opcode = AArch64::ORNWrs;  break;
    case AArch64::ORNXrr:  Opcode = AArch64::ORNXrs;  break;
    case AArch64::ORRWrr:  Opcode = AArch64::ORRWrs;  break;
    case AArch64::ORRXrr:  Opcode = AArch64::ORRXrs;  break;
        TII->get(Opcode), MI.getDebugLoc(), /*NoImplicit=*/true);
    MIB1->setPCSections(MF, MI.getPCSections());
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB1);
    if (auto DebugNumber = MI.peekDebugInstrNum())
    MI.eraseFromParent();
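// Illustrative note, not part of this file: the plain register-register forms
// above are rebuilt as their shifted-register counterparts with an LSL #0
// shift amount, which leaves the semantics unchanged, e.g.
//     add w0, w1, w2   ==>   add w0, w1, w2, lsl #0
// (register names are placeholders).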
  case AArch64::LOADgot: {
                                      TII->get(AArch64::LDRXl), DstReg);
           "Only expect globals, externalsymbols, or constant pools");
      unsigned Reg32 = TRI->getSubReg(DstReg, AArch64::sub_32);
      unsigned DstFlags = MI.getOperand(0).getTargetFlags();
          .add(MI.getOperand(0))
             "Only expect globals, externalsymbols, or constant pools");
      transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
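// Illustrative sketch, not part of this file: under the small code model a GOT
// access becomes an ADRP/LDR pair against the GOT slot, while the tiny code
// model uses the single literal load (LDRXl) seen above:
//     adrp x0, :got:sym
//     ldr  x0, [x0, :got_lo12:sym]
// (x0 and sym are placeholders.)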
  case AArch64::MOVaddrBA: {
      assert(MI.getOperand(1).getOffset() == 0 && "unexpected offset");
                          TII->get(AArch64::LDRXui), DstReg)
      transferImpOps(MI, MIB1, MIB2);
      MI.eraseFromParent();
  case AArch64::MOVaddr:
  case AArch64::MOVaddrJT:
  case AArch64::MOVaddrCP:
  case AArch64::MOVaddrTLS:
  case AArch64::MOVaddrEXT: {
    assert(DstReg != AArch64::XZR);
        .add(MI.getOperand(1));
      auto Tag = MI.getOperand(1);
      Tag.setOffset(0x100000000);
        .add(MI.getOperand(0))
        .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
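// Illustrative sketch, not part of this file: the MOVaddr* pseudos take the
// usual small-code-model form of a page address plus a page offset:
//     adrp xDst, sym              ; MO_PAGE
//     add  xDst, xDst, :lo12:sym  ; MO_PAGEOFF
// (xDst is a placeholder.)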
  case AArch64::ADDlowTLS:
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    MI.eraseFromParent();
  case AArch64::MOVbaseTLS: {
    auto SysReg = AArch64SysReg::TPIDR_EL0;
      SysReg = AArch64SysReg::TPIDR_EL3;
      SysReg = AArch64SysReg::TPIDR_EL2;
      SysReg = AArch64SysReg::TPIDR_EL1;
    MI.eraseFromParent();
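// Illustrative expansion, not copied from this file: the TLS base pointer is
// read with an MRS from whichever TPIDR_ELx register was selected above, e.g.
//     mrs xDst, TPIDR_EL0
// (xDst is a placeholder.)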
  case AArch64::MOVi32imm:
  case AArch64::MOVi64imm:
  case AArch64::RET_ReallyLR: {
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::CMP_SWAP_8:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRB, AArch64::STLXRB,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTB, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_16:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRH, AArch64::STLXRH,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTH, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_32:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRW, AArch64::STLXRW,
                          AArch64::SUBSWrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_64:
    return expandCMP_SWAP(MBB, MBBI,
                          AArch64::LDAXRX, AArch64::STLXRX, AArch64::SUBSXrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::XZR, NextMBBI);
  case AArch64::CMP_SWAP_128:
  case AArch64::CMP_SWAP_128_RELEASE:
  case AArch64::CMP_SWAP_128_ACQUIRE:
  case AArch64::CMP_SWAP_128_MONOTONIC:
    return expandCMP_SWAP_128(MBB, MBBI, NextMBBI);
  case AArch64::AESMCrrTied:
  case AArch64::AESIMCrrTied: {
        TII->get(Opcode == AArch64::AESMCrrTied ? AArch64::AESMCrr
                                                : AArch64::AESIMCrr))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1));
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::IRGstack: {
    StackOffset FrameRegOffset = TFI->resolveFrameOffsetReference(
        MF, BaseOffset, false /*isFixed*/, false /*isSVE*/, FrameReg,
    if (FrameRegOffset) {
      SrcReg = MI.getOperand(0).getReg();
                      FrameRegOffset, TII);
        .add(MI.getOperand(0))
        .add(MI.getOperand(2));
    MI.eraseFromParent();
  case AArch64::TAGPstack: {
    int64_t Offset = MI.getOperand(2).getImm();
        TII->get(Offset >= 0 ? AArch64::ADDG : AArch64::SUBG))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(4));
    MI.eraseFromParent();
  case AArch64::STGloop_wback:
  case AArch64::STZGloop_wback:
    return expandSetTagLoop(MBB, MBBI, NextMBBI);
  case AArch64::STGloop:
  case AArch64::STZGloop:
    report_fatal_error(
        "Non-writeback variants of STGloop / STZGloop should not "
        "survive past PrologEpilogInserter.");
  case AArch64::STR_ZZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 4);
  case AArch64::STR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 3);
  case AArch64::STR_ZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 2);
  case AArch64::LDR_ZZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 4);
  case AArch64::LDR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 3);
  case AArch64::LDR_ZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 2);
  case AArch64::BLR_RVMARKER:
    return expandCALL_RVMARKER(MBB, MBBI);
  case AArch64::BLR_BTI:
    return expandCALL_BTI(MBB, MBBI);
  case AArch64::StoreSwiftAsyncContext:
    return expandStoreSwiftAsyncContext(MBB, MBBI);
  case AArch64::RestoreZAPseudo: {
    auto *NewMBB = expandRestoreZA(MBB, MBBI);
  case AArch64::MSRpstatePseudo: {
    auto *NewMBB = expandCondSMToggle(MBB, MBBI);
  case AArch64::OBSCURE_COPY: {
    if (MI.getOperand(0).getReg() != MI.getOperand(1).getReg()) {
          .add(MI.getOperand(0))
          .add(MI.getOperand(1))
    MI.eraseFromParent();
  for (auto &MBB : MF)
  return new AArch64ExpandPseudo();