#define AARCH64_EXPAND_PSEUDO_NAME "AArch64 pseudo instruction expansion pass"
  bool expandMultiVecPseudo(MachineBasicBlock &MBB,
                            MachineBasicBlock::iterator MBBI,
                            TargetRegisterClass ContiguousClass,
                            TargetRegisterClass StridedClass,
                            unsigned ContiguousOpc, unsigned StridedOpc);
  bool expandCMP_SWAP(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
                      unsigned LdarOp, unsigned StlrOp, unsigned CmpOp,
                      unsigned ExtendImm, unsigned ZeroReg,
                      MachineBasicBlock::iterator &NextMBBI);
char AArch64ExpandPseudo::ID = 0;
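// transferImpOps copies the pseudo's trailing implicit operands onto the
// expansion: implicit uses are added to UseMI and implicit defs to DefMI.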
    assert(MO.isReg() && MO.getReg());
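// expandMOVImm lowers a MOVi32imm/MOVi64imm pseudo into real move-immediate
// instructions (ORR with a logical immediate, MOVZ/MOVN plus MOVK pieces)
// computed by AArch64_IMM::expandMOVImm. As a sketch, a 64-bit constant such
// as 0x123456789abcdef0 becomes roughly:
//   movz x0, #0xdef0
//   movk x0, #0x9abc, lsl #16
//   movk x0, #0x5678, lsl #32
//   movk x0, #0x1234, lsl #48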
  uint64_t RenamableState =
      MI.getOperand(0).isRenamable() ? RegState::Renamable : 0;
  uint64_t Imm = MI.getOperand(1).getImm();

  if (DstReg == AArch64::XZR || DstReg == AArch64::WZR) {
    // Writing to the zero register is a no-op; just delete the pseudo.
    MI.eraseFromParent();

  SmallVector<MachineInstrBuilder, 4> MIBS;
  for (auto I = Insn.begin(), E = Insn.end(); I != E; ++I) {
    bool LastItem = std::next(I) == E;
    case AArch64::ORRWri:
    case AArch64::ORRXri:
            .add(MI.getOperand(0))
            .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
        bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::ORRWrs:
    case AArch64::ORRXrs: {
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::ANDXri:
    case AArch64::EORXri:
            .add(MI.getOperand(0))
            .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
        bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::MOVNWi:
    case AArch64::MOVNXi:
    case AArch64::MOVZWi:
    case AArch64::MOVZXi: {
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::MOVKWi:
    case AArch64::MOVKXi: {
      bool DstIsDead = MI.getOperand(0).isDead();

  MI.eraseFromParent();
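// expandCMP_SWAP lowers a word-or-smaller CMP_SWAP pseudo into an explicit
// load-exclusive/store-exclusive loop across three new blocks. The emitted
// control flow is roughly (sketch, register names illustrative):
//   loadcmp:
//     movz  wStatus, #0
//     ldaxr xDest, [xAddr]
//     cmp   xDest, xDesired
//     b.ne  done
//   store:
//     stlxr wStatus, xNew, [xAddr]
//     cbnz  wStatus, loadcmp
//   done: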
bool AArch64ExpandPseudo::expandCMP_SWAP(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, unsigned LdarOp,
    unsigned StlrOp, unsigned CmpOp, unsigned ExtendImm, unsigned ZeroReg,
    MachineBasicBlock::iterator &NextMBBI) {
  const MachineOperand &Dest = MI.getOperand(0);
  Register StatusReg = MI.getOperand(1).getReg();
  bool StatusDead = MI.getOperand(1).isDead();
  assert(!MI.getOperand(2).isUndef() && "cannot handle undef");
  Register DesiredReg = MI.getOperand(3).getReg();
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), DoneBB);

  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::MOVZWi), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(CmpOp), ZeroReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::Bcc))
  LoadCmpBB->addSuccessor(DoneBB);
  LoadCmpBB->addSuccessor(StoreBB);

  BuildMI(StoreBB, MIMD, TII->get(StlrOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);

  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);

  MI.eraseFromParent();

  LivePhysRegs LiveRegs;
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
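// expandCMP_SWAP_128 handles the 16-byte variants. It selects LDXP/LDAXP and
// STXP/STLXP pairs from the memory ordering encoded in the opcode, and adds a
// separate fail block that store-exclusives the original value back so the
// exclusive monitor is always cleared before leaving the sequence.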
bool AArch64ExpandPseudo::expandCMP_SWAP_128(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI) {
  MachineOperand &DestLo = MI.getOperand(0);
  MachineOperand &DestHi = MI.getOperand(1);
  Register StatusReg = MI.getOperand(2).getReg();
  bool StatusDead = MI.getOperand(2).isDead();
  assert(!MI.getOperand(3).isUndef() && "cannot handle undef");
  Register DesiredLoReg = MI.getOperand(4).getReg();
  Register DesiredHiReg = MI.getOperand(5).getReg();
  Register NewLoReg = MI.getOperand(6).getReg();
  Register NewHiReg = MI.getOperand(7).getReg();
  unsigned LdxpOp, StxpOp;

  switch (MI.getOpcode()) {
  case AArch64::CMP_SWAP_128_MONOTONIC:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128_RELEASE:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STLXPX;
    break;
  case AArch64::CMP_SWAP_128_ACQUIRE:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STLXPX;
    break;
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), FailBB);
  MF->insert(++FailBB->getIterator(), DoneBB);

  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CBNZW))
  LoadCmpBB->addSuccessor(FailBB);
  LoadCmpBB->addSuccessor(StoreBB);

  BuildMI(StoreBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);

  BuildMI(FailBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(FailBB, MIMD, TII->get(AArch64::CBNZW))
  FailBB->addSuccessor(LoadCmpBB);
  FailBB->addSuccessor(DoneBB);

  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);

  MI.eraseFromParent();

  LivePhysRegs LiveRegs;
  FailBB->clearLiveIns();
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
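// expand_DestructiveOp turns an SVE pseudo into its real destructive form,
// inserting a MOVPRFX when the destination does not already equal the
// destructive operand (or when a zeroing-predicate form is requested); for
// reversible operations it can instead pick the reversed opcode so the
// registers line up. A sketch of the prefixed form:
//   movprfx z0, z1            // unpredicated copy of z1 into z0
//   fsub    z0.s, p0/m, z0.s, z2.s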
bool AArch64ExpandPseudo::expand_DestructiveOp(
    MachineInstr &MI, MachineBasicBlock &MBB,
    MachineBasicBlock::iterator MBBI) {
  bool DstIsDead = MI.getOperand(0).isDead();
  unsigned PredIdx, DOPIdx, SrcIdx, Src2Idx;

    if (DstReg == MI.getOperand(3).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 3, 2);
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 2, 3);
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(2, 3, 3);
    std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 2, 3, 4);
    if (DstReg == MI.getOperand(3).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 3, 4, 2);
    } else if (DstReg == MI.getOperand(4).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 4, 3, 2);
    std::tie(DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 1, 2);
  bool DOPRegIsUnique = false;
    DOPRegIsUnique = DstReg != MI.getOperand(SrcIdx).getReg();
    DOPRegIsUnique =
        DstReg != MI.getOperand(DOPIdx).getReg() ||
        MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg();
    DOPRegIsUnique = true;
    DOPRegIsUnique =
        DstReg != MI.getOperand(DOPIdx).getReg() ||
        (MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg() &&
         MI.getOperand(DOPIdx).getReg() != MI.getOperand(Src2Idx).getReg());
  uint64_t ElementSize = TII->getElementSizeForOpcode(Opcode);
  unsigned MovPrfx, LSLZero, MovPrfxZero;
  switch (ElementSize) {
  case AArch64::ElementSizeNone:
  case AArch64::ElementSizeB:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_B;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_B;
    break;
  case AArch64::ElementSizeH:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_H;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_H;
    break;
  case AArch64::ElementSizeS:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_S;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_S;
    break;
  case AArch64::ElementSizeD:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_D;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_D;
    break;
  MachineInstrBuilder PRFX, DOP;
    assert(DOPRegIsUnique && "The destructive operand should be unique");
    assert(ElementSize != AArch64::ElementSizeNone &&
           "This instruction is unpredicated");
        .addReg(MI.getOperand(PredIdx).getReg())
        .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState);
        .add(MI.getOperand(PredIdx))
  } else if (DstReg != MI.getOperand(DOPIdx).getReg()) {
    assert(DOPRegIsUnique && "The destructive operand should be unique");
        .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState);

    DOP.addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(PredIdx))
        .add(MI.getOperand(SrcIdx));

    DOP.add(MI.getOperand(PredIdx))
        .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(SrcIdx));

    DOP.add(MI.getOperand(PredIdx))
        .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(SrcIdx))
        .add(MI.getOperand(Src2Idx));

    DOP.addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(SrcIdx))
        .add(MI.getOperand(Src2Idx));

    transferImpOps(MI, PRFX, DOP);
    transferImpOps(MI, DOP, DOP);

  MI.eraseFromParent();
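// expandSetTagLoop rewrites an STGloop_wback/STZGloop_wback pseudo into a
// loop of post-indexed ST(Z)G/ST(Z)2G stores that tags (and optionally
// zeroes) the region, counting the size operand down to zero.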
bool AArch64ExpandPseudo::expandSetTagLoop(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI) {
  Register AddressReg = MI.getOperand(1).getReg();
  bool ZeroData = MI.getOpcode() == AArch64::STZGloop_wback;
  const unsigned OpCode1 =
      ZeroData ? AArch64::STZGPostIndex : AArch64::STGPostIndex;
  const unsigned OpCode2 =
      ZeroData ? AArch64::STZ2GPostIndex : AArch64::ST2GPostIndex;

  unsigned Size = MI.getOperand(2).getImm();
  if (Size % (16 * 2) != 0) {

  MF->insert(++LoopBB->getIterator(), DoneBB);

  LoopBB->addSuccessor(LoopBB);
  LoopBB->addSuccessor(DoneBB);

  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);

  MI.eraseFromParent();

  LivePhysRegs LiveRegs;
  LoopBB->clearLiveIns();
  DoneBB->clearLiveIns();
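// expandSVESpillFill splits a multi-register SVE spill/fill pseudo (e.g.
// STR_ZZZZXI) into N single-register LDR/STR instructions over the tuple's
// sub-registers, with incrementing immediate offsets.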
bool AArch64ExpandPseudo::expandSVESpillFill(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MBBI,
                                             unsigned Opc, unsigned N) {
  assert((Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI ||
          Opc == AArch64::LDR_PXI || Opc == AArch64::STR_PXI) &&
         "Unexpected opcode");
  unsigned RState = (Opc == AArch64::LDR_ZXI || Opc == AArch64::LDR_PXI)
                        ? RegState::Define
                        : 0;
  unsigned sub0 = (Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI)
                      ? AArch64::zsub0
                      : AArch64::psub0;
  const TargetRegisterInfo *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
  for (unsigned Offset = 0; Offset < N; ++Offset) {
    int ImmOffset = MI.getOperand(2).getImm() + Offset;
    bool Kill = (Offset + 1 == N) ? MI.getOperand(1).isKill() : false;
    assert(ImmOffset >= -256 && ImmOffset < 256 &&
           "Immediate spill offset out of range");
  MI.eraseFromParent();
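// createCallWithOps builds the real call instruction and then re-attaches
// every register operand that preceded the register mask on the pseudo as an
// implicit use, so argument registers stay live across the expansion.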
    unsigned RegMaskStartIdx) {
  while (!MBBI->getOperand(RegMaskStartIdx).isRegMask()) {
    assert(MOP.isReg() && "can only add register operands");
    Call->addOperand(MachineOperand::CreateReg(
        MOP.getReg(), /*isDef=*/false, /*isImp=*/true, /*isKill=*/false,
    Call->addOperand(MO);

// createCall picks BL for a direct (global) target and BLR for an indirect
// one, then delegates to createCallWithOps.
    unsigned RegMaskStartIdx) {
  unsigned Opc = CallTarget.isGlobal() ? AArch64::BL : AArch64::BLR;
  assert((CallTarget.isGlobal() || CallTarget.isReg()) &&
         "invalid operand for regular call");
bool AArch64ExpandPseudo::expandCALL_RVMARKER(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) {
  MachineOperand &RVTarget = MI.getOperand(0);
  bool DoEmitMarker = MI.getOperand(1).getImm();
  assert(RVTarget.isGlobal() && "invalid operand for attached call");

  MachineInstr *OriginalCall = nullptr;

  if (MI.getOpcode() == AArch64::BLRA_RVMARKER) {
    // Authenticated (ptrauth) call: carry the key and discriminators over.
    const MachineOperand &CallTarget = MI.getOperand(2);
    const MachineOperand &Key = MI.getOperand(3);
    const MachineOperand &IntDisc = MI.getOperand(4);
    const MachineOperand &AddrDisc = MI.getOperand(5);

    assert((Key.getImm() == AArch64PACKey::IA ||
            Key.getImm() == AArch64PACKey::IB) &&
           "Invalid auth call key");

    MachineOperand Ops[] = {CallTarget, Key, IntDisc, AddrDisc};
  } else {
    assert(MI.getOpcode() == AArch64::BLR_RVMARKER && "unknown rvmarker MI");
  }

  if (MI.shouldUpdateAdditionalCallInfo())
    MBB.getParent()->moveAdditionalCallInfo(&MI, OriginalCall);

  MI.eraseFromParent();
  finalizeBundle(MBB, OriginalCall->getIterator(),
                 std::next(RVCall->getIterator()));
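// expandCALL_BTI expands BLR_BTI into the call followed by a BTI landing
// pad, bundled together so the BTI stays glued to the call.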
bool AArch64ExpandPseudo::expandCALL_BTI(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MBBI) {
  if (MI.shouldUpdateAdditionalCallInfo())

  MI.eraseFromParent();
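// expandStoreSwiftAsyncContext stores the Swift async context at a fixed
// offset from the frame pointer; on arm64e the context pointer is
// additionally signed, address-discriminated, before the store.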
bool AArch64ExpandPseudo::expandStoreSwiftAsyncContext(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) {
  if (STI.getTargetTriple().getArchName() != "arm64e") {
  unsigned Opc = Offset >= 0 ? AArch64::ADDXri : AArch64::SUBXri;
  bool IsRestoreZA = MI.getOpcode() == AArch64::RestoreZAPseudo;
  assert((MI.getOpcode() == AArch64::RestoreZAPseudo ||
          MI.getOpcode() == AArch64::CommitZASavePseudo) &&
         "Expected ZA commit or restore");
  assert((std::next(MBBI) != MBB.end() ||
          MI.getParent()->successors().begin() !=
              MI.getParent()->successors().end()) &&
         "Unexpected unreachable in block that restores ZA");

      TII->get(IsRestoreZA ? AArch64::CBZX : AArch64::CBNZX))
      .add(MI.getOperand(0));

  Branch.addMBB(SMBB);

  MBB.addSuccessor(EndBB);

  for (unsigned I = 2; I < MI.getNumOperands(); ++I)
    MIB.add(MI.getOperand(I));

  [[maybe_unused]] auto *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();

      .addImm(AArch64SysReg::TPIDR2_EL0)

  bool ZeroZA = MI.getOperand(1).getImm() != 0;
    assert(MI.definesRegister(AArch64::ZAB0, TRI) && "should define ZA!");

  MI.eraseFromParent();
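// expandCondSMToggle removes an MSRpstatePseudo entirely when it sits before
// an unreachable, and otherwise splits the block and guards the smstart or
// smstop with a TBZW/TBNZW test on the caller's PSTATE.SM value so the mode
// is only toggled when it actually differs.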
MachineBasicBlock *
AArch64ExpandPseudo::expandCondSMToggle(MachineBasicBlock &MBB,
                                        MachineBasicBlock::iterator MBBI) {
  MachineInstr &MI = *MBBI;

  // If the block ends in an unreachable there is no point restoring
  // PSTATE.SM, and the split below would have no successor to work with.
  if (std::next(MBBI) == MBB.end() &&
      MI.getParent()->successors().begin() ==
          MI.getParent()->successors().end()) {
    MI.eraseFromParent();

  switch (MI.getOperand(2).getImm()) {
    Opc = AArch64::TBNZW;
    break;
    Opc = AArch64::TBZW;
    break;

  auto PStateSM = MI.getOperand(3).getReg();
  unsigned SMReg32 = TRI->getSubReg(PStateSM, AArch64::sub_32);
  MachineInstrBuilder Tbx =

  MachineInstr &PrevMI = *std::prev(MBBI);
  MachineBasicBlock *SMBB = MBB.splitAt(PrevMI, /*UpdateLiveIns=*/true);
  MachineBasicBlock *EndBB = std::next(MI.getIterator()) == SMBB->end()

  MachineInstrBuilder MIB = BuildMI(*SMBB, SMBB->begin(), MI.getDebugLoc(),
                                    TII->get(AArch64::MSRpstatesvcrImm1));
  MIB.add(MI.getOperand(0));
  MIB.add(MI.getOperand(1));
  for (unsigned i = 4; i < MI.getNumOperands(); ++i)
    MIB.add(MI.getOperand(i));

  MI.eraseFromParent();
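// expandMultiVecPseudo picks between the contiguous and the strided form of
// an SME2 multi-vector load based on which register class the destination
// tuple was assigned after register allocation.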
bool AArch64ExpandPseudo::expandMultiVecPseudo(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    TargetRegisterClass ContiguousClass, TargetRegisterClass StridedClass,
    unsigned ContiguousOp, unsigned StridedOpc) {
  MachineInstr &MI = *MBBI;
      .add(MI.getOperand(0))
      .add(MI.getOperand(1))
      .add(MI.getOperand(2))
      .add(MI.getOperand(3));
  transferImpOps(MI, MIB, MIB);
  MI.eraseFromParent();
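// expandFormTuplePseudo copies each input vector into the matching zsub
// sub-register of the destination tuple, skipping copies that are already in
// place, and then removes the FORM_TRANSPOSED_REG_TUPLE pseudo.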
bool AArch64ExpandPseudo::expandFormTuplePseudo(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI, unsigned Size) {
  MachineInstr &MI = *MBBI;
  Register ReturnTuple = MI.getOperand(0).getReg();

  const TargetRegisterInfo *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
  for (unsigned I = 0; I < Size; ++I) {
    Register FormTupleOpReg = MI.getOperand(I + 1).getReg();
    Register ReturnTupleSubReg =
        TRI->getSubReg(ReturnTuple, AArch64::zsub0 + I);
    if (FormTupleOpReg != ReturnTupleSubReg)

  MI.eraseFromParent();
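// expandMI is the central dispatch: it first tries the SVE destructive-op
// table, then switches over every remaining pseudo opcode handled by this
// pass.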
bool AArch64ExpandPseudo::expandMI(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MBBI,
                                   MachineBasicBlock::iterator &NextMBBI) {
  MachineInstr &MI = *MBBI;
  unsigned Opcode = MI.getOpcode();

  // Check if we can expand the destructive op
  int OrigInstr = AArch64::getSVEPseudoMap(MI.getOpcode());
  if (OrigInstr != -1) {
    auto &Orig = TII->get(OrigInstr);
      return expand_DestructiveOp(MI, MBB, MBBI);
  case AArch64::BSPv8i8:
  case AArch64::BSPv16i8: {
    if (DstReg == MI.getOperand(3).getReg()) {
      // Expand to BIT
      auto I = BuildMI(MBB, MBBI, MI.getDebugLoc(),
                       TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BITv8i8
                                                           : AArch64::BITv16i8))
                   .add(MI.getOperand(0))
                   .add(MI.getOperand(3))
                   .add(MI.getOperand(2))
                   .add(MI.getOperand(1));
      transferImpOps(MI, I, I);
    } else if (DstReg == MI.getOperand(2).getReg()) {
      // Expand to BIF
      auto I = BuildMI(MBB, MBBI, MI.getDebugLoc(),
                       TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BIFv8i8
                                                           : AArch64::BIFv16i8))
                   .add(MI.getOperand(0))
                   .add(MI.getOperand(2))
                   .add(MI.getOperand(3))
                   .add(MI.getOperand(1));
      transferImpOps(MI, I, I);
    } else {
      // Expand to BSL, using an extra move if necessary.
      if (DstReg == MI.getOperand(1).getReg()) {
        auto I = BuildMI(MBB, MBBI, MI.getDebugLoc(),
                         TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                             : AArch64::BSLv16i8))
                     .add(MI.getOperand(0))
                     .add(MI.getOperand(1))
                     .add(MI.getOperand(2))
                     .add(MI.getOperand(3));
        transferImpOps(MI, I, I);
      } else {
        unsigned RegState = getKillRegState(
            MI.getOperand(1).isKill() &&
            MI.getOperand(1).getReg() != MI.getOperand(2).getReg() &&
            MI.getOperand(1).getReg() != MI.getOperand(3).getReg());
        BuildMI(MBB, MBBI, MI.getDebugLoc(),
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::ORRv8i8
                                                    : AArch64::ORRv16i8))
            .addReg(MI.getOperand(1).getReg(), RegState)
            .addReg(MI.getOperand(1).getReg(), RegState);
        auto I2 = BuildMI(MBB, MBBI, MI.getDebugLoc(),
                          TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                              : AArch64::BSLv16i8))
                      .add(MI.getOperand(0))
                      .addReg(DstReg,
                              RegState::Kill | getRenamableRegState(
                                  MI.getOperand(0).isRenamable()))
                      .add(MI.getOperand(2))
                      .add(MI.getOperand(3));
        transferImpOps(MI, I2, I2);
      }
    }
    MI.eraseFromParent();
  case AArch64::ADDWrr:
  case AArch64::SUBWrr:
  case AArch64::ADDXrr:
  case AArch64::SUBXrr:
  case AArch64::ADDSWrr:
  case AArch64::SUBSWrr:
  case AArch64::ADDSXrr:
  case AArch64::SUBSXrr:
  case AArch64::ANDWrr:
  case AArch64::ANDXrr:
  case AArch64::BICWrr:
  case AArch64::BICXrr:
  case AArch64::ANDSWrr:
  case AArch64::ANDSXrr:
  case AArch64::BICSWrr:
  case AArch64::BICSXrr:
  case AArch64::EONWrr:
  case AArch64::EONXrr:
  case AArch64::EORWrr:
  case AArch64::EORXrr:
  case AArch64::ORNWrr:
  case AArch64::ORNXrr:
  case AArch64::ORRWrr:
  case AArch64::ORRXrr: {
    switch (MI.getOpcode()) {
    case AArch64::ADDWrr:  Opcode = AArch64::ADDWrs;  break;
    case AArch64::SUBWrr:  Opcode = AArch64::SUBWrs;  break;
    case AArch64::ADDXrr:  Opcode = AArch64::ADDXrs;  break;
    case AArch64::SUBXrr:  Opcode = AArch64::SUBXrs;  break;
    case AArch64::ADDSWrr: Opcode = AArch64::ADDSWrs; break;
    case AArch64::SUBSWrr: Opcode = AArch64::SUBSWrs; break;
    case AArch64::ADDSXrr: Opcode = AArch64::ADDSXrs; break;
    case AArch64::SUBSXrr: Opcode = AArch64::SUBSXrs; break;
    case AArch64::ANDWrr:  Opcode = AArch64::ANDWrs;  break;
    case AArch64::ANDXrr:  Opcode = AArch64::ANDXrs;  break;
    case AArch64::BICWrr:  Opcode = AArch64::BICWrs;  break;
    case AArch64::BICXrr:  Opcode = AArch64::BICXrs;  break;
    case AArch64::ANDSWrr: Opcode = AArch64::ANDSWrs; break;
    case AArch64::ANDSXrr: Opcode = AArch64::ANDSXrs; break;
    case AArch64::BICSWrr: Opcode = AArch64::BICSWrs; break;
    case AArch64::BICSXrr: Opcode = AArch64::BICSXrs; break;
    case AArch64::EONWrr:  Opcode = AArch64::EONWrs;  break;
    case AArch64::EONXrr:  Opcode = AArch64::EONXrs;  break;
    case AArch64::EORWrr:  Opcode = AArch64::EORWrs;  break;
    case AArch64::EORXrr:  Opcode = AArch64::EORXrs;  break;
    case AArch64::ORNWrr:  Opcode = AArch64::ORNWrs;  break;
    case AArch64::ORNXrr:  Opcode = AArch64::ORNXrs;  break;
    case AArch64::ORRWrr:  Opcode = AArch64::ORRWrs;  break;
    case AArch64::ORRXrr:  Opcode = AArch64::ORRXrs;  break;

    MachineInstr *NewMI = MF.CreateMachineInstr(
        TII->get(Opcode), MI.getDebugLoc(), true);
    MachineInstrBuilder MIB1(MF, NewMI);
    MIB1->setPCSections(MF, MI.getPCSections());
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB1);
    if (auto DebugNumber = MI.peekDebugInstrNum())
    MI.eraseFromParent();
  case AArch64::LOADgot: {
    const MachineOperand &MO1 = MI.getOperand(1);
      // Tiny code model: load the GOT entry with a single literal load.
      MachineInstrBuilder MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(),
                                        TII->get(AArch64::LDRXl), DstReg);
             "Only expect globals, externalsymbols, or constant pools");
      // Small code model: materialize the GOT entry with ADRP + LDR.
      MachineFunction &MF = *MI.getParent()->getParent();
      MachineInstrBuilder MIB1 =
      MachineInstrBuilder MIB2;
      if (MF.getSubtarget<AArch64Subtarget>().isTargetILP32()) {
        unsigned Reg32 = TRI->getSubReg(DstReg, AArch64::sub_32);
        unsigned DstFlags = MI.getOperand(0).getTargetFlags();
            .add(MI.getOperand(0))
             "Only expect globals, externalsymbols, or constant pools");
    if (MI.peekDebugInstrNum() != 0)
    transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
  case AArch64::MOVaddrBA: {
    MachineFunction &MF = *MI.getParent()->getParent();
    if (MF.getSubtarget<AArch64Subtarget>().isTargetMachO()) {
      assert(MI.getOperand(1).getOffset() == 0 && "unexpected offset");
          TII->get(AArch64::LDRXui), DstReg)
      transferImpOps(MI, MIB1, MIB2);
      MI.eraseFromParent();
  case AArch64::MOVaddr:
  case AArch64::MOVaddrJT:
  case AArch64::MOVaddrCP:
  case AArch64::MOVaddrTLS:
  case AArch64::MOVaddrEXT: {
    // Expand into ADRP + ADD.
    assert(DstReg != AArch64::XZR);
    MachineInstrBuilder MIB1 =
        BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(AArch64::ADRP), DstReg)
            .add(MI.getOperand(1));

      auto Tag = MI.getOperand(1);
      Tag.setOffset(0x100000000);

    MachineInstrBuilder MIB2 =
        BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(AArch64::ADDXri))
            .add(MI.getOperand(0))
            .add(MI.getOperand(2))

    transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
  case AArch64::ADDlowTLS:
    // Produce a plain ADD.
    BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(AArch64::ADDXri))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
        .addImm(0);
    MI.eraseFromParent();
  case AArch64::MOVbaseTLS: {
    Register DstReg = MI.getOperand(0).getReg();
    auto SysReg = AArch64SysReg::TPIDR_EL0;
    MachineFunction *MF = MBB.getParent();
    if (MF->getSubtarget<AArch64Subtarget>().useEL3ForTP())
      SysReg = AArch64SysReg::TPIDR_EL3;
    else if (MF->getSubtarget<AArch64Subtarget>().useEL2ForTP())
      SysReg = AArch64SysReg::TPIDR_EL2;
    else if (MF->getSubtarget<AArch64Subtarget>().useEL1ForTP())
      SysReg = AArch64SysReg::TPIDR_EL1;
    else if (MF->getSubtarget<AArch64Subtarget>().useROEL0ForTP())
      SysReg = AArch64SysReg::TPIDRRO_EL0;
    BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(AArch64::MRS), DstReg)
        .addImm(SysReg);
    MI.eraseFromParent();
  case AArch64::MOVi32imm:
    return expandMOVImm(MBB, MBBI, 32);
  case AArch64::MOVi64imm:
    return expandMOVImm(MBB, MBBI, 64);
  case AArch64::RET_ReallyLR: {
    MachineInstrBuilder MIB =
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::CMP_SWAP_8:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRB, AArch64::STLXRB,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTB, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_16:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRH, AArch64::STLXRH,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTH, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_32:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRW, AArch64::STLXRW,
                          AArch64::SUBSWrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_64:
    return expandCMP_SWAP(MBB, MBBI,
                          AArch64::LDAXRX, AArch64::STLXRX, AArch64::SUBSXrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::XZR, NextMBBI);
  case AArch64::CMP_SWAP_128:
  case AArch64::CMP_SWAP_128_RELEASE:
  case AArch64::CMP_SWAP_128_ACQUIRE:
  case AArch64::CMP_SWAP_128_MONOTONIC:
    return expandCMP_SWAP_128(MBB, MBBI, NextMBBI);
  case AArch64::AESMCrrTied:
  case AArch64::AESIMCrrTied: {
    MachineInstrBuilder MIB =
        BuildMI(MBB, MBBI, MI.getDebugLoc(),
                TII->get(Opcode == AArch64::AESMCrrTied ? AArch64::AESMCrr
                                                        : AArch64::AESIMCrr))
            .add(MI.getOperand(0))
            .add(MI.getOperand(1));
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::IRGstack: {
    const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
    const AArch64FrameLowering *TFI =
        MF.getSubtarget<AArch64Subtarget>().getFrameLowering();

    StackOffset FrameRegOffset = TFI->resolveFrameOffsetReference(
        MF, BaseOffset, false /*isFixed*/, false /*isSVE*/, FrameReg,
    if (FrameRegOffset) {
      // Use the output register as a temporary for the adjusted base.
      SrcReg = MI.getOperand(0).getReg();
      emitFrameOffset(MBB, &MI, MI.getDebugLoc(), SrcReg, FrameReg,
                      FrameRegOffset, TII);
    }
        .add(MI.getOperand(0))
        .add(MI.getOperand(2));
    MI.eraseFromParent();
  case AArch64::TAGPstack: {
    int64_t Offset = MI.getOperand(2).getImm();
        TII->get(Offset >= 0 ? AArch64::ADDG : AArch64::SUBG))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(4));
    MI.eraseFromParent();
  case AArch64::STGloop_wback:
  case AArch64::STZGloop_wback:
    return expandSetTagLoop(MBB, MBBI, NextMBBI);
  case AArch64::STGloop:
  case AArch64::STZGloop:
    report_fatal_error(
        "Non-writeback variants of STGloop / STZGloop should not "
        "survive past PrologEpilogInserter.");
  case AArch64::STR_ZZZZXI:
  case AArch64::STR_ZZZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 4);
  case AArch64::STR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 3);
  case AArch64::STR_ZZXI:
  case AArch64::STR_ZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 2);
  case AArch64::STR_PPXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_PXI, 2);
  case AArch64::LDR_ZZZZXI:
  case AArch64::LDR_ZZZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 4);
  case AArch64::LDR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 3);
  case AArch64::LDR_ZZXI:
  case AArch64::LDR_ZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 2);
  case AArch64::LDR_PPXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_PXI, 2);
  case AArch64::BLR_RVMARKER:
  case AArch64::BLRA_RVMARKER:
    return expandCALL_RVMARKER(MBB, MBBI);
  case AArch64::BLR_BTI:
    return expandCALL_BTI(MBB, MBBI);
  case AArch64::StoreSwiftAsyncContext:
    return expandStoreSwiftAsyncContext(MBB, MBBI);
  case AArch64::CommitZASavePseudo:
  case AArch64::RestoreZAPseudo: {
    auto *NewMBB = expandCommitOrRestoreZASave(MBB, MBBI);
    if (NewMBB != &MBB)
      NextMBBI = MBB.end(); // The NextMBBI iterator is invalidated.
    return true;
  }
  case AArch64::MSRpstatePseudo: {
    auto *NewMBB = expandCondSMToggle(MBB, MBBI);
    if (NewMBB != &MBB)
      NextMBBI = MBB.end(); // The NextMBBI iterator is invalidated.
    return true;
  }
  case AArch64::InOutZAUsePseudo:
  case AArch64::RequiresZASavePseudo:
  case AArch64::SMEStateAllocPseudo:
  case AArch64::COALESCER_BARRIER_FPR16:
  case AArch64::COALESCER_BARRIER_FPR32:
  case AArch64::COALESCER_BARRIER_FPR64:
  case AArch64::COALESCER_BARRIER_FPR128:
    MI.eraseFromParent();
    return true;
  case AArch64::LD1B_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1B_2Z_IMM, AArch64::LD1B_2Z_STRIDED_IMM);
  case AArch64::LD1H_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1H_2Z_IMM, AArch64::LD1H_2Z_STRIDED_IMM);
  case AArch64::LD1W_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1W_2Z_IMM, AArch64::LD1W_2Z_STRIDED_IMM);
  case AArch64::LD1D_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1D_2Z_IMM, AArch64::LD1D_2Z_STRIDED_IMM);
  case AArch64::LDNT1B_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1B_2Z_IMM, AArch64::LDNT1B_2Z_STRIDED_IMM);
  case AArch64::LDNT1H_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1H_2Z_IMM, AArch64::LDNT1H_2Z_STRIDED_IMM);
  case AArch64::LDNT1W_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1W_2Z_IMM, AArch64::LDNT1W_2Z_STRIDED_IMM);
  case AArch64::LDNT1D_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1D_2Z_IMM, AArch64::LDNT1D_2Z_STRIDED_IMM);
  case AArch64::LD1B_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1B_2Z,
                                AArch64::LD1B_2Z_STRIDED);
  case AArch64::LD1H_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1H_2Z,
                                AArch64::LD1H_2Z_STRIDED);
  case AArch64::LD1W_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1W_2Z,
                                AArch64::LD1W_2Z_STRIDED);
  case AArch64::LD1D_2Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR2RegClass,
                                AArch64::ZPR2StridedRegClass, AArch64::LD1D_2Z,
                                AArch64::LD1D_2Z_STRIDED);
  case AArch64::LDNT1B_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1B_2Z, AArch64::LDNT1B_2Z_STRIDED);
  case AArch64::LDNT1H_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1H_2Z, AArch64::LDNT1H_2Z_STRIDED);
  case AArch64::LDNT1W_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1W_2Z, AArch64::LDNT1W_2Z_STRIDED);
  case AArch64::LDNT1D_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1D_2Z, AArch64::LDNT1D_2Z_STRIDED);
  case AArch64::LD1B_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1B_4Z_IMM, AArch64::LD1B_4Z_STRIDED_IMM);
  case AArch64::LD1H_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1H_4Z_IMM, AArch64::LD1H_4Z_STRIDED_IMM);
  case AArch64::LD1W_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1W_4Z_IMM, AArch64::LD1W_4Z_STRIDED_IMM);
  case AArch64::LD1D_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1D_4Z_IMM, AArch64::LD1D_4Z_STRIDED_IMM);
  case AArch64::LDNT1B_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1B_4Z_IMM, AArch64::LDNT1B_4Z_STRIDED_IMM);
  case AArch64::LDNT1H_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1H_4Z_IMM, AArch64::LDNT1H_4Z_STRIDED_IMM);
  case AArch64::LDNT1W_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1W_4Z_IMM, AArch64::LDNT1W_4Z_STRIDED_IMM);
  case AArch64::LDNT1D_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1D_4Z_IMM, AArch64::LDNT1D_4Z_STRIDED_IMM);
  case AArch64::LD1B_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1B_4Z,
                                AArch64::LD1B_4Z_STRIDED);
  case AArch64::LD1H_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1H_4Z,
                                AArch64::LD1H_4Z_STRIDED);
  case AArch64::LD1W_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1W_4Z,
                                AArch64::LD1W_4Z_STRIDED);
  case AArch64::LD1D_4Z_PSEUDO:
    return expandMultiVecPseudo(MBB, MBBI, AArch64::ZPR4RegClass,
                                AArch64::ZPR4StridedRegClass, AArch64::LD1D_4Z,
                                AArch64::LD1D_4Z_STRIDED);
  case AArch64::LDNT1B_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1B_4Z, AArch64::LDNT1B_4Z_STRIDED);
  case AArch64::LDNT1H_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1H_4Z, AArch64::LDNT1H_4Z_STRIDED);
  case AArch64::LDNT1W_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1W_4Z, AArch64::LDNT1W_4Z_STRIDED);
  case AArch64::LDNT1D_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1D_4Z, AArch64::LDNT1D_4Z_STRIDED);
  case AArch64::FORM_TRANSPOSED_REG_TUPLE_X2_PSEUDO:
    return expandFormTuplePseudo(MBB, MBBI, NextMBBI, 2);
  case AArch64::FORM_TRANSPOSED_REG_TUPLE_X4_PSEUDO:
    return expandFormTuplePseudo(MBB, MBBI, NextMBBI, 4);
bool AArch64ExpandPseudo::expandMBB(MachineBasicBlock &MBB) {

bool AArch64ExpandPseudo::runOnMachineFunction(MachineFunction &MF) {
  TII =
      static_cast<const AArch64InstrInfo *>(MF.getSubtarget().getInstrInfo());

  for (auto &MBB : MF)

/// Returns an instance of the pseudo instruction expansion pass.
FunctionPass *llvm::createAArch64ExpandPseudoPass() {
  return new AArch64ExpandPseudo();
}