#define DEBUG_TYPE "regalloc"

STATISTIC(NumCoalesced, "Number of copies coalesced");

static cl::opt<bool> IgnoreMissingDefs("rafast-ignore-missing-defs",
                                       cl::Hidden);
  RegAllocFast(const RegClassFilterFunc F = allocateAllRegClasses,
               bool ClearVirtRegs_ = true)
      : MachineFunctionPass(ID), ShouldAllocateClass(F),
        StackSlotForVirtReg(-1), ClearVirtRegs(ClearVirtRegs_) {}
  struct LiveReg {
    // ...
    bool Reloaded = false; ///< Register was reloaded.

    explicit LiveReg(Register VirtReg) : VirtReg(VirtReg) {}

    unsigned getSparseSetIndex() const {
      return Register::virtReg2Index(VirtReg);
    }
  };

  /// Entries for each virtual register currently held in a physical register.
  LiveRegMap LiveVirtRegs;
  std::vector<unsigned> RegUnitStates;

  RegUnitSet UsedInInstr;
  RegUnitSet PhysRegUses;

  void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);
  bool isPhysRegFree(MCPhysReg PhysReg) const;
  /// Mark a physreg as used in this instruction.
  void markRegUsedInInstr(MCPhysReg PhysReg) {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      UsedInInstr.insert(*Units);
  }

  /// Check if PhysReg is clobbered by a regmask operand of the instruction.
  bool isClobberedByRegMasks(MCPhysReg PhysReg) const {
    return llvm::any_of(RegMasks, [PhysReg](const uint32_t *Mask) {
      return MachineOperand::clobbersPhysReg(Mask, PhysReg);
    });
  }

  /// Check if a physreg or any of its aliases are used in this instruction.
  bool isRegUsedInInstr(MCPhysReg PhysReg, bool LookAtPhysRegUses) const {
    if (LookAtPhysRegUses && isClobberedByRegMasks(PhysReg))
      return true;
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) {
      if (UsedInInstr.count(*Units))
        return true;
      if (LookAtPhysRegUses && PhysRegUses.count(*Units))
        return true;
    }
    return false;
  }

  /// Mark a physical register as used in a register use operand (only used by
  /// the special livethrough handling code).
  void markPhysRegUsedInInstr(MCPhysReg PhysReg) {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      PhysRegUses.insert(*Units);
  }

  /// Remove mark of physical register being used in the instruction.
  void unmarkRegUsedInInstr(MCPhysReg PhysReg) {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      UsedInInstr.erase(*Units);
  }
  enum : unsigned {
    // ... spill cost constants ...
    spillImpossible = ~0u
  };
  // MachineFunctionProperties: requires Property::NoPHIs, sets
  // Property::NoVRegs (when ClearVirtRegs), and clears Property::IsSSA.
  void addRegClassDefCounts(std::vector<unsigned> &RegClassDefCounts,
                            Register Reg) const;

  unsigned calcSpillCost(MCPhysReg PhysReg) const;

  void allocVirtReg(MachineInstr &MI, LiveReg &LR, Register Hint,
                    bool LookAtPhysRegUses = false);
  void defineVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
                     bool LookAtPhysRegUses = false);

  bool shouldAllocateRegister(const Register Reg) const;
  int getStackSpaceFor(Register VirtReg);
  void spill(MachineBasicBlock::iterator Before, Register VirtReg,
             MCPhysReg AssignedReg, bool Kill, bool LiveOut);

  void dumpState() const;
char RegAllocFast::ID = 0;

bool RegAllocFast::shouldAllocateRegister(const Register Reg) const {
  const TargetRegisterClass &RC = *MRI->getRegClass(Reg);
  return ShouldAllocateClass(*TRI, RC);
}

void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
  for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI)
    RegUnitStates[*UI] = NewState;
}

bool RegAllocFast::isPhysRegFree(MCPhysReg PhysReg) const {
  for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
    if (RegUnitStates[*UI] != regFree)
      return false;
  }
  return true;
}
int RegAllocFast::getStackSpaceFor(Register VirtReg) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS; // Already has space allocated?

  // Allocate a new stack object for this virtual register.
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  unsigned Size = TRI->getSpillSize(RC);
  Align Alignment = TRI->getSpillAlign(RC);
  int FrameIdx = MFI->CreateSpillStackObject(Size, Alignment);

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}
/// Helper for mayLiveOut(): returns true if A comes no later than B in MBB.
static bool dominates(MachineBasicBlock &MBB,
                      MachineBasicBlock::const_iterator A,
                      MachineBasicBlock::const_iterator B) {
  // ...
  MachineBasicBlock::const_iterator I = MBB.begin();
  for (; &*I != A && &*I != B; ++I)
    ;
  return &*I == A;
}
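/// Liveness approximation: the fast allocator runs without live-interval
/// analysis, so mayLiveOut()/mayLiveIn() below answer "may this vreg be live
/// across blocks?" conservatively by scanning only the first few defs/uses
/// and caching positive answers in the MayLiveAcrossBlocks bitvector.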
bool RegAllocFast::mayLiveOut(Register VirtReg) {
  // ...
  // If this block loops back to itself, find the first def inside the loop.
  const MachineInstr *SelfLoopDef = nullptr;
  if (MBB->isSuccessor(MBB)) {
    for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
      if (DefInst.getParent() != MBB) {
        MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
        return true;
      }
      if (!SelfLoopDef || dominates(*MBB, DefInst.getIterator(), SelfLoopDef))
        SelfLoopDef = &DefInst;
    }
  }

  // See if the first Limit uses of the register are all in the current block.
  static const unsigned Limit = 8;
  unsigned C = 0;
  for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {
    if (UseInst.getParent() != MBB || ++C >= Limit) {
      MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
      return !MBB->succ_empty(); // Cannot be live-out without successors.
    }
    if (SelfLoopDef &&
        (SelfLoopDef == &UseInst ||
         !dominates(*MBB, SelfLoopDef->getIterator(), UseInst.getIterator()))) {
      MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
      return true;
    }
  }
  return false;
}
bool RegAllocFast::mayLiveIn(Register VirtReg) {
  // ...
  // See if the first Limit defs of the register are all in the current block.
  static const unsigned Limit = 8;
  unsigned C = 0;
  for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
    if (DefInst.getParent() != MBB || ++C >= Limit) {
      MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
      return !MBB->pred_empty();
    }
  }
  return false;
}
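/// spill() stores \p AssignedReg for \p VirtReg into its stack slot right
/// before the given insertion point and rewrites the DBG_VALUEs recorded in
/// LiveDbgValueMap so debug info follows the value onto the stack.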
void RegAllocFast::spill(MachineBasicBlock::iterator Before, Register VirtReg,
                         MCPhysReg AssignedReg, bool Kill, bool LiveOut) {
  int FI = getStackSpaceFor(VirtReg);
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI,
                           VirtReg);

  MachineBasicBlock::iterator FirstTerm = MBB->getFirstTerminator();

  // Every DBG_VALUE tracking VirtReg can now refer to the stack slot instead.
  SmallVectorImpl<MachineOperand *> &LRIDbgOperands = LiveDbgValueMap[VirtReg];
  SmallMapVector<MachineInstr *, SmallVector<const MachineOperand *>, 2>
      SpilledOperandsMap;
  for (MachineOperand *MO : LRIDbgOperands)
    SpilledOperandsMap[MO->getParent()].push_back(MO);
  for (auto MISpilledOperands : SpilledOperandsMap) {
    // ...
    MachineInstr *NewDV = buildDbgValueForSpill(
        *MBB, Before, *MISpilledOperands.first, FI, MISpilledOperands.second);
    LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);

    if (LiveOut) {
      // Also emit a DBG_VALUE before the first terminator so LiveDebugValues
      // sees the correct live-out location.
      MachineInstr *ClonedDV = MBB->getParent()->CloneMachineInstr(NewDV);
      MBB->insert(FirstTerm, ClonedDV);
      LLVM_DEBUG(dbgs() << "Cloning debug info due to live out spill\n");
    }
    // ...
  }
  // All DBG_VALUEs for this vreg point at the spilled value now.
  LRIDbgOperands.clear();
}
void RegAllocFast::reload(MachineBasicBlock::iterator Before, Register VirtReg,
                          MCPhysReg PhysReg) {
  int FI = getStackSpaceFor(VirtReg);
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI, VirtReg);
}

MachineBasicBlock::iterator RegAllocFast::getMBBBeginInsertionPoint(
    MachineBasicBlock &MBB, SmallSet<Register, 2> &PrologLiveIns) const {
  MachineBasicBlock::iterator I = MBB.begin();
  while (I != MBB.end()) {
    // Skip labels; most reloads are inserted after the target's prologue
    // instructions, unless the prologue itself reads the reloaded register
    // (those registers are collected in PrologLiveIns).
    if (!TII->isBasicBlockPrologue(*I))
      break;
    // ...
    ++I;
  }
  return I;
}
void RegAllocFast::reloadAtBegin(MachineBasicBlock &MBB) {
  if (LiveVirtRegs.empty())
    return;

  for (MachineBasicBlock::RegisterMaskPair P : MBB.liveins()) {
    MCPhysReg Reg = P.PhysReg;
    // Set state to live-in; this may override mappings to virtual registers.
    setPhysRegState(Reg, regLiveIn);
  }

  SmallSet<Register, 2> PrologLiveIns;
  MachineBasicBlock::iterator InsertBefore =
      getMBBBeginInsertionPoint(MBB, PrologLiveIns);
  for (const LiveReg &LR : LiveVirtRegs) {
    MCPhysReg PhysReg = LR.PhysReg;
    if (PhysReg == 0)
      continue;

    MCRegister FirstUnit = *MCRegUnitIterator(PhysReg, TRI);
    if (RegUnitStates[FirstUnit] == regLiveIn)
      continue;

    assert((&MBB != &MBB.getParent()->front() || IgnoreMissingDefs) &&
           "no reload in start block. Missing vreg def?");

    if (PrologLiveIns.count(PhysReg))
      reload(MBB.begin(), LR.VirtReg, PhysReg);
    else
      reload(InsertBefore, LR.VirtReg, PhysReg);
  }
  LiveVirtRegs.clear();
}
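/// Pre-assigned physical register operands (direct uses/defs of physregs) are
/// handled by marking the register's units regPreAssigned and displacing any
/// virtual register currently held there; displacePhysReg() inserts the
/// compensating reload after the instruction, since blocks are allocated
/// bottom-up.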
bool RegAllocFast::usePhysReg(MachineInstr &MI, MCPhysReg Reg) {
  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);
  markRegUsedInInstr(Reg);
  return displacedAny;
}

bool RegAllocFast::definePhysReg(MachineInstr &MI, MCPhysReg Reg) {
  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);
  return displacedAny;
}

/// Mark PhysReg as free after spilling/reloading any virtregs held in it.
bool RegAllocFast::displacePhysReg(MachineInstr &MI, MCPhysReg PhysReg) {
  bool displacedAny = false;

  for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
    unsigned Unit = *UI;
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
    default: {
      LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
      assert(LRI != LiveVirtRegs.end() && "datastructures in sync");
      MachineBasicBlock::iterator ReloadBefore =
          std::next((MachineBasicBlock::iterator)MI.getIterator());
      reload(ReloadBefore, VirtReg, LRI->PhysReg);

      setPhysRegState(LRI->PhysReg, regFree);
      LRI->PhysReg = 0;
      LRI->Reloaded = true;
      displacedAny = true;
      break;
    }
    case regPreAssigned:
      RegUnitStates[Unit] = regFree;
      displacedAny = true;
      break;
    case regFree:
      break;
    }
  }
  return displacedAny;
}

void RegAllocFast::freePhysReg(MCPhysReg PhysReg) {
  MCRegister FirstUnit = *MCRegUnitIterator(PhysReg, TRI);
  switch (unsigned VirtReg = RegUnitStates[FirstUnit]) {
  case regFree:
    return;
  case regPreAssigned:
    setPhysRegState(PhysReg, regFree);
    return;
  default: {
    LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
    assert(LRI != LiveVirtRegs.end());
    setPhysRegState(LRI->PhysReg, regFree);
    LRI->PhysReg = 0;
    return;
  }
  }
}
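/// calcSpillCost() drives the allocation-order scan in allocVirtReg(): it is
/// 0 for a free register, spillImpossible for a pre-assigned one, and
/// spillClean or spillDirty for a register holding a vreg, depending on
/// whether that vreg already has a stack slot (or is live out and must be
/// spilled anyway).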
unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
  for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
    switch (unsigned VirtReg = RegUnitStates[*UI]) {
    case regFree:
      break;
    case regPreAssigned:
      LLVM_DEBUG(dbgs() << "Cannot spill pre-assigned "
                        << printReg(PhysReg, TRI) << '\n');
      return spillImpossible;
    default: {
      bool SureSpill = StackSlotForVirtReg[VirtReg] != -1 ||
                       findLiveVirtReg(VirtReg)->LiveOut;
      return SureSpill ? spillClean : spillDirty;
    }
    }
  }
  return 0;
}
void RegAllocFast::assignDanglingDebugValues(MachineInstr &Definition,
                                             Register VirtReg, MCPhysReg Reg) {
  auto UDBGValIter = DanglingDbgValues.find(VirtReg);
  if (UDBGValIter == DanglingDbgValues.end())
    return;

  SmallVectorImpl<MachineInstr *> &Dangling = UDBGValIter->second;
  for (MachineInstr *DbgValue : Dangling) {
    assert(DbgValue->isDebugValue());
    if (!DbgValue->hasDebugOperandForReg(VirtReg))
      continue;

    // Test whether the physreg survives from the definition to the DBG_VALUE.
    MCPhysReg SetToReg = Reg;
    unsigned Limit = 20;
    for (MachineBasicBlock::iterator I = std::next(Definition.getIterator()),
                                     E = DbgValue->getIterator();
         I != E; ++I) {
      if (I->modifiesRegister(Reg, TRI) || --Limit == 0) {
        LLVM_DEBUG(dbgs() << "Register did not survive for " << *DbgValue
                          << '\n');
        SetToReg = 0;
        break;
      }
    }
    // ...
  }
  Dangling.clear();
}
void RegAllocFast::assignVirtToPhysReg(MachineInstr &AtMI, LiveReg &LR,
                                       MCPhysReg PhysReg) {
  Register VirtReg = LR.VirtReg;
  assert(LR.PhysReg == 0 && "Already assigned a physreg");
  assert(PhysReg != 0 && "Trying to assign no register");
  LR.PhysReg = PhysReg;
  setPhysRegState(PhysReg, VirtReg);

  assignDanglingDebugValues(AtMI, VirtReg, PhysReg);
}

static bool isCoalescable(const MachineInstr &MI) { return MI.isFullCopy(); }

Register RegAllocFast::traceCopyChain(Register Reg) const {
  static const unsigned ChainLengthLimit = 3;
  unsigned C = 0;
  do {
    if (Reg.isPhysical())
      return Reg;
    MachineInstr *VRegDef = MRI->getUniqueVRegDef(Reg);
    if (!VRegDef || !isCoalescable(*VRegDef))
      return 0;
    Reg = VRegDef->getOperand(1).getReg();
  } while (++C <= ChainLengthLimit);
  return 0;
}

/// Check if any of VirtReg's definitions is a copy; if so, follow the chain
/// of copies to find a physreg source.
Register RegAllocFast::traceCopies(Register VirtReg) const {
  static const unsigned DefLimit = 3;
  unsigned C = 0;
  for (const MachineInstr &MI : MRI->def_instructions(VirtReg)) {
    if (isCoalescable(MI)) {
      Register Reg = MI.getOperand(1).getReg();
      Reg = traceCopyChain(Reg);
      if (Reg.isValid())
        return Reg;
    }
    if (++C >= DefLimit)
      break;
  }
  return Register();
}
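/// allocVirtReg() picks a physical register for LR.VirtReg: it first tries
/// the hint passed in (Hint0), then a hint recovered by tracing copy chains
/// (traceCopies), and otherwise walks the class allocation order choosing the
/// candidate with the lowest calcSpillCost(), evicting its current occupant
/// if necessary.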
void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR,
                                Register Hint0, bool LookAtPhysRegUses) {
  const Register VirtReg = LR.VirtReg;
  assert(LR.PhysReg == 0);

  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg)
                    << " in class " << TRI->getRegClassName(&RC)
                    << " with hint " << printReg(Hint0, TRI) << '\n');

  // Take hint when possible.
  if (Hint0.isPhysical() && MRI->isAllocatable(Hint0) && RC.contains(Hint0) &&
      !isRegUsedInInstr(Hint0, LookAtPhysRegUses)) {
    // Take hint if the register is currently free.
    if (isPhysRegFree(Hint0)) {
      assignVirtToPhysReg(MI, LR, Hint0);
      return;
    }
    // ...
  } else {
    Hint0 = Register();
  }

  // Try other hint.
  Register Hint1 = traceCopies(VirtReg);
  if (Hint1.isPhysical() && MRI->isAllocatable(Hint1) && RC.contains(Hint1) &&
      !isRegUsedInInstr(Hint1, LookAtPhysRegUses)) {
    // Take hint if the register is currently free.
    if (isPhysRegFree(Hint1)) {
      assignVirtToPhysReg(MI, LR, Hint1);
      return;
    }
    // ...
  } else {
    Hint1 = Register();
  }

  MCPhysReg BestReg = 0;
  unsigned BestCost = spillImpossible;
  ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
  for (MCPhysReg PhysReg : AllocationOrder) {
    if (isRegUsedInInstr(PhysReg, LookAtPhysRegUses))
      continue;

    unsigned Cost = calcSpillCost(PhysReg);
    LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
    // Immediately take a register with cost 0.
    if (Cost == 0) {
      assignVirtToPhysReg(MI, LR, PhysReg);
      return;
    }

    if (PhysReg == Hint0 || PhysReg == Hint1)
      Cost -= spillPrefBonus;

    if (Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = Cost;
    }
  }

  if (!BestReg) {
    // Nothing we can do: report an error and keep going with an invalid
    // allocation.
    if (MI.isInlineAsm())
      MI.emitError("inline assembly requires more registers than available");
    else
      MI.emitError("ran out of registers during register allocation");

    LR.Error = true;
    LR.PhysReg = 0;
    return;
  }

  displacePhysReg(MI, BestReg);
  assignVirtToPhysReg(MI, LR, BestReg);
}
void RegAllocFast::allocVirtRegUndef(MachineOperand &MO) {
  Register VirtReg = MO.getReg();
  if (!shouldAllocateRegister(VirtReg))
    return;

  LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
  MCPhysReg PhysReg;
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    PhysReg = LRI->PhysReg;
  } else {
    // The value is undef; any register of the class will do.
    const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
    ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
    PhysReg = AllocationOrder[0];
  }

  unsigned SubRegIdx = MO.getSubReg();
  if (SubRegIdx != 0) {
    PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);
    MO.setSubReg(0);
  }
  MO.setReg(PhysReg);
  MO.setIsRenamable(true);
}
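/// defineLiveThroughVirtReg() is a variant of defineVirtReg() for tied,
/// earlyclobber and subregister defs: if the vreg's current physreg is also
/// used by this instruction, a fresh register is allocated for the def and a
/// COPY back to the previous register is inserted after the instruction.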
void RegAllocFast::defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
                                            Register VirtReg) {
  if (!shouldAllocateRegister(VirtReg))
    return;
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end()) {
    MCPhysReg PrevReg = LRI->PhysReg;
    if (PrevReg != 0 && isRegUsedInInstr(PrevReg, true)) {
      LLVM_DEBUG(dbgs() << "Need new assignment for " << printReg(PrevReg, TRI)
                        << " (tied/earlyclobber resolution)\n");
      freePhysReg(PrevReg);
      LRI->PhysReg = 0;
      allocVirtReg(MI, *LRI, 0, true);
      MachineBasicBlock::iterator InsertBefore =
          std::next((MachineBasicBlock::iterator)MI.getIterator());
      BuildMI(*MBB, InsertBefore, MI.getDebugLoc(),
              TII->get(TargetOpcode::COPY), PrevReg)
          .addReg(LRI->PhysReg, llvm::RegState::Kill);
    }
    MachineOperand &MO = MI.getOperand(OpNum);
    if (MO.getSubReg() && !MO.isUndef()) {
      LRI->LastUse = &MI;
    }
  }
  return defineVirtReg(MI, OpNum, VirtReg, true);
}
void RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                 Register VirtReg, bool LookAtPhysRegUses) {
  if (!shouldAllocateRegister(VirtReg))
    return;
  MachineOperand &MO = MI.getOperand(OpNum);
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (New) {
    if (!MO.isDead()) {
      if (mayLiveOut(VirtReg)) {
        LRI->LiveOut = true;
      } else {
        // It is a dead def without the dead flag; add the flag now.
        MO.setIsDead(true);
      }
    }
  }
  if (LRI->PhysReg == 0)
    allocVirtReg(MI, *LRI, 0, LookAtPhysRegUses);
  else {
    assert(!isRegUsedInInstr(LRI->PhysReg, LookAtPhysRegUses) &&
           "TODO: preassign mismatch");
    LLVM_DEBUG(dbgs() << "In def of " << printReg(VirtReg, TRI)
                      << " use existing assignment to "
                      << printReg(LRI->PhysReg, TRI) << '\n');
  }

  MCPhysReg PhysReg = LRI->PhysReg;
  assert(PhysReg != 0 && "Register not assigned");
  if (LRI->Reloaded || LRI->LiveOut) {
    if (!MI.isImplicitDef()) {
      MachineBasicBlock::iterator SpillBefore =
          std::next((MachineBasicBlock::iterator)MI.getIterator());
      LLVM_DEBUG(dbgs() << "Spill Reason: LO: " << LRI->LiveOut
                        << " RL: " << LRI->Reloaded << '\n');
      bool Kill = LRI->LastUse == nullptr;
      spill(SpillBefore, VirtReg, PhysReg, Kill, LRI->LiveOut);

      // We need to place additional spills for each indirect destination of
      // an INLINEASM_BR.
      if (MI.getOpcode() == TargetOpcode::INLINEASM_BR) {
        int FI = StackSlotForVirtReg[VirtReg];
        const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
        for (MachineOperand &Op : MI.operands()) {
          if (Op.isMBB()) {
            MachineBasicBlock *Succ = Op.getMBB();
            TII->storeRegToStackSlot(*Succ, Succ->begin(), PhysReg, Kill,
                                     FI, &RC, TRI, VirtReg);
            Succ->addLiveIn(PhysReg);
          }
        }
      }

      LRI->LastUse = nullptr;
    }
    LRI->LiveOut = false;
    LRI->Reloaded = false;
  }
  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = PhysReg;
  }
  markRegUsedInInstr(PhysReg);
  setPhysReg(MI, MO, PhysReg);
}
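/// useVirtReg() allocates (or reuses) a register for a use operand. For
/// copies the destination register is used as an allocation hint; if
/// allocation fails, the first register of the allocation order is used so
/// compilation can continue after the reported error.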
void RegAllocFast::useVirtReg(MachineInstr &MI, unsigned OpNum,
                              Register VirtReg) {
  if (!shouldAllocateRegister(VirtReg))
    return;
  MachineOperand &MO = MI.getOperand(OpNum);
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (New) {
    if (!MO.isKill()) {
      if (mayLiveOut(VirtReg)) {
        LRI->LiveOut = true;
      } else {
        // It is a last (killing) use without the kill flag; add the flag now.
        MO.setIsKill(true);
      }
    }
  } else {
    assert((!MO.isKill() || LRI->LastUse == &MI) && "Invalid kill flag");
  }

  // If necessary allocate a register.
  if (LRI->PhysReg == 0) {
    Register Hint;
    if (MI.isCopy() && MI.getOperand(1).getSubReg() == 0) {
      Hint = MI.getOperand(0).getReg();
      if (Hint.isVirtual()) {
        // The copy destination is a vreg this run is not allocating.
        assert(!shouldAllocateRegister(Hint));
        Hint = Register();
      } else {
        assert(Hint.isPhysical() &&
               "Copy destination should already be assigned");
      }
    }
    allocVirtReg(MI, *LRI, Hint, false);
    if (LRI->Error) {
      const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
      ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
      setPhysReg(MI, MO, *AllocationOrder.begin());
      return;
    }
  }

  LRI->LastUse = &MI;

  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = LRI->PhysReg;
  }
  markRegUsedInInstr(LRI->PhysReg);
  setPhysReg(MI, MO, LRI->PhysReg);
}
void RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
                              MCPhysReg PhysReg) {
  // ... (plain operands are rewritten directly; subregister operands are
  // rewritten to the corresponding sub-physreg)

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI.addRegisterKilled(PhysReg, TRI, true);
    return;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef()) {
    if (MO.isDead())
      MI.addRegisterDead(PhysReg, TRI, true);
    else
      MI.addRegisterDefined(PhysReg, TRI);
  }
}
void RegAllocFast::dumpState() const {
  for (unsigned Unit = 1, UnitE = TRI->getNumRegUnits(); Unit != UnitE;
       ++Unit) {
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
    case regFree:
      break;
    case regPreAssigned:
      dbgs() << " " << printRegUnit(Unit, TRI) << "[P]";
      break;
    case regLiveIn:
      llvm_unreachable("Should not have regLiveIn in map");
    default: {
      dbgs() << ' ' << printRegUnit(Unit, TRI) << '=' << printReg(VirtReg);
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "have LiveVirtRegs entry");
      if (I->LiveOut || I->Reloaded) {
        dbgs() << '[';
        if (I->LiveOut)
          dbgs() << 'O';
        if (I->Reloaded)
          dbgs() << 'R';
        dbgs() << ']';
      }
      assert(TRI->hasRegUnit(I->PhysReg, Unit) && "inverse mapping present");
      break;
    }
    }
  }
  dbgs() << '\n';
  // Check that LiveVirtRegs is the inverse.
  for (const LiveReg &LR : LiveVirtRegs) {
    Register VirtReg = LR.VirtReg;
    assert(VirtReg.isVirtual() && "Bad map key");
    MCPhysReg PhysReg = LR.PhysReg;
    if (PhysReg != 0) {
      assert(Register::isPhysicalRegister(PhysReg) && "mapped to physreg");
      for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI)
        assert(RegUnitStates[*UI] == VirtReg && "inverse map valid");
    }
  }
}
void RegAllocFast::addRegClassDefCounts(
    std::vector<unsigned> &RegClassDefCounts, Register Reg) const {
  assert(RegClassDefCounts.size() == TRI->getNumRegClasses());

  if (Reg.isVirtual()) {
    if (!shouldAllocateRegister(Reg))
      return;
    const TargetRegisterClass *OpRC = MRI->getRegClass(Reg);
    for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
         RCIdx != RCIdxEnd; ++RCIdx) {
      const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
      if (OpRC->hasSubClassEq(IdxRC))
        ++RegClassDefCounts[RCIdx];
    }
    return;
  }

  for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
       RCIdx != RCIdxEnd; ++RCIdx) {
    const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
    for (MCRegAliasIterator Alias(Reg, TRI, true); Alias.isValid(); ++Alias) {
      if (IdxRC->contains(*Alias)) {
        ++RegClassDefCounts[RCIdx];
        break;
      }
    }
  }
}
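/// allocateInstruction() processes one MachineInstr. Because blocks are
/// walked bottom-up, def operands are allocated and their registers freed
/// before the use operands of the same instruction are assigned; pre-assigned
/// physregs, regmask clobbers, tied operands and early-clobbers are handled
/// as special cases around those two steps.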
void RegAllocFast::allocateInstruction(MachineInstr &MI) {
  UsedInInstr.clear();
  RegMasks.clear();
  BundleVirtRegsMap.clear();

  auto TiedOpIsUndef = [&](const MachineOperand &MO, unsigned Idx) {
    assert(MO.isTied());
    unsigned TiedIdx = MI.findTiedOperandIdx(Idx);
    const MachineOperand &TiedMO = MI.getOperand(TiedIdx);
    return TiedMO.isUndef();
  };

  // Scan for special cases; apply pre-assigned register defs to state.
  bool HasPhysRegUse = false;
  bool HasRegMask = false;
  bool HasVRegDef = false;
  bool HasDef = false;
  bool HasEarlyClobber = false;
  bool NeedToAssignLiveThroughs = false;
  for (unsigned I = 0; I < MI.getNumOperands(); ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (MO.isReg()) {
      Register Reg = MO.getReg();
      if (Reg.isVirtual()) {
        if (!shouldAllocateRegister(Reg))
          continue;
        if (MO.isDef()) {
          HasDef = true;
          HasVRegDef = true;
          if (MO.isEarlyClobber()) {
            HasEarlyClobber = true;
            NeedToAssignLiveThroughs = true;
          }
          if ((MO.isTied() && !TiedOpIsUndef(MO, I)) ||
              (MO.getSubReg() != 0 && !MO.isUndef()))
            NeedToAssignLiveThroughs = true;
        }
      } else if (Reg.isPhysical()) {
        if (!MRI->isReserved(Reg)) {
          if (MO.isDef()) {
            HasDef = true;
            bool displacedAny = definePhysReg(MI, Reg);
            if (MO.isEarlyClobber())
              HasEarlyClobber = true;
            if (!displacedAny)
              MO.setIsDead(true);
          }
          if (MO.readsReg())
            HasPhysRegUse = true;
        }
      }
    } else if (MO.isRegMask()) {
      HasRegMask = true;
      RegMasks.push_back(MO.getRegMask());
    }
  }
  // Allocate virtreg defs.
  if (HasDef) {
    if (HasVRegDef) {
      // Early clobbers, tied operands and subregister defs must not reuse a
      // register that is pre-assigned to a use, so figure out a good operand
      // order before assigning.
      if (NeedToAssignLiveThroughs) {
        DefOperandIndexes.clear();
        PhysRegUses.clear();

        // Track number of defs which may consume a register from each class.
        std::vector<unsigned> RegClassDefCounts(TRI->getNumRegClasses(), 0);
        assert(RegClassDefCounts[0] == 0);

        for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
          const MachineOperand &MO = MI.getOperand(I);
          if (!MO.isReg())
            continue;
          Register Reg = MO.getReg();
          if (MO.readsReg() && Reg.isPhysical())
            markPhysRegUsedInInstr(Reg);
          if (MO.isDef()) {
            if (Reg.isVirtual() && shouldAllocateRegister(Reg))
              DefOperandIndexes.push_back(I);
            addRegClassDefCounts(RegClassDefCounts, Reg);
          }
        }

        llvm::sort(DefOperandIndexes, [&](uint16_t I0, uint16_t I1) {
          const MachineOperand &MO0 = MI.getOperand(I0);
          const MachineOperand &MO1 = MI.getOperand(I1);
          const TargetRegisterClass &RC0 = *MRI->getRegClass(MO0.getReg());
          const TargetRegisterClass &RC1 = *MRI->getRegClass(MO1.getReg());

          // Identify register classes that are easy to use up completely
          // just in this instruction.
          unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
          unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();
          bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
          bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
          if (SmallClass0 > SmallClass1)
            return true;
          if (SmallClass0 < SmallClass1)
            return false;

          // Allocate early clobbers and livethrough (tied / subreg def)
          // operands first.
          bool Livethrough0 = MO0.isEarlyClobber() || MO0.isTied() /* ... */;
          bool Livethrough1 = MO1.isEarlyClobber() || MO1.isTied() /* ... */;
          if (Livethrough0 > Livethrough1)
            return true;
          if (Livethrough0 < Livethrough1)
            return false;
          return I0 < I1;
        });
        for (uint16_t OpIdx : DefOperandIndexes) {
          MachineOperand &MO = MI.getOperand(OpIdx);
          Register Reg = MO.getReg();
          if (MO.isEarlyClobber() ||
              (MO.isTied() && !TiedOpIsUndef(MO, OpIdx)) ||
              (MO.getSubReg() && !MO.isUndef())) {
            defineLiveThroughVirtReg(MI, OpIdx, Reg);
          } else {
            defineVirtReg(MI, OpIdx, Reg);
          }
        }
      } else {
        // Assign virtual register defs.
        for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
          MachineOperand &MO = MI.getOperand(I);
          if (!MO.isReg() || !MO.isDef())
            continue;
          Register Reg = MO.getReg();
          if (Reg.isVirtual())
            defineVirtReg(MI, I, Reg);
        }
      }
    }
    // Free registers occupied by defs; iterate operands in reverse order so
    // implicit super register defs are seen first.
    for (signed I = MI.getNumOperands() - 1; I >= 0; --I) {
      MachineOperand &MO = MI.getOperand(I);
      if (!MO.isReg() || !MO.isDef())
        continue;
      // ...
      assert((!MO.isTied() || !isClobberedByRegMasks(MO.getReg())) &&
             "tied def assigned to clobbered register");
      // Do not free tied operands and early clobbers.
      if ((MO.isTied() && !TiedOpIsUndef(MO, I)) || MO.isEarlyClobber())
        continue;
      Register Reg = MO.getReg();
      if (Reg.isVirtual()) {
        assert(!shouldAllocateRegister(Reg));
        continue;
      }
      if (MRI->isReserved(Reg))
        continue;
      freePhysReg(Reg);
      unmarkRegUsedInInstr(Reg);
    }
  }
  // Displace registers clobbered by regmask operands.
  if (HasRegMask) {
    for (const auto *RM : RegMasks)
      MRI->addPhysRegsUsedFromRegMask(RM);

    for (const LiveReg &LR : LiveVirtRegs) {
      MCPhysReg PhysReg = LR.PhysReg;
      if (PhysReg != 0 && isClobberedByRegMasks(PhysReg))
        displacePhysReg(MI, PhysReg);
    }
  }
  // Apply pre-assigned register uses to state.
  if (HasPhysRegUse) {
    for (MachineOperand &MO : MI.operands()) {
      if (!MO.isReg() || !MO.readsReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg.isPhysical() || MRI->isReserved(Reg))
        continue;
      bool displacedAny = usePhysReg(MI, Reg);
      if (!displacedAny)
        MO.setIsKill(true);
    }
  }
  // Allocate virtreg uses and insert reloads as necessary.
  bool HasUndefUse = false;
  for (unsigned I = 0; I < MI.getNumOperands(); ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg() || !MO.isUse())
      continue;
    Register Reg = MO.getReg();
    if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
      continue;
    if (MO.isUndef()) {
      HasUndefUse = true;
      continue;
    }
    // ...
    useVirtReg(MI, I, Reg);
  }

  // Allocate undef operands. This is a separate step because in a situation
  // like ` = OP undef %X, %X` both operands need the same register assigned,
  // so the normal assignment has to happen first.
  if (HasUndefUse) {
    for (MachineOperand &MO : MI.uses()) {
      if (!MO.isReg() || !MO.isUse())
        continue;
      Register Reg = MO.getReg();
      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
        continue;
      assert(MO.isUndef() && "Should only have undef virtreg uses left");
      allocVirtRegUndef(MO);
    }
  }
  // Free early clobbers.
  if (HasEarlyClobber) {
    for (MachineOperand &MO : llvm::reverse(MI.operands())) {
      if (!MO.isReg() || !MO.isDef() || !MO.isEarlyClobber())
        continue;
      assert(!MO.getSubReg() && "should be already handled in def processing");
      Register Reg = MO.getReg();
      if (!Reg)
        continue;
      if (Reg.isVirtual()) {
        assert(!shouldAllocateRegister(Reg));
        continue;
      }
      assert(Reg.isPhysical() && "should have register assigned");
      // If the early-clobber register is also read by this instruction,
      // consider the use to happen before the clobber and do not free it.
      if (MI.readsRegister(Reg, TRI))
        continue;
      freePhysReg(Reg);
    }
  }
  // Remember identity copies so they can be erased at the end of the block.
  if (MI.isCopy() && MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getNumOperands() == 2) {
    Coalesced.push_back(&MI);
  }
}
void RegAllocFast::handleDebugValue(MachineInstr &MI) {
  // Ignore DBG_VALUEs that aren't based on virtual registers. These are
  // mostly constants and frame indices.
  for (Register Reg : MI.getUsedDebugRegs()) {
    if (!Reg.isVirtual())
      continue;
    if (!shouldAllocateRegister(Reg))
      continue;

    // Already spilled to a stackslot?
    int SS = StackSlotForVirtReg[Reg];
    if (SS != -1) {
      // Modify DBG_VALUE now that the value is in a spill slot.
      updateDbgValueForSpill(MI, SS, Reg);
      continue;
    }

    // See if this virtual register has already been allocated to a physical
    // register or spilled to a stack slot.
    LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
    SmallVector<MachineOperand *> DbgOps;
    for (MachineOperand &Op : MI.getDebugOperandsForReg(Reg))
      DbgOps.push_back(&Op);

    if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
      // Update every use of Reg within MI.
      for (auto &RegMO : DbgOps)
        setPhysReg(MI, *RegMO, LRI->PhysReg);
    } else {
      DanglingDbgValues[Reg].push_back(&MI);
    }

    // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
    // that future spills of Reg will have DBG_VALUEs.
    LiveDbgValueMap[Reg].append(DbgOps.begin(), DbgOps.end());
  }
}
void RegAllocFast::handleBundle(MachineInstr &MI) {
  MachineBasicBlock::instr_iterator BundledMI = MI.getIterator();
  ++BundledMI;
  while (BundledMI->isBundledWithPred()) {
    for (MachineOperand &MO : BundledMI->operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
        continue;

      DenseMap<Register, MCPhysReg>::iterator DI;
      DI = BundleVirtRegsMap.find(Reg);
      assert(DI != BundleVirtRegsMap.end() && "Unassigned virtual register");

      setPhysReg(MI, MO, DI->second);
    }
    ++BundledMI;
  }
}
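/// allocateBasicBlock() visits the instructions of a block in reverse order,
/// so a value's register is chosen at its last use and spills are placed at
/// its definition; whatever is still in a register at the block entry is then
/// reloaded at the top by reloadAtBegin().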
void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
  this->MBB = &MBB;

  RegUnitStates.assign(TRI->getNumRegUnits(), regFree);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  for (const auto &LiveReg : MBB.liveouts())
    setPhysRegState(LiveReg.PhysReg, regPreAssigned);

  Coalesced.clear();

  // Traverse block in reverse order allocating instructions one by one.
  for (MachineInstr &MI : reverse(MBB)) {
    LLVM_DEBUG(
      dbgs() << "\n>> " << MI << "Regs:";
      dumpState()
    );

    // Special handling for debug values. Note that they are not allowed to
    // affect codegen of the other instructions in any way.
    if (MI.isDebugValue()) {
      handleDebugValue(MI);
      continue;
    }

    allocateInstruction(MI);

    // Once the BUNDLE header is assigned registers, the same assignments need
    // to be done for the bundled MIs.
    if (MI.getOpcode() == TargetOpcode::BUNDLE) {
      handleBundle(MI);
    }
  }

  LLVM_DEBUG(
    dbgs() << "Begin Regs:";
    dumpState()
  );

  // Spill all physical registers holding virtual registers now.
  LLVM_DEBUG(dbgs() << "Loading live registers at begin of block.\n");
  reloadAtBegin(MBB);

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (MachineInstr *MI : Coalesced)
    MBB.erase(MI);
  NumCoalesced += Coalesced.size();

  for (auto &UDBGPair : DanglingDbgValues) {
    for (MachineInstr *DbgValue : UDBGPair.second) {
      assert(DbgValue->isDebugValue() && "expected DBG_VALUE");
      // Nothing to do if the vreg was spilled in the meantime.
      if (!DbgValue->hasDebugOperandForReg(UDBGPair.first))
        continue;
      LLVM_DEBUG(dbgs() << "Register did not survive for " << *DbgValue
                        << '\n');
      DbgValue->setDebugValueUndef();
    }
  }
  DanglingDbgValues.clear();
}
bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  MRI = &MF.getRegInfo();
  const TargetSubtargetInfo &STI = MF.getSubtarget();
  TRI = STI.getRegisterInfo();
  TII = STI.getInstrInfo();
  MFI = &MF.getFrameInfo();
  MRI->freezeReservedRegs(MF);
  RegClassInfo.runOnMachineFunction(MF);
  unsigned NumRegUnits = TRI->getNumRegUnits();
  UsedInInstr.clear();
  UsedInInstr.setUniverse(NumRegUnits);
  PhysRegUses.clear();
  PhysRegUses.setUniverse(NumRegUnits);

  // Initialize the virtual->physical register map to have a 'null' mapping
  // for all virtual registers.
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);
  MayLiveAcrossBlocks.clear();
  MayLiveAcrossBlocks.resize(NumVirtRegs);

  // Loop over all of the basic blocks, eliminating virtual register
  // references.
  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  if (ClearVirtRegs) {
    // All machine operands and other references to virtual registers have
    // been replaced. Remove the virtual registers.
    MRI->clearVirtRegs();
  }

  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RegAllocFast();
}

FunctionPass *llvm::createFastRegisterAllocator(RegClassFilterFunc Ftor,
                                                bool ClearVirtRegs) {
  return new RegAllocFast(Ftor, ClearVirtRegs);
}
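// Illustrative note: besides the factory functions above, this file also
// registers the allocator under the name "fast",
//   static RegisterRegAlloc fastRegAlloc("fast", "fast register allocator",
//                                        createFastRegisterAllocator);
// which is what -regalloc=fast selects; the -O0 pipeline reaches the same
// pass through TargetPassConfig::createTargetRegisterAllocator(false).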