LLVM 17.0.0git
RegAllocFast.cpp
Go to the documentation of this file.
1//===- RegAllocFast.cpp - A fast register allocator for debug code --------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9/// \file This register allocator allocates registers to a basic block at a
10/// time, attempting to keep values in registers and reusing registers as
11/// appropriate.
12//
13//===----------------------------------------------------------------------===//
14
15#include "llvm/ADT/ArrayRef.h"
16#include "llvm/ADT/DenseMap.h"
17#include "llvm/ADT/IndexedMap.h"
18#include "llvm/ADT/MapVector.h"
19#include "llvm/ADT/SmallSet.h"
21#include "llvm/ADT/SparseSet.h"
22#include "llvm/ADT/Statistic.h"
40#include "llvm/Pass.h"
41#include "llvm/Support/Debug.h"
44#include <cassert>
45#include <tuple>
46#include <vector>
47
48using namespace llvm;
49
50#define DEBUG_TYPE "regalloc"
51
52STATISTIC(NumStores, "Number of stores added");
53STATISTIC(NumLoads , "Number of loads added");
54STATISTIC(NumCoalesced, "Number of copies coalesced");
55
56// FIXME: Remove this switch when all testcases are fixed!
57static cl::opt<bool> IgnoreMissingDefs("rafast-ignore-missing-defs",
59
61 fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
62
63namespace {
64
65 class RegAllocFast : public MachineFunctionPass {
66 public:
67 static char ID;
68
69 RegAllocFast(const RegClassFilterFunc F = allocateAllRegClasses,
70 bool ClearVirtRegs_ = true) :
72 ShouldAllocateClass(F),
73 StackSlotForVirtReg(-1),
74 ClearVirtRegs(ClearVirtRegs_) {
75 }
76
77 private:
 // NOTE(review): member declarations numbered 78-80 (presumably the cached
 // MachineFunction/MRI/TRI/frame-info pointers used throughout the methods
 // below) were dropped by the extraction — confirm against upstream.
81 const TargetInstrInfo *TII;
82 RegisterClassInfo RegClassInfo;
 // Filter deciding which register classes this allocator run handles.
83 const RegClassFilterFunc ShouldAllocateClass;
84
85 /// Basic block currently being allocated.
 // NOTE(review): the declaration itself (line 86, presumably
 // "MachineBasicBlock *MBB;") is missing from this capture.
87
88 /// Maps virtual regs to the frame index where these values are spilled.
89 IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
90
 // When true the pass promises NoVRegs on completion (see getSetProperties).
91 bool ClearVirtRegs;
92
93 /// Everything we know about a live virtual register.
94 struct LiveReg {
95 MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
96 Register VirtReg; ///< Virtual register number.
97 MCPhysReg PhysReg = 0; ///< Currently held here.
98 bool LiveOut = false; ///< Register is possibly live out.
99 bool Reloaded = false; ///< Register was reloaded.
100 bool Error = false; ///< Could not allocate.
101
102 explicit LiveReg(Register VirtReg) : VirtReg(VirtReg) {}
103
104 unsigned getSparseSetIndex() const {
105 return Register::virtReg2Index(VirtReg);
106 }
107 };
108
109 using LiveRegMap = SparseSet<LiveReg>;
110 /// This map contains entries for each virtual register that is currently
111 /// available in a physical register.
112 LiveRegMap LiveVirtRegs;
113
114 /// Stores assigned virtual registers present in the bundle MI.
115 DenseMap<Register, MCPhysReg> BundleVirtRegsMap;
116
 // NOTE(review): declarations at lines 117 and 120-121 (the DBG_VALUE
 // tracking containers, e.g. LiveDbgValueMap / DanglingDbgValues referenced
 // by spill() and assignDanglingDebugValues()) are missing from this capture.
118 /// List of DBG_VALUE that we encountered without the vreg being assigned
119 /// because they were placed after the last use of the vreg.
121
122 /// Has a bit set for every virtual register for which it was determined
123 /// that it is alive across blocks.
124 BitVector MayLiveAcrossBlocks;
125
126 /// State of a register unit.
127 enum RegUnitState {
128 /// A free register is not currently in use and can be allocated
129 /// immediately without checking aliases.
130 regFree,
131
132 /// A pre-assigned register has been assigned before register allocation
133 /// (e.g., setting up a call parameter).
134 regPreAssigned,
135
136 /// Used temporarily in reloadAtBegin() to mark register units that are
137 /// live-in to the basic block.
138 regLiveIn,
139
140 /// A register state may also be a virtual register number, indication
141 /// that the physical register is currently allocated to a virtual
142 /// register. In that case, LiveVirtRegs contains the inverse mapping.
143 };
144
145 /// Maps each physical register to a RegUnitState enum or virtual register.
146 std::vector<unsigned> RegUnitStates;
147
 // NOTE(review): a declaration at line 148 is missing here.
149
150 using RegUnitSet = SparseSet<uint16_t, identity<uint16_t>>;
151 /// Set of register units that are used in the current instruction, and so
152 /// cannot be allocated.
153 RegUnitSet UsedInInstr;
 // Register units read by physreg use operands of the current instruction;
 // tracked separately for the livethrough (tied/earlyclobber) handling.
154 RegUnitSet PhysRegUses;
 // Scratch buffer; its consumers are outside this capture.
155 SmallVector<uint16_t, 8> DefOperandIndexes;
156 // Register masks attached to the current instruction.
 // NOTE(review): the RegMasks declaration (line 157) referenced by
 // isClobberedByRegMasks() below is missing from this capture.
158
159 void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);
160 bool isPhysRegFree(MCPhysReg PhysReg) const;
161
162 /// Mark a physreg as used in this instruction.
163 void markRegUsedInInstr(MCPhysReg PhysReg) {
164 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
165 UsedInInstr.insert(*Units);
166 }
167
168 // Check if physreg is clobbered by instruction's regmask(s).
169 bool isClobberedByRegMasks(MCPhysReg PhysReg) const {
170 return llvm::any_of(RegMasks, [PhysReg](const uint32_t *Mask) {
171 return MachineOperand::clobbersPhysReg(Mask, PhysReg);
172 });
173 }
174
175 /// Check if a physreg or any of its aliases are used in this instruction.
176 bool isRegUsedInInstr(MCPhysReg PhysReg, bool LookAtPhysRegUses) const {
177 if (LookAtPhysRegUses && isClobberedByRegMasks(PhysReg))
178 return true;
179 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) {
180 if (UsedInInstr.count(*Units))
181 return true;
182 if (LookAtPhysRegUses && PhysRegUses.count(*Units))
183 return true;
184 }
185 return false;
186 }
187
188 /// Mark physical register as being used in a register use operand.
189 /// This is only used by the special livethrough handling code.
190 void markPhysRegUsedInInstr(MCPhysReg PhysReg) {
191 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
192 PhysRegUses.insert(*Units);
193 }
194
195 /// Remove mark of physical register being used in the instruction.
196 void unmarkRegUsedInInstr(MCPhysReg PhysReg) {
197 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
198 UsedInInstr.erase(*Units);
199 }
200
 // Relative spill costs used by calcSpillCost()/allocVirtReg().
201 enum : unsigned {
202 spillClean = 50,
203 spillDirty = 100,
204 spillPrefBonus = 20,
205 spillImpossible = ~0u
206 };
207
208 public:
209 StringRef getPassName() const override { return "Fast Register Allocator"; }
210
211 void getAnalysisUsage(AnalysisUsage &AU) const override {
212 AU.setPreservesCFG();
 // NOTE(review): line 213 (presumably the base-class
 // MachineFunctionPass::getAnalysisUsage(AU) call) is missing here; the
 // signature lines of the MachineFunctionProperties getters below
 // (216-217, 221, 223, 227, 230-231) are missing as well.
214 }
215
218 MachineFunctionProperties::Property::NoPHIs);
219 }
220
222 if (ClearVirtRegs) {
224 MachineFunctionProperties::Property::NoVRegs);
225 }
226
228 }
229
232 MachineFunctionProperties::Property::IsSSA);
233 }
234
235 private:
236 bool runOnMachineFunction(MachineFunction &MF) override;
237
238 void allocateBasicBlock(MachineBasicBlock &MBB);
239
240 void addRegClassDefCounts(std::vector<unsigned> &RegClassDefCounts,
241 Register Reg) const;
242
243 void allocateInstruction(MachineInstr &MI);
244 void handleDebugValue(MachineInstr &MI);
245 void handleBundle(MachineInstr &MI);
246
247 bool usePhysReg(MachineInstr &MI, MCPhysReg PhysReg);
248 bool definePhysReg(MachineInstr &MI, MCPhysReg PhysReg);
249 bool displacePhysReg(MachineInstr &MI, MCPhysReg PhysReg);
250 void freePhysReg(MCPhysReg PhysReg);
251
252 unsigned calcSpillCost(MCPhysReg PhysReg) const;
253
254 LiveRegMap::iterator findLiveVirtReg(Register VirtReg) {
255 return LiveVirtRegs.find(Register::virtReg2Index(VirtReg));
256 }
257
258 LiveRegMap::const_iterator findLiveVirtReg(Register VirtReg) const {
259 return LiveVirtRegs.find(Register::virtReg2Index(VirtReg));
260 }
261
262 void assignVirtToPhysReg(MachineInstr &MI, LiveReg &, MCPhysReg PhysReg);
263 void allocVirtReg(MachineInstr &MI, LiveReg &LR, Register Hint,
264 bool LookAtPhysRegUses = false);
265 void allocVirtRegUndef(MachineOperand &MO);
266 void assignDanglingDebugValues(MachineInstr &Def, Register VirtReg,
267 MCPhysReg Reg);
268 void defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
269 Register VirtReg);
270 void defineVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
271 bool LookAtPhysRegUses = false);
272 void useVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg);
273
 // NOTE(review): the return-type line (274, presumably
 // "MachineBasicBlock::iterator") is missing from this capture.
275 getMBBBeginInsertionPoint(MachineBasicBlock &MBB,
276 SmallSet<Register, 2> &PrologLiveIns) const;
277
278 void reloadAtBegin(MachineBasicBlock &MBB);
279 void setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);
280
281 Register traceCopies(Register VirtReg) const;
282 Register traceCopyChain(Register Reg) const;
283
284 bool shouldAllocateRegister(const Register Reg) const;
285 int getStackSpaceFor(Register VirtReg);
286 void spill(MachineBasicBlock::iterator Before, Register VirtReg,
287 MCPhysReg AssignedReg, bool Kill, bool LiveOut);
288 void reload(MachineBasicBlock::iterator Before, Register VirtReg,
289 MCPhysReg PhysReg);
290
291 bool mayLiveOut(Register VirtReg);
292 bool mayLiveIn(Register VirtReg);
293
294 void dumpState() const;
295 };
296
297} // end anonymous namespace
298
// Pass identification: the address of ID is the unique key for this pass, and
// INITIALIZE_PASS registers it under the "regallocfast" command-line name.
299char RegAllocFast::ID = 0;
300
301INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
302 false)
303
304bool RegAllocFast::shouldAllocateRegister(const Register Reg) const {
305 assert(Reg.isVirtual());
306 const TargetRegisterClass &RC = *MRI->getRegClass(Reg);
307 return ShouldAllocateClass(*TRI, RC);
308}
309
310void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
311 for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI)
312 RegUnitStates[*UI] = NewState;
313}
314
315bool RegAllocFast::isPhysRegFree(MCPhysReg PhysReg) const {
316 for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
317 if (RegUnitStates[*UI] != regFree)
318 return false;
319 }
320 return true;
321}
322
323/// This allocates space for the specified virtual register to be held on the
324/// stack.
325int RegAllocFast::getStackSpaceFor(Register VirtReg) {
326 // Find the location Reg would belong...
327 int SS = StackSlotForVirtReg[VirtReg];
328 // Already has space allocated?
329 if (SS != -1)
330 return SS;
331
332 // Allocate a new stack object for this spill location...
333 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
334 unsigned Size = TRI->getSpillSize(RC);
335 Align Alignment = TRI->getSpillAlign(RC);
336 int FrameIdx = MFI->CreateSpillStackObject(Size, Alignment);
337
338 // Assign the slot.
339 StackSlotForVirtReg[VirtReg] = FrameIdx;
340 return FrameIdx;
341}
342
// NOTE(review): the signature of this helper (numbered lines 343-345,
// presumably a static `dominates(MachineBasicBlock&, iterator A, iterator B)`)
// and the declaration of the cursor `I` (line 350) were lost in extraction;
// only the visible tail is kept, untouched.
346 auto MBBEnd = MBB.end();
// B at end() means "after everything in the block", so A trivially precedes it.
347 if (B == MBBEnd)
348 return true;
349
// Walk forward until either A or B is reached; whichever instruction is
// encountered first is the dominating one within this single block.
351 for (; &*I != A && &*I != B; ++I)
352 ;
353
354 return &*I == A;
355}
356
357/// Returns false if \p VirtReg is known to not live out of the current block.
358bool RegAllocFast::mayLiveOut(Register VirtReg) {
359 if (MayLiveAcrossBlocks.test(Register::virtReg2Index(VirtReg))) {
360 // Cannot be live-out if there are no successors.
361 return !MBB->succ_empty();
362 }
363
364 const MachineInstr *SelfLoopDef = nullptr;
365
366 // If this block loops back to itself, it is necessary to check whether the
367 // use comes after the def.
368 if (MBB->isSuccessor(MBB)) {
369 // Find the first def in the self loop MBB.
370 for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
371 if (DefInst.getParent() != MBB) {
372 MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
373 return true;
374 } else {
375 if (!SelfLoopDef || dominates(*MBB, DefInst.getIterator(), SelfLoopDef))
376 SelfLoopDef = &DefInst;
377 }
378 }
379 if (!SelfLoopDef) {
380 MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
381 return true;
382 }
383 }
384
385 // See if the first \p Limit uses of the register are all in the current
386 // block.
387 static const unsigned Limit = 8;
388 unsigned C = 0;
389 for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {
390 if (UseInst.getParent() != MBB || ++C >= Limit) {
391 MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
392 // Cannot be live-out if there are no successors.
393 return !MBB->succ_empty();
394 }
395
396 if (SelfLoopDef) {
397 // Try to handle some simple cases to avoid spilling and reloading every
398 // value inside a self looping block.
399 if (SelfLoopDef == &UseInst ||
400 !dominates(*MBB, SelfLoopDef->getIterator(), UseInst.getIterator())) {
401 MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
402 return true;
403 }
404 }
405 }
406
407 return false;
408}
409
410/// Returns false if \p VirtReg is known to not be live into the current block.
411bool RegAllocFast::mayLiveIn(Register VirtReg) {
412 if (MayLiveAcrossBlocks.test(Register::virtReg2Index(VirtReg)))
413 return !MBB->pred_empty();
414
415 // See if the first \p Limit def of the register are all in the current block.
416 static const unsigned Limit = 8;
417 unsigned C = 0;
418 for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
419 if (DefInst.getParent() != MBB || ++C >= Limit) {
420 MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
421 return !MBB->pred_empty();
422 }
423 }
424
425 return false;
426}
427
428/// Insert spill instruction for \p AssignedReg before \p Before. Update
429/// DBG_VALUEs with \p VirtReg operands with the stack slot.
430void RegAllocFast::spill(MachineBasicBlock::iterator Before, Register VirtReg,
431 MCPhysReg AssignedReg, bool Kill, bool LiveOut) {
432 LLVM_DEBUG(dbgs() << "Spilling " << printReg(VirtReg, TRI)
433 << " in " << printReg(AssignedReg, TRI));
434 int FI = getStackSpaceFor(VirtReg);
435 LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n');
436
437 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
438 TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI,
439 VirtReg);
440 ++NumStores;
441
// NOTE(review): a line is missing here (numbering jumps 441 -> 443); it
// presumably computed `FirstTerm` (the block's first terminator) used by the
// live-out cloning below — confirm against the upstream file.
443
444 // When we spill a virtual register, we will have spill instructions behind
445 // every definition of it, meaning we can switch all the DBG_VALUEs over
446 // to just reference the stack slot.
447 SmallVectorImpl<MachineOperand *> &LRIDbgOperands = LiveDbgValueMap[VirtReg];
// NOTE(review): the declaration opening this map (line 448) is missing; only
// the continuation "SpilledOperandsMap;" survived extraction.
449 SpilledOperandsMap;
// Group the tracked debug operands by their parent DBG_VALUE instruction.
450 for (MachineOperand *MO : LRIDbgOperands)
451 SpilledOperandsMap[MO->getParent()].push_back(MO);
452 for (auto MISpilledOperands : SpilledOperandsMap) {
453 MachineInstr &DBG = *MISpilledOperands.first;
454 // We don't have enough support for tracking operands of DBG_VALUE_LISTs.
455 if (DBG.isDebugValueList())
456 continue;
// NOTE(review): the statement creating `NewDV` (line 457, presumably a
// buildDbgValueForSpill(...) call) is missing; only its argument list remains.
458 *MBB, Before, *MISpilledOperands.first, FI, MISpilledOperands.second);
459 assert(NewDV->getParent() == MBB && "dangling parent pointer");
460 (void)NewDV;
461 LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
462
463 if (LiveOut) {
464 // We need to insert a DBG_VALUE at the end of the block if the spill slot
465 // is live out, but there is another use of the value after the
466 // spill. This will allow LiveDebugValues to see the correct live out
467 // value to propagate to the successors.
468 MachineInstr *ClonedDV = MBB->getParent()->CloneMachineInstr(NewDV);
469 MBB->insert(FirstTerm, ClonedDV);
470 LLVM_DEBUG(dbgs() << "Cloning debug info due to live out spill\n");
471 }
472
473 // Rewrite unassigned dbg_values to use the stack slot.
474 // TODO We can potentially do this for list debug values as well if we know
475 // how the dbg_values are getting unassigned.
476 if (DBG.isNonListDebugValue()) {
477 MachineOperand &MO = DBG.getDebugOperand(0);
478 if (MO.isReg() && MO.getReg() == 0) {
479 updateDbgValueForSpill(DBG, FI, 0);
480 }
481 }
482 }
483 // Now that this register is spilled there should not be any DBG_VALUE
484 // pointing to this register because they are all pointing to the spilled
485 // value now.
486 LRIDbgOperands.clear();
487}
488
489/// Insert reload instruction for \p PhysReg before \p Before.
490void RegAllocFast::reload(MachineBasicBlock::iterator Before, Register VirtReg,
491 MCPhysReg PhysReg) {
492 LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
493 << printReg(PhysReg, TRI) << '\n');
494 int FI = getStackSpaceFor(VirtReg);
495 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
496 TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI, VirtReg);
497 ++NumLoads;
498}
499
500/// Get basic block begin insertion point.
501/// This is not just MBB.begin() because surprisingly we have EH_LABEL
502/// instructions marking the begin of a basic block. This means we must insert
503/// new instructions after such labels...
// NOTE(review): the return-type line (504, presumably
// "MachineBasicBlock::iterator") was lost in extraction.
505RegAllocFast::getMBBBeginInsertionPoint(
506 MachineBasicBlock &MBB, SmallSet<Register, 2> &PrologLiveIns) const {
// NOTE(review): the declaration of the cursor `I` (line 507, presumably
// "MachineBasicBlock::iterator I = MBB.begin();") was lost in extraction.
508 while (I != MBB.end()) {
// Labels (e.g. EH_LABEL) must stay at the very start of the block.
509 if (I->isLabel()) {
510 ++I;
511 continue;
512 }
513
514 // Most reloads should be inserted after prolog instructions.
515 if (!TII->isBasicBlockPrologue(*I))
516 break;
517
518 // However if a prolog instruction reads a register that needs to be
519 // reloaded, the reload should be inserted before the prolog.
520 for (MachineOperand &MO : I->operands()) {
521 if (MO.isReg())
522 PrologLiveIns.insert(MO.getReg());
523 }
524
525 ++I;
526 }
527
528 return I;
529}
530
531/// Reload all currently assigned virtual registers.
532void RegAllocFast::reloadAtBegin(MachineBasicBlock &MBB) {
533 if (LiveVirtRegs.empty())
534 return;
535
// NOTE(review): the loop header (line 536, presumably iterating
// "MBB.liveins()" to get RegisterMaskPair P) was lost in extraction.
537 MCPhysReg Reg = P.PhysReg;
538 // Set state to live-in. This possibly overrides mappings to virtual
539 // registers but we don't care anymore at this point.
540 setPhysRegState(Reg, regLiveIn);
541 }
542
543
544 SmallSet<Register, 2> PrologLiveIns;
545
546 // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
547 // of spilling here is deterministic, if arbitrary.
548 MachineBasicBlock::iterator InsertBefore
549 = getMBBBeginInsertionPoint(MBB, PrologLiveIns);
550 for (const LiveReg &LR : LiveVirtRegs) {
551 MCPhysReg PhysReg = LR.PhysReg;
// Entries whose physreg was already displaced need no reload here.
552 if (PhysReg == 0)
553 continue;
554
// A physreg whose first unit is marked regLiveIn is provided by a
// predecessor, so the value is already in the register.
555 MCRegister FirstUnit = *MCRegUnitIterator(PhysReg, TRI);
556 if (RegUnitStates[FirstUnit] == regLiveIn)
557 continue;
558
// NOTE(review): the first line of this assert (559) is missing; only its
// message string survived extraction.
560 "no reload in start block. Missing vreg def?");
561
562 if (PrologLiveIns.count(PhysReg)) {
563 // FIXME: Theoretically this should use an insert point skipping labels
564 // but I'm not sure how labels should interact with prolog instruction
565 // that need reloads.
566 reload(MBB.begin(), LR.VirtReg, PhysReg);
567 } else
568 reload(InsertBefore, LR.VirtReg, PhysReg);
569 }
570 LiveVirtRegs.clear();
571}
572
573/// Handle the direct use of a physical register. Check that the register is
574/// not used by a virtreg. Kill the physreg, marking it free. This may add
575/// implicit kills to MO->getParent() and invalidate MO.
576bool RegAllocFast::usePhysReg(MachineInstr &MI, MCPhysReg Reg) {
577 assert(Register::isPhysicalRegister(Reg) && "expected physreg");
578 bool displacedAny = displacePhysReg(MI, Reg);
579 setPhysRegState(Reg, regPreAssigned);
580 markRegUsedInInstr(Reg);
581 return displacedAny;
582}
583
584bool RegAllocFast::definePhysReg(MachineInstr &MI, MCPhysReg Reg) {
585 bool displacedAny = displacePhysReg(MI, Reg);
586 setPhysRegState(Reg, regPreAssigned);
587 return displacedAny;
588}
589
590/// Mark PhysReg as reserved or free after spilling any virtregs. This is very
591/// similar to defineVirtReg except the physreg is reserved instead of
592/// allocated.
593bool RegAllocFast::displacePhysReg(MachineInstr &MI, MCPhysReg PhysReg) {
594 bool displacedAny = false;
595
596 for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
597 unsigned Unit = *UI;
598 switch (unsigned VirtReg = RegUnitStates[Unit]) {
599 default: {
600 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
601 assert(LRI != LiveVirtRegs.end() && "datastructures in sync");
602 MachineBasicBlock::iterator ReloadBefore =
603 std::next((MachineBasicBlock::iterator)MI.getIterator());
604 reload(ReloadBefore, VirtReg, LRI->PhysReg);
605
606 setPhysRegState(LRI->PhysReg, regFree);
607 LRI->PhysReg = 0;
608 LRI->Reloaded = true;
609 displacedAny = true;
610 break;
611 }
612 case regPreAssigned:
613 RegUnitStates[Unit] = regFree;
614 displacedAny = true;
615 break;
616 case regFree:
617 break;
618 }
619 }
620 return displacedAny;
621}
622
623void RegAllocFast::freePhysReg(MCPhysReg PhysReg) {
624 LLVM_DEBUG(dbgs() << "Freeing " << printReg(PhysReg, TRI) << ':');
625
626 MCRegister FirstUnit = *MCRegUnitIterator(PhysReg, TRI);
627 switch (unsigned VirtReg = RegUnitStates[FirstUnit]) {
628 case regFree:
629 LLVM_DEBUG(dbgs() << '\n');
630 return;
631 case regPreAssigned:
632 LLVM_DEBUG(dbgs() << '\n');
633 setPhysRegState(PhysReg, regFree);
634 return;
635 default: {
636 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
637 assert(LRI != LiveVirtRegs.end());
638 LLVM_DEBUG(dbgs() << ' ' << printReg(LRI->VirtReg, TRI) << '\n');
639 setPhysRegState(LRI->PhysReg, regFree);
640 LRI->PhysReg = 0;
641 }
642 return;
643 }
644}
645
646/// Return the cost of spilling clearing out PhysReg and aliases so it is free
647/// for allocation. Returns 0 when PhysReg is free or disabled with all aliases
648/// disabled - it can be allocated directly.
649/// \returns spillImpossible when PhysReg or an alias can't be spilled.
650unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
651 for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
652 switch (unsigned VirtReg = RegUnitStates[*UI]) {
653 case regFree:
654 break;
655 case regPreAssigned:
656 LLVM_DEBUG(dbgs() << "Cannot spill pre-assigned "
657 << printReg(PhysReg, TRI) << '\n');
658 return spillImpossible;
659 default: {
660 bool SureSpill = StackSlotForVirtReg[VirtReg] != -1 ||
661 findLiveVirtReg(VirtReg)->LiveOut;
662 return SureSpill ? spillClean : spillDirty;
663 }
664 }
665 }
666 return 0;
667}
668
669void RegAllocFast::assignDanglingDebugValues(MachineInstr &Definition,
670 Register VirtReg, MCPhysReg Reg) {
671 auto UDBGValIter = DanglingDbgValues.find(VirtReg);
672 if (UDBGValIter == DanglingDbgValues.end())
673 return;
674
675 SmallVectorImpl<MachineInstr*> &Dangling = UDBGValIter->second;
676 for (MachineInstr *DbgValue : Dangling) {
677 assert(DbgValue->isDebugValue());
678 if (!DbgValue->hasDebugOperandForReg(VirtReg))
679 continue;
680
681 // Test whether the physreg survives from the definition to the DBG_VALUE.
682 MCPhysReg SetToReg = Reg;
683 unsigned Limit = 20;
684 for (MachineBasicBlock::iterator I = std::next(Definition.getIterator()),
685 E = DbgValue->getIterator(); I != E; ++I) {
686 if (I->modifiesRegister(Reg, TRI) || --Limit == 0) {
687 LLVM_DEBUG(dbgs() << "Register did not survive for " << *DbgValue
688 << '\n');
689 SetToReg = 0;
690 break;
691 }
692 }
693 for (MachineOperand &MO : DbgValue->getDebugOperandsForReg(VirtReg)) {
694 MO.setReg(SetToReg);
695 if (SetToReg != 0)
696 MO.setIsRenamable();
697 }
698 }
699 Dangling.clear();
700}
701
702/// This method updates local state so that we know that PhysReg is the
703/// proper container for VirtReg now. The physical register must not be used
704/// for anything else when this is called.
705void RegAllocFast::assignVirtToPhysReg(MachineInstr &AtMI, LiveReg &LR,
706 MCPhysReg PhysReg) {
707 Register VirtReg = LR.VirtReg;
708 LLVM_DEBUG(dbgs() << "Assigning " << printReg(VirtReg, TRI) << " to "
709 << printReg(PhysReg, TRI) << '\n');
710 assert(LR.PhysReg == 0 && "Already assigned a physreg");
711 assert(PhysReg != 0 && "Trying to assign no register");
712 LR.PhysReg = PhysReg;
713 setPhysRegState(PhysReg, VirtReg);
714
715 assignDanglingDebugValues(AtMI, VirtReg, PhysReg);
716}
717
718static bool isCoalescable(const MachineInstr &MI) {
719 return MI.isFullCopy();
720}
721
722Register RegAllocFast::traceCopyChain(Register Reg) const {
723 static const unsigned ChainLengthLimit = 3;
724 unsigned C = 0;
725 do {
726 if (Reg.isPhysical())
727 return Reg;
728 assert(Reg.isVirtual());
729
730 MachineInstr *VRegDef = MRI->getUniqueVRegDef(Reg);
731 if (!VRegDef || !isCoalescable(*VRegDef))
732 return 0;
733 Reg = VRegDef->getOperand(1).getReg();
734 } while (++C <= ChainLengthLimit);
735 return 0;
736}
737
738/// Check if any of \p VirtReg's definitions is a copy. If it is follow the
739/// chain of copies to check whether we reach a physical register we can
740/// coalesce with.
741Register RegAllocFast::traceCopies(Register VirtReg) const {
742 static const unsigned DefLimit = 3;
743 unsigned C = 0;
744 for (const MachineInstr &MI : MRI->def_instructions(VirtReg)) {
745 if (isCoalescable(MI)) {
746 Register Reg = MI.getOperand(1).getReg();
747 Reg = traceCopyChain(Reg);
748 if (Reg.isValid())
749 return Reg;
750 }
751
752 if (++C >= DefLimit)
753 break;
754 }
755 return Register();
756}
757
758/// Allocates a physical register for VirtReg.
759void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR,
760 Register Hint0, bool LookAtPhysRegUses) {
761 const Register VirtReg = LR.VirtReg;
762 assert(LR.PhysReg == 0);
763
764 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
765 LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg)
766 << " in class " << TRI->getRegClassName(&RC)
767 << " with hint " << printReg(Hint0, TRI) << '\n');
768
769 // Take hint when possible.
770 if (Hint0.isPhysical() && MRI->isAllocatable(Hint0) && RC.contains(Hint0) &&
771 !isRegUsedInInstr(Hint0, LookAtPhysRegUses)) {
772 // Take hint if the register is currently free.
773 if (isPhysRegFree(Hint0)) {
774 LLVM_DEBUG(dbgs() << "\tPreferred Register 1: " << printReg(Hint0, TRI)
775 << '\n');
776 assignVirtToPhysReg(MI, LR, Hint0);
777 return;
778 } else {
779 LLVM_DEBUG(dbgs() << "\tPreferred Register 0: " << printReg(Hint0, TRI)
780 << " occupied\n");
781 }
782 } else {
783 Hint0 = Register();
784 }
785
786
787 // Try other hint.
788 Register Hint1 = traceCopies(VirtReg);
789 if (Hint1.isPhysical() && MRI->isAllocatable(Hint1) && RC.contains(Hint1) &&
790 !isRegUsedInInstr(Hint1, LookAtPhysRegUses)) {
791 // Take hint if the register is currently free.
792 if (isPhysRegFree(Hint1)) {
793 LLVM_DEBUG(dbgs() << "\tPreferred Register 0: " << printReg(Hint1, TRI)
794 << '\n');
795 assignVirtToPhysReg(MI, LR, Hint1);
796 return;
797 } else {
798 LLVM_DEBUG(dbgs() << "\tPreferred Register 1: " << printReg(Hint1, TRI)
799 << " occupied\n");
800 }
801 } else {
802 Hint1 = Register();
803 }
804
805 MCPhysReg BestReg = 0;
806 unsigned BestCost = spillImpossible;
807 ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
808 for (MCPhysReg PhysReg : AllocationOrder) {
809 LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << ' ');
810 if (isRegUsedInInstr(PhysReg, LookAtPhysRegUses)) {
811 LLVM_DEBUG(dbgs() << "already used in instr.\n");
812 continue;
813 }
814
815 unsigned Cost = calcSpillCost(PhysReg);
816 LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
817 // Immediate take a register with cost 0.
818 if (Cost == 0) {
819 assignVirtToPhysReg(MI, LR, PhysReg);
820 return;
821 }
822
823 if (PhysReg == Hint0 || PhysReg == Hint1)
824 Cost -= spillPrefBonus;
825
826 if (Cost < BestCost) {
827 BestReg = PhysReg;
828 BestCost = Cost;
829 }
830 }
831
832 if (!BestReg) {
833 // Nothing we can do: Report an error and keep going with an invalid
834 // allocation.
835 if (MI.isInlineAsm())
836 MI.emitError("inline assembly requires more registers than available");
837 else
838 MI.emitError("ran out of registers during register allocation");
839
840 LR.Error = true;
841 LR.PhysReg = 0;
842 return;
843 }
844
845 displacePhysReg(MI, BestReg);
846 assignVirtToPhysReg(MI, LR, BestReg);
847}
848
849void RegAllocFast::allocVirtRegUndef(MachineOperand &MO) {
850 assert(MO.isUndef() && "expected undef use");
851 Register VirtReg = MO.getReg();
852 assert(VirtReg.isVirtual() && "Expected virtreg");
853 if (!shouldAllocateRegister(VirtReg))
854 return;
855
856 LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
857 MCPhysReg PhysReg;
858 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
859 PhysReg = LRI->PhysReg;
860 } else {
861 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
862 ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
863 assert(!AllocationOrder.empty() && "Allocation order must not be empty");
864 PhysReg = AllocationOrder[0];
865 }
866
867 unsigned SubRegIdx = MO.getSubReg();
868 if (SubRegIdx != 0) {
869 PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);
870 MO.setSubReg(0);
871 }
872 MO.setReg(PhysReg);
873 MO.setIsRenamable(true);
874}
875
876/// Variation of defineVirtReg() with special handling for livethrough regs
877/// (tied or earlyclobber) that may interfere with preassigned uses.
878void RegAllocFast::defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
879 Register VirtReg) {
880 if (!shouldAllocateRegister(VirtReg))
881 return;
882 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
883 if (LRI != LiveVirtRegs.end()) {
884 MCPhysReg PrevReg = LRI->PhysReg;
// If the current assignment collides with a physreg use of this same
// instruction, move the value to a fresh register and insert a COPY after MI
// so later (earlier-in-block) readers of PrevReg still see the value.
885 if (PrevReg != 0 && isRegUsedInInstr(PrevReg, true)) {
886 LLVM_DEBUG(dbgs() << "Need new assignment for " << printReg(PrevReg, TRI)
887 << " (tied/earlyclobber resolution)\n");
// Order matters: free the old register first, then allocate a new one with
// LookAtPhysRegUses=true so the conflicting physreg is avoided.
888 freePhysReg(PrevReg);
889 LRI->PhysReg = 0;
890 allocVirtReg(MI, *LRI, 0, true);
891 MachineBasicBlock::iterator InsertBefore =
892 std::next((MachineBasicBlock::iterator)MI.getIterator());
893 LLVM_DEBUG(dbgs() << "Copy " << printReg(LRI->PhysReg, TRI) << " to "
894 << printReg(PrevReg, TRI) << '\n');
895 BuildMI(*MBB, InsertBefore, MI.getDebugLoc(),
896 TII->get(TargetOpcode::COPY), PrevReg)
897 .addReg(LRI->PhysReg, llvm::RegState::Kill);
898 }
899 MachineOperand &MO = MI.getOperand(OpNum);
// A non-undef subregister def reads the other lanes of the register, so the
// instruction counts as a use as well.
900 if (MO.getSubReg() && !MO.isUndef()) {
901 LRI->LastUse = &MI;
902 }
903 }
// Delegate the rest of the def handling, keeping physreg-use checking on.
904 return defineVirtReg(MI, OpNum, VirtReg, true);
905}
906
907/// Allocates a register for VirtReg definition. Typically the register is
908/// already assigned from a use of the virtreg, however we still need to
909/// perform an allocation if:
910/// - It is a dead definition without any uses.
911/// - The value is live out and all uses are in different basic blocks.
912void RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
913 Register VirtReg, bool LookAtPhysRegUses) {
914 assert(VirtReg.isVirtual() && "Not a virtual register");
915 if (!shouldAllocateRegister(VirtReg))
916 return;
917 MachineOperand &MO = MI.getOperand(OpNum);
918 LiveRegMap::iterator LRI;
919 bool New;
920 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
// New entry: the block is scanned bottom-up, so seeing the def first means no
// in-block use follows — the value is either live-out or dead.
921 if (New) {
922 if (!MO.isDead()) {
923 if (mayLiveOut(VirtReg)) {
924 LRI->LiveOut = true;
925 } else {
926 // It is a dead def without the dead flag; add the flag now.
927 MO.setIsDead(true);
928 }
929 }
930 }
931 if (LRI->PhysReg == 0)
932 allocVirtReg(MI, *LRI, 0, LookAtPhysRegUses);
933 else {
934 assert(!isRegUsedInInstr(LRI->PhysReg, LookAtPhysRegUses) &&
935 "TODO: preassign mismatch");
936 LLVM_DEBUG(dbgs() << "In def of " << printReg(VirtReg, TRI)
937 << " use existing assignment to "
938 << printReg(LRI->PhysReg, TRI) << '\n');
939 }
940
941 MCPhysReg PhysReg = LRI->PhysReg;
942 assert(PhysReg != 0 && "Register not assigned");
// A def is where the value is created, so values that were reloaded later in
// the block or that are live-out must be spilled right after this def.
943 if (LRI->Reloaded || LRI->LiveOut) {
944 if (!MI.isImplicitDef()) {
945 MachineBasicBlock::iterator SpillBefore =
946 std::next((MachineBasicBlock::iterator)MI.getIterator());
947 LLVM_DEBUG(dbgs() << "Spill Reason: LO: " << LRI->LiveOut << " RL: "
948 << LRI->Reloaded << '\n');
949 bool Kill = LRI->LastUse == nullptr;
950 spill(SpillBefore, VirtReg, PhysReg, Kill, LRI->LiveOut);
951
952 // We need to place additional spills for each indirect destination of an
953 // INLINEASM_BR.
954 if (MI.getOpcode() == TargetOpcode::INLINEASM_BR) {
955 int FI = StackSlotForVirtReg[VirtReg];
956 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
957 for (MachineOperand &MO : MI.operands()) {
958 if (MO.isMBB()) {
959 MachineBasicBlock *Succ = MO.getMBB();
960 TII->storeRegToStackSlot(*Succ, Succ->begin(), PhysReg, Kill,
961 FI, &RC, TRI, VirtReg);
962 ++NumStores;
963 Succ->addLiveIn(PhysReg);
964 }
965 }
966 }
967
968 LRI->LastUse = nullptr;
969 }
970 LRI->LiveOut = false;
971 LRI->Reloaded = false;
972 }
973 if (MI.getOpcode() == TargetOpcode::BUNDLE) {
974 BundleVirtRegsMap[VirtReg] = PhysReg;
975 }
976 markRegUsedInInstr(PhysReg);
// Finally rewrite the operand itself to the physical register.
977 setPhysReg(MI, MO, PhysReg);
978}
979
980/// Allocates a register for a VirtReg use.
981void RegAllocFast::useVirtReg(MachineInstr &MI, unsigned OpNum,
982 Register VirtReg) {
983 assert(VirtReg.isVirtual() && "Not a virtual register");
984 if (!shouldAllocateRegister(VirtReg))
985 return;
986 MachineOperand &MO = MI.getOperand(OpNum);
987 LiveRegMap::iterator LRI;
988 bool New;
989 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
990 if (New) {
991 MachineOperand &MO = MI.getOperand(OpNum);
992 if (!MO.isKill()) {
993 if (mayLiveOut(VirtReg)) {
994 LRI->LiveOut = true;
995 } else {
996 // It is a last (killing) use without the kill flag; add the flag now.
997 MO.setIsKill(true);
998 }
999 }
1000 } else {
1001 assert((!MO.isKill() || LRI->LastUse == &MI) && "Invalid kill flag");
1002 }
1003
1004 // If necessary allocate a register.
1005 if (LRI->PhysReg == 0) {
1006 assert(!MO.isTied() && "tied op should be allocated");
1007 Register Hint;
1008 if (MI.isCopy() && MI.getOperand(1).getSubReg() == 0) {
1009 Hint = MI.getOperand(0).getReg();
1010 if (Hint.isVirtual()) {
1011 assert(!shouldAllocateRegister(Hint));
1012 Hint = Register();
1013 } else {
1014 assert(Hint.isPhysical() &&
1015 "Copy destination should already be assigned");
1016 }
1017 }
1018 allocVirtReg(MI, *LRI, Hint, false);
1019 if (LRI->Error) {
1020 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
1021 ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
1022 setPhysReg(MI, MO, *AllocationOrder.begin());
1023 return;
1024 }
1025 }
1026
1027 LRI->LastUse = &MI;
1028
1029 if (MI.getOpcode() == TargetOpcode::BUNDLE) {
1030 BundleVirtRegsMap[VirtReg] = LRI->PhysReg;
1031 }
1032 markRegUsedInInstr(LRI->PhysReg);
1033 setPhysReg(MI, MO, LRI->PhysReg);
1034}
1035
/// Changes operand \p MO in \p MI to refer to \p PhysReg, considering subregs.
/// This may invalidate any operand pointers. A kill of a subregister is
/// promoted to a kill of the full register, and a <def,read-undef> of a
/// subregister gets a matching implicit def (or dead marker) of the full
/// register.
void RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
                              MCPhysReg PhysReg) {
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    MO.setIsRenamable(true);
    return;
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : MCRegister());
  MO.setIsRenamable(true);
  // Note: We leave the subreg number around a little longer in case of defs.
  // This is so that the register freeing logic in allocateInstruction can still
  // recognize this as subregister defs. The code there will clear the number.
  if (!MO.isDef())
    MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI.addRegisterKilled(PhysReg, TRI, true);
    return;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef()) {
    if (MO.isDead())
      MI.addRegisterDead(PhysReg, TRI, true);
    else
      MI.addRegisterDefined(PhysReg, TRI);
  }
}
1072
1073#ifndef NDEBUG
1074
1075void RegAllocFast::dumpState() const {
1076 for (unsigned Unit = 1, UnitE = TRI->getNumRegUnits(); Unit != UnitE;
1077 ++Unit) {
1078 switch (unsigned VirtReg = RegUnitStates[Unit]) {
1079 case regFree:
1080 break;
1081 case regPreAssigned:
1082 dbgs() << " " << printRegUnit(Unit, TRI) << "[P]";
1083 break;
1084 case regLiveIn:
1085 llvm_unreachable("Should not have regLiveIn in map");
1086 default: {
1087 dbgs() << ' ' << printRegUnit(Unit, TRI) << '=' << printReg(VirtReg);
1088 LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
1089 assert(I != LiveVirtRegs.end() && "have LiveVirtRegs entry");
1090 if (I->LiveOut || I->Reloaded) {
1091 dbgs() << '[';
1092 if (I->LiveOut) dbgs() << 'O';
1093 if (I->Reloaded) dbgs() << 'R';
1094 dbgs() << ']';
1095 }
1096 assert(TRI->hasRegUnit(I->PhysReg, Unit) && "inverse mapping present");
1097 break;
1098 }
1099 }
1100 }
1101 dbgs() << '\n';
1102 // Check that LiveVirtRegs is the inverse.
1103 for (const LiveReg &LR : LiveVirtRegs) {
1104 Register VirtReg = LR.VirtReg;
1105 assert(VirtReg.isVirtual() && "Bad map key");
1106 MCPhysReg PhysReg = LR.PhysReg;
1107 if (PhysReg != 0) {
1109 "mapped to physreg");
1110 for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
1111 assert(RegUnitStates[*UI] == VirtReg && "inverse map valid");
1112 }
1113 }
1114 }
1115}
1116#endif
1117
1118/// Count number of defs consumed from each register class by \p Reg
1119void RegAllocFast::addRegClassDefCounts(std::vector<unsigned> &RegClassDefCounts,
1120 Register Reg) const {
1121 assert(RegClassDefCounts.size() == TRI->getNumRegClasses());
1122
1123 if (Reg.isVirtual()) {
1124 if (!shouldAllocateRegister(Reg))
1125 return;
1126 const TargetRegisterClass *OpRC = MRI->getRegClass(Reg);
1127 for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
1128 RCIdx != RCIdxEnd; ++RCIdx) {
1129 const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
1130 // FIXME: Consider aliasing sub/super registers.
1131 if (OpRC->hasSubClassEq(IdxRC))
1132 ++RegClassDefCounts[RCIdx];
1133 }
1134
1135 return;
1136 }
1137
1138 for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
1139 RCIdx != RCIdxEnd; ++RCIdx) {
1140 const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
1141 for (MCRegAliasIterator Alias(Reg, TRI, true); Alias.isValid(); ++Alias) {
1142 if (IdxRC->contains(*Alias)) {
1143 ++RegClassDefCounts[RCIdx];
1144 break;
1145 }
1146 }
1147 }
1148}
1149
/// Allocates all register operands of a single instruction. Because the
/// allocator walks the block bottom-up, defs are processed (and their
/// registers freed) before uses are allocated; pre-assigned physregs and
/// regmask clobbers are applied to the state in between.
void RegAllocFast::allocateInstruction(MachineInstr &MI) {
  // The basic algorithm here is:
  // 1. Mark registers of def operands as free
  // 2. Allocate registers to use operands and place reload instructions for
  //    registers displaced by the allocation.
  //
  // However we need to handle some corner cases:
  // - pre-assigned defs and uses need to be handled before the other def/use
  //   operands are processed to avoid the allocation heuristics clashing with
  //   the pre-assignment.
  // - The "free def operands" step has to come last instead of first for tied
  //   operands and early-clobbers.

  UsedInInstr.clear();
  RegMasks.clear();
  BundleVirtRegsMap.clear();

  // Returns true if the operand tied to MO (at index Idx) is an undef read.
  auto TiedOpIsUndef = [&](const MachineOperand &MO, unsigned Idx) {
    assert(MO.isTied());
    unsigned TiedIdx = MI.findTiedOperandIdx(Idx);
    const MachineOperand &TiedMO = MI.getOperand(TiedIdx);
    return TiedMO.isUndef();
  };
  // Scan for special cases; Apply pre-assigned register defs to state.
  bool HasPhysRegUse = false;
  bool HasRegMask = false;
  bool HasVRegDef = false;
  bool HasDef = false;
  bool HasEarlyClobber = false;
  bool NeedToAssignLiveThroughs = false;
  for (unsigned I = 0; I < MI.getNumOperands(); ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (MO.isReg()) {
      Register Reg = MO.getReg();
      if (Reg.isVirtual()) {
        if (!shouldAllocateRegister(Reg))
          continue;
        if (MO.isDef()) {
          HasDef = true;
          HasVRegDef = true;
          if (MO.isEarlyClobber()) {
            HasEarlyClobber = true;
            NeedToAssignLiveThroughs = true;
          }
          // Tied (non-undef) and live subregister defs also read the
          // register, so they must be treated as live-through.
          if ((MO.isTied() && !TiedOpIsUndef(MO, I)) ||
              (MO.getSubReg() != 0 && !MO.isUndef()))
            NeedToAssignLiveThroughs = true;
        }
      } else if (Reg.isPhysical()) {
        if (!MRI->isReserved(Reg)) {
          if (MO.isDef()) {
            HasDef = true;
            bool displacedAny = definePhysReg(MI, Reg);
            if (MO.isEarlyClobber())
              HasEarlyClobber = true;
            // Nothing below MI used the value: the def is dead.
            if (!displacedAny)
              MO.setIsDead(true);
          }
          if (MO.readsReg())
            HasPhysRegUse = true;
        }
      }
    } else if (MO.isRegMask()) {
      HasRegMask = true;
      RegMasks.push_back(MO.getRegMask());
    }
  }

  // Allocate virtreg defs.
  if (HasDef) {
    if (HasVRegDef) {
      // Special handling for early clobbers, tied operands or subregister defs:
      // Compared to "normal" defs these:
      // - Must not use a register that is pre-assigned for a use operand.
      // - In order to solve tricky inline assembly constraints we change the
      //   heuristic to figure out a good operand order before doing
      //   assignments.
      if (NeedToAssignLiveThroughs) {
        DefOperandIndexes.clear();
        PhysRegUses.clear();

        // Track number of defs which may consume a register from the class.
        std::vector<unsigned> RegClassDefCounts(TRI->getNumRegClasses(), 0);
        assert(RegClassDefCounts[0] == 0);

        LLVM_DEBUG(dbgs() << "Need to assign livethroughs\n");
        for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
          const MachineOperand &MO = MI.getOperand(I);
          if (!MO.isReg())
            continue;
          Register Reg = MO.getReg();
          if (MO.readsReg()) {
            if (Reg.isPhysical()) {
              LLVM_DEBUG(dbgs() << "mark extra used: " << printReg(Reg, TRI)
                                << '\n');
              markPhysRegUsedInInstr(Reg);
            }
          }

          if (MO.isDef()) {
            if (Reg.isVirtual() && shouldAllocateRegister(Reg))
              DefOperandIndexes.push_back(I);

            addRegClassDefCounts(RegClassDefCounts, Reg);
          }
        }

        // Order the defs so that scarce classes and livethrough operands are
        // assigned first; ties fall back to operand order.
        llvm::sort(DefOperandIndexes, [&](uint16_t I0, uint16_t I1) {
          const MachineOperand &MO0 = MI.getOperand(I0);
          const MachineOperand &MO1 = MI.getOperand(I1);
          Register Reg0 = MO0.getReg();
          Register Reg1 = MO1.getReg();
          const TargetRegisterClass &RC0 = *MRI->getRegClass(Reg0);
          const TargetRegisterClass &RC1 = *MRI->getRegClass(Reg1);

          // Identify regclass that are easy to use up completely just in this
          // instruction.
          unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
          unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();

          bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
          bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
          if (SmallClass0 > SmallClass1)
            return true;
          if (SmallClass0 < SmallClass1)
            return false;

          // Allocate early clobbers and livethrough operands first.
          bool Livethrough0 = MO0.isEarlyClobber() || MO0.isTied() ||
                              (MO0.getSubReg() == 0 && !MO0.isUndef());
          bool Livethrough1 = MO1.isEarlyClobber() || MO1.isTied() ||
                              (MO1.getSubReg() == 0 && !MO1.isUndef());
          if (Livethrough0 > Livethrough1)
            return true;
          if (Livethrough0 < Livethrough1)
            return false;

          // Tie-break rule: operand index.
          return I0 < I1;
        });

        for (uint16_t OpIdx : DefOperandIndexes) {
          MachineOperand &MO = MI.getOperand(OpIdx);
          LLVM_DEBUG(dbgs() << "Allocating " << MO << '\n');
          unsigned Reg = MO.getReg();
          if (MO.isEarlyClobber() ||
              (MO.isTied() && !TiedOpIsUndef(MO, OpIdx)) ||
              (MO.getSubReg() && !MO.isUndef())) {
            defineLiveThroughVirtReg(MI, OpIdx, Reg);
          } else {
            defineVirtReg(MI, OpIdx, Reg);
          }
        }
      } else {
        // Assign virtual register defs.
        for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
          MachineOperand &MO = MI.getOperand(I);
          if (!MO.isReg() || !MO.isDef())
            continue;
          Register Reg = MO.getReg();
          if (Reg.isVirtual())
            defineVirtReg(MI, I, Reg);
        }
      }
    }

    // Free registers occupied by defs.
    // Iterate operands in reverse order, so we see the implicit super register
    // defs first (we added them earlier in case of <def,read-undef>).
    for (signed I = MI.getNumOperands() - 1; I >= 0; --I) {
      MachineOperand &MO = MI.getOperand(I);
      if (!MO.isReg() || !MO.isDef())
        continue;

      // subreg defs don't free the full register. We left the subreg number
      // around as a marker in setPhysReg() to recognize this case here.
      if (MO.getSubReg() != 0) {
        MO.setSubReg(0);
        continue;
      }

      assert((!MO.isTied() || !isClobberedByRegMasks(MO.getReg())) &&
             "tied def assigned to clobbered register");

      // Do not free tied operands and early clobbers.
      if ((MO.isTied() && !TiedOpIsUndef(MO, I)) || MO.isEarlyClobber())
        continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;
      if (Reg.isVirtual()) {
        assert(!shouldAllocateRegister(Reg));
        continue;
      }
      assert(Reg.isPhysical());
      if (MRI->isReserved(Reg))
        continue;
      freePhysReg(Reg);
      unmarkRegUsedInInstr(Reg);
    }
  }

  // Displace clobbered registers.
  if (HasRegMask) {
    assert(!RegMasks.empty() && "expected RegMask");
    // MRI bookkeeping.
    for (const auto *RM : RegMasks)
      MRI->addPhysRegsUsedFromRegMask(RM);

    // Displace clobbered registers.
    for (const LiveReg &LR : LiveVirtRegs) {
      MCPhysReg PhysReg = LR.PhysReg;
      if (PhysReg != 0 && isClobberedByRegMasks(PhysReg))
        displacePhysReg(MI, PhysReg);
    }
  }

  // Apply pre-assigned register uses to state.
  if (HasPhysRegUse) {
    for (MachineOperand &MO : MI.operands()) {
      if (!MO.isReg() || !MO.readsReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg.isPhysical())
        continue;
      if (MRI->isReserved(Reg))
        continue;
      bool displacedAny = usePhysReg(MI, Reg);
      // No value below MI reads the register: this use kills it.
      if (!displacedAny)
        MO.setIsKill(true);
    }
  }

  // Allocate virtreg uses and insert reloads as necessary.
  bool HasUndefUse = false;
  for (unsigned I = 0; I < MI.getNumOperands(); ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg() || !MO.isUse())
      continue;
    Register Reg = MO.getReg();
    if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
      continue;

    if (MO.isUndef()) {
      HasUndefUse = true;
      continue;
    }


    // Populate MayLiveAcrossBlocks in case the use block is allocated before
    // the def block (removing the vreg uses).
    mayLiveIn(Reg);


    assert(!MO.isInternalRead() && "Bundles not supported");
    assert(MO.readsReg() && "reading use");
    useVirtReg(MI, I, Reg);
  }

  // Allocate undef operands. This is a separate step because in a situation
  // like ` = OP undef %X, %X` both operands need the same register assign
  // so we should perform the normal assignment first.
  if (HasUndefUse) {
    for (MachineOperand &MO : MI.uses()) {
      if (!MO.isReg() || !MO.isUse())
        continue;
      Register Reg = MO.getReg();
      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
        continue;

      assert(MO.isUndef() && "Should only have undef virtreg uses left");
      allocVirtRegUndef(MO);
    }
  }

  // Free early clobbers.
  if (HasEarlyClobber) {
    for (MachineOperand &MO : llvm::reverse(MI.operands())) {
      if (!MO.isReg() || !MO.isDef() || !MO.isEarlyClobber())
        continue;
      assert(!MO.getSubReg() && "should be already handled in def processing");

      Register Reg = MO.getReg();
      if (!Reg)
        continue;
      if (Reg.isVirtual()) {
        assert(!shouldAllocateRegister(Reg));
        continue;
      }
      assert(Reg.isPhysical() && "should have register assigned");

      // We sometimes get odd situations like:
      //    early-clobber %x0 = INSTRUCTION %x0
      // which is semantically questionable as the early-clobber should
      // apply before the use. But in practice we consider the use to
      // happen before the early clobber now. Don't free the early clobber
      // register in this case.
      if (MI.readsRegister(Reg, TRI))
        continue;

      freePhysReg(Reg);
    }
  }

  LLVM_DEBUG(dbgs() << "<< " << MI);
  if (MI.isCopy() && MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getNumOperands() == 2) {
    // Self-copy: schedule it for deletion (see allocateBasicBlock).
    LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
    Coalesced.push_back(&MI);
  }
}
1461
1462void RegAllocFast::handleDebugValue(MachineInstr &MI) {
1463 // Ignore DBG_VALUEs that aren't based on virtual registers. These are
1464 // mostly constants and frame indices.
1465 for (Register Reg : MI.getUsedDebugRegs()) {
1466 if (!Reg.isVirtual())
1467 continue;
1468 if (!shouldAllocateRegister(Reg))
1469 continue;
1470
1471 // Already spilled to a stackslot?
1472 int SS = StackSlotForVirtReg[Reg];
1473 if (SS != -1) {
1474 // Modify DBG_VALUE now that the value is in a spill slot.
1475 updateDbgValueForSpill(MI, SS, Reg);
1476 LLVM_DEBUG(dbgs() << "Rewrite DBG_VALUE for spilled memory: " << MI);
1477 continue;
1478 }
1479
1480 // See if this virtual register has already been allocated to a physical
1481 // register or spilled to a stack slot.
1482 LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
1484 for (MachineOperand &Op : MI.getDebugOperandsForReg(Reg))
1485 DbgOps.push_back(&Op);
1486
1487 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1488 // Update every use of Reg within MI.
1489 for (auto &RegMO : DbgOps)
1490 setPhysReg(MI, *RegMO, LRI->PhysReg);
1491 } else {
1492 DanglingDbgValues[Reg].push_back(&MI);
1493 }
1494
1495 // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
1496 // that future spills of Reg will have DBG_VALUEs.
1497 LiveDbgValueMap[Reg].append(DbgOps.begin(), DbgOps.end());
1498 }
1499}
1500
1501void RegAllocFast::handleBundle(MachineInstr &MI) {
1502 MachineBasicBlock::instr_iterator BundledMI = MI.getIterator();
1503 ++BundledMI;
1504 while (BundledMI->isBundledWithPred()) {
1505 for (MachineOperand &MO : BundledMI->operands()) {
1506 if (!MO.isReg())
1507 continue;
1508
1509 Register Reg = MO.getReg();
1510 if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
1511 continue;
1512
1514 DI = BundleVirtRegsMap.find(Reg);
1515 assert(DI != BundleVirtRegsMap.end() && "Unassigned virtual register");
1516
1517 setPhysReg(MI, MO, DI->second);
1518 }
1519
1520 ++BundledMI;
1521 }
1522}
1523
/// Allocates registers for all instructions of \p MBB. The block is walked
/// in reverse so a def ends the live range seen so far; registers live at
/// block exit start out pre-assigned, and values still live at the block
/// entry are reloaded there afterwards.
void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
  this->MBB = &MBB;
  LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);

  RegUnitStates.assign(TRI->getNumRegUnits(), regFree);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  // Registers live-out of the block must not be reused for anything else.
  for (const auto &LiveReg : MBB.liveouts())
    setPhysRegState(LiveReg.PhysReg, regPreAssigned);

  Coalesced.clear();

  // Traverse block in reverse order allocating instructions one by one.
  for (MachineInstr &MI : reverse(MBB)) {
    LLVM_DEBUG(
        dbgs() << "\n>> " << MI << "Regs:";
        dumpState()
    );

    // Special handling for debug values. Note that they are not allowed to
    // affect codegen of the other instructions in any way.
    if (MI.isDebugValue()) {
      handleDebugValue(MI);
      continue;
    }

    allocateInstruction(MI);

    // Once BUNDLE header is assigned registers, same assignments need to be
    // done for bundled MIs.
    if (MI.getOpcode() == TargetOpcode::BUNDLE) {
      handleBundle(MI);
    }
  }

  LLVM_DEBUG(
      dbgs() << "Begin Regs:";
      dumpState()
  );

  // Spill all physical registers holding virtual registers now.
  LLVM_DEBUG(dbgs() << "Loading live registers at begin of block.\n");
  reloadAtBegin(MBB);

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (MachineInstr *MI : Coalesced)
    MBB.erase(MI);
  NumCoalesced += Coalesced.size();

  // DBG_VALUEs whose vreg never got a location in this block become undef.
  for (auto &UDBGPair : DanglingDbgValues) {
    for (MachineInstr *DbgValue : UDBGPair.second) {
      assert(DbgValue->isDebugValue() && "expected DBG_VALUE");
      // Nothing to do if the vreg was spilled in the meantime.
      if (!DbgValue->hasDebugOperandForReg(UDBGPair.first))
        continue;
      LLVM_DEBUG(dbgs() << "Register did not survive for " << *DbgValue
                 << '\n');
      DbgValue->setDebugValueUndef();
    }
  }
  DanglingDbgValues.clear();

  LLVM_DEBUG(MBB.dump());
}
1589
/// Pass entry point: caches target/function info, sizes the allocator's
/// per-function data structures, allocates every basic block, and (when
/// ClearVirtRegs is set) drops the virtual-register bookkeeping.
/// \return true — the function is always modified.
bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  MRI = &MF.getRegInfo();
  const TargetSubtargetInfo &STI = MF.getSubtarget();
  TRI = STI.getRegisterInfo();
  TII = STI.getInstrInfo();
  MFI = &MF.getFrameInfo();
  MRI->freezeReservedRegs(MF);
  RegClassInfo.runOnMachineFunction(MF);
  // Per-instruction scratch sets are keyed by register unit.
  unsigned NumRegUnits = TRI->getNumRegUnits();
  UsedInInstr.clear();
  UsedInInstr.setUniverse(NumRegUnits);
  PhysRegUses.clear();
  PhysRegUses.setUniverse(NumRegUnits);

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);
  MayLiveAcrossBlocks.clear();
  MayLiveAcrossBlocks.resize(NumVirtRegs);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  if (ClearVirtRegs) {
    // All machine operands and other references to virtual registers have been
    // replaced. Remove the virtual registers.
    MRI->clearVirtRegs();
  }

  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}
1628
1630 return new RegAllocFast();
1631}
1632
1634 bool ClearVirtRegs) {
1635 return new RegAllocFast(Ftor, ClearVirtRegs);
1636}
unsigned const MachineRegisterInfo * MRI
aarch64 promote const
MachineBasicBlock & MBB
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
#define LLVM_DEBUG(X)
Definition: Debug.h:101
This file defines the DenseMap class.
uint64_t Size
const HexagonInstrInfo * TII
IRTranslator LLVM IR MI
This file implements an indexed map.
#define F(x, y, z)
Definition: MD5.cpp:55
#define I(x, y, z)
Definition: MD5.cpp:58
unsigned const TargetRegisterInfo * TRI
This file implements a map that provides insertion order iteration.
#define P(N)
if(VerifyEach)
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
Definition: PassSupport.h:38
static bool dominates(MachineBasicBlock &MBB, MachineBasicBlock::const_iterator A, MachineBasicBlock::const_iterator B)
static bool isCoalescable(const MachineInstr &MI)
static cl::opt< bool > IgnoreMissingDefs("rafast-ignore-missing-defs", cl::Hidden)
static RegisterRegAlloc fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallSet class.
This file defines the SmallVector class.
This file defines the SparseSet class derived from the version described in Briggs,...
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Definition: Statistic.h:167
Class recording the (high level) value of a variable.
Iterator begin() const
Represent the analysis usage information of a pass.
void setPreservesCFG()
This function should be called by the pass, iff they do not:
Definition: Pass.cpp:265
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
Definition: ArrayRef.h:41
bool test(unsigned Idx) const
Definition: BitVector.h:461
void resize(unsigned N, bool t=false)
resize - Grow or shrink the bitvector.
Definition: BitVector.h:341
void clear()
clear - Removes all bits from the bitvector.
Definition: BitVector.h:335
BitVector & set()
Definition: BitVector.h:351
iterator find(const_arg_type_t< KeyT > Val)
Definition: DenseMap.h:155
iterator end()
Definition: DenseMap.h:84
Lightweight error class with error context and mandatory checking.
Definition: Error.h:156
FunctionPass class - This class is used to implement most global optimizations.
Definition: Pass.h:308
void storeRegToStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, Register SrcReg, bool isKill, int FrameIndex, const TargetRegisterClass *RC, const TargetRegisterInfo *TRI, Register VReg) const override
Store the specified register of the given register class to the specified stack frame index.
void loadRegFromStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, Register DestReg, int FrameIndex, const TargetRegisterClass *RC, const TargetRegisterInfo *TRI, Register VReg) const override
Load the specified register of the given register class from the specified stack frame index.
void resize(typename StorageT::size_type s)
Definition: IndexedMap.h:60
MCRegAliasIterator enumerates all registers aliasing Reg.
bool isValid() const
isValid - returns true if this iterator is not yet at the end.
Wrapper class representing physical registers. Should be passed by value.
Definition: MCRegister.h:24
iterator_range< liveout_iterator > liveouts() const
instr_iterator insert(instr_iterator I, MachineInstr *M)
Insert MI into the instruction list before I, possibly inside a bundle.
iterator_range< livein_iterator > liveins() const
iterator getFirstTerminator()
Returns an iterator to the first terminator instruction of this basic block.
void addLiveIn(MCRegister PhysReg, LaneBitmask LaneMask=LaneBitmask::getAll())
Adds the specified register as a live in.
const MachineFunction * getParent() const
Return the MachineFunction containing this basic block.
instr_iterator erase(instr_iterator I)
Remove an instruction from the instruction list and delete it.
bool isSuccessor(const MachineBasicBlock *MBB) const
Return true if the specified MBB is a successor of this block.
The MachineFrameInfo class represents an abstract stack frame until prolog/epilog code is inserted.
int CreateSpillStackObject(uint64_t Size, Align Alignment)
Create a new statically sized stack object that represents a spill slot, returning a nonnegative iden...
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
virtual MachineFunctionProperties getClearedProperties() const
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
virtual MachineFunctionProperties getSetProperties() const
virtual bool runOnMachineFunction(MachineFunction &MF)=0
runOnMachineFunction - This method must be overloaded to perform the desired machine code transformat...
virtual MachineFunctionProperties getRequiredProperties() const
Properties which a MachineFunction may have at a given point in time.
MachineFunctionProperties & set(Property P)
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
StringRef getName() const
getName - Return the name of the corresponding LLVM function.
MachineFrameInfo & getFrameInfo()
getFrameInfo - Return the frame info object for the current function.
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
MachineInstr * CloneMachineInstr(const MachineInstr *Orig)
Create a new MachineInstr which is a copy of Orig, identical in all ways except the instruction has n...
const MachineBasicBlock & front() const
const MachineInstrBuilder & addReg(Register RegNo, unsigned flags=0, unsigned SubReg=0) const
Add a new virtual register operand.
Representation of each machine instruction.
Definition: MachineInstr.h:68
bool isDebugValueList() const
const MachineBasicBlock * getParent() const
Definition: MachineInstr.h:313
bool isNonListDebugValue() const
MachineOperand & getDebugOperand(unsigned Index)
Definition: MachineInstr.h:535
const MachineOperand & getOperand(unsigned i) const
Definition: MachineInstr.h:526
MachineOperand class - Representation of each machine instruction operand.
void setSubReg(unsigned subReg)
unsigned getSubReg() const
bool readsReg() const
readsReg - Returns true if this operand reads the previous value of its register.
void setIsRenamable(bool Val=true)
bool isReg() const
isReg - Tests if this is a MO_Register operand.
bool isRegMask() const
isRegMask - Tests if this is a MO_RegisterMask operand.
MachineBasicBlock * getMBB() const
void setIsDead(bool Val=true)
void setReg(Register Reg)
Change the register this operand corresponds to.
void setIsKill(bool Val=true)
bool isEarlyClobber() const
Register getReg() const
getReg - Returns the register number.
bool isInternalRead() const
static bool clobbersPhysReg(const uint32_t *RegMask, MCRegister PhysReg)
clobbersPhysReg - Returns true if this RegMask clobbers PhysReg.
const uint32_t * getRegMask() const
getRegMask - Returns a bit mask of registers preserved by this RegMask operand.
bool isMBB() const
isMBB - Tests if this is a MO_MachineBasicBlock operand.
MachineRegisterInfo - Keep track of information for virtual and physical registers,...
virtual StringRef getPassName() const
getPassName - Return a nice clean name for a pass.
Definition: Pass.cpp:81
void runOnMachineFunction(const MachineFunction &MF)
runOnFunction - Prepare to answer questions about MF.
ArrayRef< MCPhysReg > getOrder(const TargetRegisterClass *RC) const
getOrder - Returns the preferred allocation order for RC.
Wrapper class representing virtual and physical registers.
Definition: Register.h:19
bool isPhysical() const
Return true if the specified register number is in the physical register namespace.
Definition: Register.h:97
static unsigned virtReg2Index(Register Reg)
Convert a virtual register number to a 0-based index.
Definition: Register.h:77
bool isVirtual() const
Return true if the specified register number is in the virtual register namespace.
Definition: Register.h:91
static bool isPhysicalRegister(unsigned Reg)
Return true if the specified register number is in the physical register namespace.
Definition: Register.h:65
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
Definition: SmallSet.h:135
size_type count(const T &V) const
count - Return 1 if the element is in the set, 0 otherwise.
Definition: SmallSet.h:164
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
Definition: SmallSet.h:177
bool empty() const
Definition: SmallVector.h:94
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
Definition: SmallVector.h:577
void push_back(const T &Elt)
Definition: SmallVector.h:416
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Definition: SmallVector.h:1200
SparseSet - Fast set implementation for objects that can be identified by small unsigned keys.
Definition: SparseSet.h:124
typename DenseT::iterator iterator
Definition: SparseSet.h:171
typename DenseT::const_iterator const_iterator
Definition: SparseSet.h:172
StringRef - Represent a constant reference to a string, i.e.
Definition: StringRef.h:50
TargetInstrInfo - Interface to description of machine instruction set.
unsigned getID() const
Return the register class ID number.
bool contains(Register Reg) const
Return true if the specified register is included in this register class.
bool hasSubClassEq(const TargetRegisterClass *RC) const
Returns true if RC is a sub-class of or equal to this class.
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDes...
TargetSubtargetInfo - Generic base class for all target subtargets.
virtual const TargetRegisterInfo * getRegisterInfo() const
getRegisterInfo - If register information is available, return it.
virtual const TargetInstrInfo * getInstrInfo() const
Iterator for intrusive lists based on ilist_node.
self_iterator getIterator()
Definition: ilist_node.h:82
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
Definition: CallingConv.h:24
@ C
The default llvm calling convention, compatible with C.
Definition: CallingConv.h:34
@ Kill
The last use of a register.
@ SS
Definition: X86.h:209
Reg
All possible values of the reg field in the ModR/M byte.
This is an optimization pass for GlobalISel generic memory operations.
Definition: AddressRanges.h:18
FunctionPass * createFastRegisterAllocator()
FastRegisterAllocation Pass - This pass register allocates as fast as possible.
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
Definition: STLExtras.h:1777
MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)
Builder interface. Specify how to create the initial instruction itself.
void updateDbgValueForSpill(MachineInstr &Orig, int FrameIndex, Register Reg)
Update a DBG_VALUE whose value has been spilled to FrameIndex.
Printable printRegUnit(unsigned Unit, const TargetRegisterInfo *TRI)
Create Printable object to print register units on a raw_ostream.
static bool allocateAllRegClasses(const TargetRegisterInfo &, const TargetRegisterClass &)
Default register class filter function for register allocation.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
Definition: STLExtras.h:1826
auto reverse(ContainerTy &&C)
Definition: STLExtras.h:511
void sort(IteratorTy Start, IteratorTy End)
Definition: STLExtras.h:1744
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition: Debug.cpp:163
MachineInstr * buildDbgValueForSpill(MachineBasicBlock &BB, MachineBasicBlock::iterator I, const MachineInstr &Orig, int FrameIndex, Register SpillReg)
Clone a DBG_VALUE whose value has been spilled to FrameIndex.
Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.
std::function< bool(const TargetRegisterInfo &TRI, const TargetRegisterClass &RC)> RegClassFilterFunc
This struct is a compact representation of a valid (non-zero power of two) alignment.
Definition: Alignment.h:39
Pair of physical register and lane mask.
A MapVector that performs no allocations if smaller than a certain size.
Definition: MapVector.h:234