LLVM 23.0.0git
MachineCopyPropagation.cpp
Go to the documentation of this file.
1//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This is an extremely simple MachineInstr-level copy propagation pass.
10//
11// This pass forwards the source of COPYs to the users of their destinations
12// when doing so is legal. For example:
13//
14// %reg1 = COPY %reg0
15// ...
16// ... = OP %reg1
17//
18// If
19// - %reg0 has not been clobbered by the time of the use of %reg1
20// - the register class constraints are satisfied
21// - the COPY def is the only value that reaches OP
22// then this pass replaces the above with:
23//
24// %reg1 = COPY %reg0
25// ...
26// ... = OP %reg0
27//
28// This pass also removes some redundant COPYs. For example:
29//
30// %R1 = COPY %R0
31// ... // No clobber of %R1
32// %R0 = COPY %R1 <<< Removed
33//
34// or
35//
36// %R1 = COPY %R0
37// ... // No clobber of %R0
38// %R1 = COPY %R0 <<< Removed
39//
40// or
41//
42// $R0 = OP ...
43// ... // No read/clobber of $R0 and $R1
44// $R1 = COPY $R0 // $R0 is killed
45// Replace $R0 with $R1 and remove the COPY
46// $R1 = OP ...
47// ...
48//
49//===----------------------------------------------------------------------===//
50
52#include "llvm/ADT/DenseMap.h"
53#include "llvm/ADT/STLExtras.h"
54#include "llvm/ADT/SetVector.h"
55#include "llvm/ADT/SmallSet.h"
57#include "llvm/ADT/Statistic.h"
69#include "llvm/MC/MCRegister.h"
71#include "llvm/Pass.h"
72#include "llvm/Support/Debug.h"
75#include <cassert>
76#include <iterator>
77
using namespace llvm;

// Debug type used by LLVM_DEBUG/STATISTIC/DEBUG_COUNTER in this file.
#define DEBUG_TYPE "machine-cp"

// Pass-wide statistics, reported with -stats.
STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
STATISTIC(SpillageChainsLength, "Length of spillage chains");
STATISTIC(NumSpillageChains, "Number of spillage chains");
// Debug counter that can selectively disable individual forwardings for
// bisection (-debug-counter=machine-cp-fwd=...).
DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
              "Controls which register COPYs are forwarded");

// NOTE(review): the two cl::opt declarations below appear truncated in this
// copy — the trailing arguments of MCPUseCopyInstr and the
// 'static cl::opt<bool>' introducing EnableSpillageCopyElimination seem to
// have been dropped; confirm against upstream before relying on this text.
static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
94
95namespace {
96
97MCRegister asPhysMCReg(const MachineOperand *Operand) {
98 Register Reg = Operand->getReg();
99 assert(Reg.isPhysical() &&
100 "MachineCopyPropagation should be run after register allocation!");
101 return Reg;
102}
103
104MCRegister getDstMCReg(const DestSourcePair &DSP) {
105 return asPhysMCReg(DSP.Destination);
106}
107MCRegister getSrcMCReg(const DestSourcePair &DSP) {
108 return asPhysMCReg(DSP.Source);
109}
110std::pair<MCRegister, MCRegister> getDstSrcMCRegs(const DestSourcePair &DSP) {
111 return {getDstMCReg(DSP), getSrcMCReg(DSP)};
112}
113
114std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
115 const TargetInstrInfo &TII,
116 bool UseCopyInstr) {
117 if (UseCopyInstr)
118 return TII.isCopyInstr(MI);
119
120 if (MI.isCopy())
121 return DestSourcePair{MI.getOperand(0), MI.getOperand(1)};
122
123 return std::nullopt;
124}
125
/// Bookkeeping for the copies that are live at the current point of the scan.
/// The tracker is keyed by register *unit* so that sub-/super-register
/// aliasing is handled uniformly: a copy is recorded under every unit of its
/// destination, and its destination is remembered under every unit of its
/// source.
class CopyTracker {
  struct CopyInfo {
    // Copy instruction whose destination covers this unit (null when the unit
    // is only known as the source of some copy).
    MachineInstr *MI = nullptr;
    // Most recent COPY seen that reads this unit as its source.
    MachineInstr *LastSeenUseInCopy = nullptr;
    // Non-copy instructions observed reading the copy's source register.
    SmallPtrSet<MachineInstr *, 4> SrcUsers;
    // NOTE(review): the methods below reference a 'DefRegs' member (e.g.
    // markRegsUnavailable(I->second.DefRegs, ...)) and trackCopy initializes
    // five fields — a 'DefRegs' declaration line appears to have been dropped
    // from this copy of the struct; confirm against upstream.
    // True while the copy may still be forwarded (its source is unclobbered).
    bool Avail = false;
  };

  // Per-register-unit copy information.
  DenseMap<MCRegUnit, CopyInfo> Copies;

  // Memoised sets of register units which are preserved by each register mask,
  // needed to efficiently remove copies which are invalidated by call
  // instructions.
  DenseMap<const uint32_t *, BitVector> RegMaskToPreservedRegUnits;

public:
  /// Get the set of register units which are preserved by RegMaskOp.
  /// Computed lazily per distinct mask pointer and cached; the returned
  /// reference stays valid while the tracker is alive.
  BitVector &getPreservedRegUnits(const MachineOperand &RegMaskOp,
                                  const TargetRegisterInfo &TRI) {
    const uint32_t *RegMask = RegMaskOp.getRegMask();
    auto [It, Inserted] = RegMaskToPreservedRegUnits.try_emplace(RegMask);
    if (!Inserted)
      return It->second;
    BitVector &PreservedRegUnits = It->second;

    // First use of this mask: mark every unit of every register the mask
    // does not clobber.
    PreservedRegUnits.resize(TRI.getNumRegUnits());
    for (unsigned SafeReg = 0, E = TRI.getNumRegs(); SafeReg < E; ++SafeReg)
      if (!RegMaskOp.clobbersPhysReg(SafeReg))
        for (MCRegUnit SafeUnit : TRI.regunits(SafeReg))
          PreservedRegUnits.set(static_cast<unsigned>(SafeUnit));

    return PreservedRegUnits;
  }

  /// Mark all of the given registers and their subregisters as unavailable for
  /// copying. Entries are kept (for LastSeenUseInCopy queries) but no longer
  /// offered for forwarding.
  void markRegsUnavailable(ArrayRef<MCRegister> Regs,
                           const TargetRegisterInfo &TRI) {
    for (MCRegister Reg : Regs) {
      // Source of copy is no longer available for propagation.
      for (MCRegUnit Unit : TRI.regunits(Reg)) {
        auto CI = Copies.find(Unit);
        if (CI != Copies.end())
          CI->second.Avail = false;
      }
    }
  }

  /// Remove register from copy maps.
  void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                          const TargetInstrInfo &TII, bool UseCopyInstr) {
    // Since Reg might be a subreg of some registers, invalidating only Reg is
    // not enough. We have to find the COPY defining Reg or registers defined
    // by Reg and invalidate all of them. Similarly, we must invalidate all of
    // the subregisters used in the source of the COPY.
    SmallSet<MCRegUnit, 8> RegUnitsToInvalidate;
    auto InvalidateCopy = [&](MachineInstr *MI) {
      // Collect every unit of both ends of the copy for removal.
      DestSourcePair CopyOperands = *isCopyInstr(*MI, TII, UseCopyInstr);
      auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
      auto DstUnits = TRI.regunits(Dst);
      auto SrcUnits = TRI.regunits(Src);
      RegUnitsToInvalidate.insert_range(DstUnits);
      RegUnitsToInvalidate.insert_range(SrcUnits);
    };

    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      auto I = Copies.find(Unit);
      if (I != Copies.end()) {
        if (MachineInstr *MI = I->second.MI)
          InvalidateCopy(MI);
        if (MachineInstr *MI = I->second.LastSeenUseInCopy)
          InvalidateCopy(MI);
      }
    }
    // Erase after the scan so iteration above is not perturbed.
    for (MCRegUnit Unit : RegUnitsToInvalidate)
      Copies.erase(Unit);
  }

  /// Clobber a single register unit, removing it from the tracker's copy maps.
  void clobberRegUnit(MCRegUnit Unit, const TargetRegisterInfo &TRI,
                      const TargetInstrInfo &TII, bool UseCopyInstr) {
    auto I = Copies.find(Unit);
    if (I != Copies.end()) {
      // When we clobber the source of a copy, we need to clobber everything
      // it defined.
      markRegsUnavailable(I->second.DefRegs, TRI);
      // When we clobber the destination of a copy, we need to clobber the
      // whole register it defined.
      if (MachineInstr *MI = I->second.MI) {
        DestSourcePair CopyOperands = *isCopyInstr(*MI, TII, UseCopyInstr);
        auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);

        markRegsUnavailable(Dst, TRI);

        // Since we clobber the destination of a copy, the semantic of Src's
        // "DefRegs" to contain Def is no longer effectual. We will also need
        // to remove the record from the copy maps that indicates Src defined
        // Def. Failing to do so might cause the target to miss some
        // opportunities to further eliminate redundant copy instructions.
        // Consider the following sequence during the
        // ForwardCopyPropagateBlock procedure:
        // L1: r0 = COPY r9     <- TrackMI
        // L2: r0 = COPY r8     <- TrackMI (Remove r9 defined r0 from tracker)
        // L3: use r0           <- Remove L2 from MaybeDeadCopies
        // L4: early-clobber r9 <- Clobber r9 (L2 is still valid in tracker)
        // L5: r0 = COPY r8     <- Remove NopCopy
        for (MCRegUnit SrcUnit : TRI.regunits(Src)) {
          auto SrcCopy = Copies.find(SrcUnit);
          if (SrcCopy != Copies.end() && SrcCopy->second.LastSeenUseInCopy) {
            // If SrcCopy defines multiple values, we only need
            // to erase the record for Def in DefRegs.
            // NOLINTNEXTLINE(llvm-qualified-auto)
            for (auto Itr = SrcCopy->second.DefRegs.begin();
                 Itr != SrcCopy->second.DefRegs.end(); Itr++) {
              if (*Itr == Dst) {
                SrcCopy->second.DefRegs.erase(Itr);
                // If DefReg becomes empty after removal, we can remove the
                // SrcCopy from the tracker's copy maps. We only remove those
                // entries solely record the Def is defined by Src. If an
                // entry also contains the definition record of other Def'
                // registers, it cannot be cleared.
                if (SrcCopy->second.DefRegs.empty() && !SrcCopy->second.MI) {
                  Copies.erase(SrcCopy);
                }
                break;
              }
            }
          }
        }
      }
      // Now we can erase the copy.
      Copies.erase(I);
    }
  }

  /// Clobber a single register, removing it from the tracker's copy maps.
  void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                       const TargetInstrInfo &TII, bool UseCopyInstr) {
    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      clobberRegUnit(Unit, TRI, TII, UseCopyInstr);
    }
  }

  /// Track copy's src users, and return false if that can't be done.
  /// We can only track if we have a COPY instruction which source is
  /// the same as the Reg.
  bool trackSrcUsers(MCRegister Reg, MachineInstr &MI,
                     const TargetRegisterInfo &TRI, const TargetInstrInfo &TII,
                     bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
    if (!AvailCopy)
      return false;

    DestSourcePair CopyOperands = *isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    MCRegister Src = getSrcMCReg(CopyOperands);

    // Bail out, if the source of the copy is not the same as the Reg.
    if (Src != Reg)
      return false;

    auto I = Copies.find(RU);
    if (I == Copies.end())
      return false;

    I->second.SrcUsers.insert(&MI);
    return true;
  }

  /// Return the users for a given register.
  /// Returned by value; an empty set means the register is untracked.
  SmallPtrSet<MachineInstr *, 4> getSrcUsers(MCRegister Reg,
                                             const TargetRegisterInfo &TRI) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto I = Copies.find(RU);
    if (I == Copies.end())
      return {};
    return I->second.SrcUsers;
  }

  /// Add this copy's registers into the tracker's copy maps.
  void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
                 const TargetInstrInfo &TII, bool UseCopyInstr) {
    DestSourcePair CopyOperands = *isCopyInstr(*MI, TII, UseCopyInstr);
    auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);

    // Remember Dst is defined by the copy.
    for (MCRegUnit Unit : TRI.regunits(Dst))
      Copies[Unit] = {MI, nullptr, {}, {}, true};

    // Remember source that's copied to Dst. Once it's clobbered, then
    // it's no longer available for copy propagation.
    for (MCRegUnit Unit : TRI.regunits(Src)) {
      auto &Copy = Copies[Unit];
      if (!is_contained(Copy.DefRegs, Dst))
        Copy.DefRegs.push_back(Dst);
      Copy.LastSeenUseInCopy = MI;
    }
  }

  /// True if any copy is currently tracked.
  bool hasAnyCopies() {
    return !Copies.empty();
  }

  /// Return the copy recorded as defining \p RegUnit, or null. With
  /// \p MustBeAvailable, also require that the copy is still forwardable.
  MachineInstr *findCopyForUnit(MCRegUnit RegUnit,
                                const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }

  /// If \p RegUnit's source was copied into exactly one destination register,
  /// return the (still-available) copy that defines that destination.
  MachineInstr *findCopyDefViaUnit(MCRegUnit RegUnit,
                                   const TargetRegisterInfo &TRI) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
    return findCopyForUnit(RU, TRI, true);
  }

  /// Find a copy whose *source* covers \p Reg that can be used for backward
  /// propagation at instruction \p I; null if none, or if a regmask between
  /// the copy and \p I clobbers either end of it.
  MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);

    if (!AvailCopy)
      return nullptr;

    DestSourcePair CopyOperands = *isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    auto [AvailDst, AvailSrc] = getDstSrcMCRegs(CopyOperands);
    if (!TRI.isSubRegisterEq(AvailSrc, Reg))
      return nullptr;

    for (const MachineInstr &MI :
         make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDst?
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDst))
            return nullptr;

    return AvailCopy;
  }

  /// Find an available copy whose *destination* covers \p Reg for forwarding
  /// into \p DestCopy; null if none, or if a regmask between the copy and
  /// \p DestCopy clobbers either end of it.
  MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
                              const TargetRegisterInfo &TRI,
                              const TargetInstrInfo &TII, bool UseCopyInstr) {
    // We check the first RegUnit here, since we'll only be interested in the
    // copy if it copies the entire register anyway.
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy =
        findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);

    if (!AvailCopy)
      return nullptr;

    DestSourcePair CopyOperands = *isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    auto [AvailDst, AvailSrc] = getDstSrcMCRegs(CopyOperands);
    if (!TRI.isSubRegisterEq(AvailDst, Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
    for (const MachineInstr &MI :
         make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDst))
            return nullptr;

    return AvailCopy;
  }

  // Find last COPY that defines Reg before Current MachineInstr.
  MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
                                      MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end() || !CI->second.Avail)
      return nullptr;

    MachineInstr *DefCopy = CI->second.MI;
    DestSourcePair CopyOperands = *isCopyInstr(*DefCopy, TII, UseCopyInstr);
    MCRegister Dst = getDstMCReg(CopyOperands);
    if (!TRI.isSubRegisterEq(Dst, Reg))
      return nullptr;

    // Reject the copy if any regmask between it and Current clobbers Dst.
    for (const MachineInstr &MI :
         make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
                    Current.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(Dst)) {
            LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
                              << printReg(Dst, &TRI) << "\n");
            return nullptr;
          }

    return DefCopy;
  }

  // Find last COPY that uses Reg.
  MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
                                      const TargetRegisterInfo &TRI) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end())
      return nullptr;
    return CI->second.LastSeenUseInCopy;
  }

  /// Drop all state (called at basic-block boundaries).
  void clear() {
    Copies.clear();
  }
};
452
/// Implementation of the copy-propagation transformation, shared by the
/// legacy and new pass manager wrappers.
class MachineCopyPropagation {
  const TargetRegisterInfo *TRI = nullptr;
  const TargetInstrInfo *TII = nullptr;
  const MachineRegisterInfo *MRI = nullptr;

  // When true, recognize target copy-like instructions via TII.isCopyInstr in
  // addition to plain COPYs (see the isCopyInstr helper above).
  bool UseCopyInstr;

public:
  MachineCopyPropagation(bool CopyInstr = false)
      : UseCopyInstr(CopyInstr || MCPUseCopyInstr) {}

  /// Run the transformation on \p MF; returns true if anything changed.
  bool run(MachineFunction &MF);

private:
  // Distinguishes real register reads from reads by debug instructions.
  typedef enum { DebugUse = false, RegularUse = true } DebugType;

  void readRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
  void readSuccessorLiveIns(const MachineBasicBlock &MBB);
  void forwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void backwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void eliminateSpillageCopies(MachineBasicBlock &MBB);
  bool eraseIfRedundant(MachineInstr &Copy, MCRegister Dst, MCRegister Src);
  void forwardUses(MachineInstr &MI);
  void propagateDefs(MachineInstr &MI);
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool isBackwardPropagatableCopy(const MachineInstr &Copy,
                                  const DestSourcePair &CopyOperands);
  /// Returns true iff a copy instruction having operand @p CopyOperand must
  /// never be eliminated as redundant.
  bool isNeverRedundant(MCRegister CopyOperand) {
    // Avoid eliminating a copy from/to a reserved registers as we cannot
    // predict the value (Example: The sparc zero register is writable but stays
    // zero).
    return MRI->isReserved(CopyOperand);
  }
  /// Returns true iff the @p Copy instruction must never be eliminated as
  /// redundant. This overload does not consider the operands of @p Copy.
  bool isNeverRedundant(const MachineInstr &Copy) {
    // NOTE(review): the second operand of this '||' appears truncated in this
    // copy of the file (likely a FrameDestroy flag check); confirm against
    // upstream.
    return Copy.getFlag(MachineInstr::FrameSetup) ||
  }
  bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
  bool hasOverlappingMultipleDef(const MachineInstr &MI,
                                 const MachineOperand &MODef, MCRegister Def);
  bool canUpdateSrcUsers(const MachineInstr &Copy,
                         const MachineOperand &CopySrc);

  /// Candidates for deletion.
  SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;

  /// Multimap tracking debug users in current BB
  DenseMap<MachineInstr *, SmallPtrSet<MachineInstr *, 2>> CopyDbgUsers;

  // Per-block copy availability state.
  CopyTracker Tracker;

  // Whether any modification was made to the current function.
  bool Changed = false;
};
515
/// Legacy pass manager wrapper around MachineCopyPropagation.
class MachineCopyPropagationLegacy : public MachineFunctionPass {
  // Forwarded to the MachineCopyPropagation implementation.
  bool UseCopyInstr;

public:
  static char ID; // pass identification

  MachineCopyPropagationLegacy(bool UseCopyInstr = false)
      : MachineFunctionPass(ID), UseCopyInstr(UseCopyInstr) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    // NOTE(review): the customary chained call to
    // MachineFunctionPass::getAnalysisUsage(AU) appears to have been dropped
    // from this copy; confirm against upstream.
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  // The pass runs post-RA: no virtual registers may remain.
  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().setNoVRegs();
  }
};
536
537} // end anonymous namespace
538
// Pass identification token for the legacy pass manager.
char MachineCopyPropagationLegacy::ID = 0;

// Public handle other code uses to refer to this pass by ID.
char &llvm::MachineCopyPropagationID = MachineCopyPropagationLegacy::ID;

INITIALIZE_PASS(MachineCopyPropagationLegacy, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)
545
546void MachineCopyPropagation::readRegister(MCRegister Reg, MachineInstr &Reader,
547 DebugType DT) {
548 // If 'Reg' is defined by a copy, the copy is no longer a candidate
549 // for elimination. If a copy is "read" by a debug user, record the user
550 // for propagation.
551 for (MCRegUnit Unit : TRI->regunits(Reg)) {
552 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
553 if (DT == RegularUse) {
554 LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
555 MaybeDeadCopies.remove(Copy);
556 } else {
557 CopyDbgUsers[Copy].insert(&Reader);
558 }
559 }
560 }
561}
562
563void MachineCopyPropagation::readSuccessorLiveIns(
564 const MachineBasicBlock &MBB) {
565 if (MaybeDeadCopies.empty())
566 return;
567
568 // If a copy result is livein to a successor, it is not dead.
569 for (const MachineBasicBlock *Succ : MBB.successors()) {
570 for (const auto &LI : Succ->liveins()) {
571 for (MCRegUnitMaskIterator U(LI.PhysReg, TRI); U.isValid(); ++U) {
572 auto [Unit, Mask] = *U;
573 if ((Mask & LI.LaneMask).any()) {
574 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI))
575 MaybeDeadCopies.remove(Copy);
576 }
577 }
578 }
579 }
580}
581
/// Return true if \p PreviousCopy did copy register \p Src to register \p Dst.
/// This fact may have been obscured by sub register usage or may not be true at
/// all even though Src and Dst are subregisters of the registers used in
/// PreviousCopy. e.g.
/// isNopCopy("ecx = COPY eax", AX, CX) == true
/// isNopCopy("ecx = COPY eax", AH, CL) == false
// NOTE(review): the parameter list appears truncated in this copy — the body
// references 'Dst' and 'TRI', which must have been declared on a dropped
// signature line (likely 'MCRegister Dst, const TargetRegisterInfo *TRI,');
// confirm against upstream.
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
                      const TargetInstrInfo *TII, bool UseCopyInstr) {

  DestSourcePair CopyOperands = *isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
  auto [PreviousDst, PreviousSrc] = getDstSrcMCRegs(CopyOperands);
  // Exact match: trivially a nop.
  if (Src == PreviousSrc && Dst == PreviousDst)
    return true;
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  // Sub-register case: the copy is a nop only if Src and Dst sit at the same
  // sub-register index of the previous copy's source and destination.
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDst, Dst);
}
601
602/// Remove instruction \p Copy if there exists a previous copy that copies the
603/// register \p Src to the register \p Dst; This may happen indirectly by
604/// copying the super registers.
605bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
606 MCRegister Dst, MCRegister Src) {
607 if (isNeverRedundant(Copy) || isNeverRedundant(Src) || isNeverRedundant(Dst))
608 return false;
609
610 // Search for an existing copy.
611 MachineInstr *PrevCopy =
612 Tracker.findAvailCopy(Copy, Dst, *TRI, *TII, UseCopyInstr);
613 if (!PrevCopy)
614 return false;
615
616 DestSourcePair PrevCopyOperands = *isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
617 // Check that the existing copy uses the correct sub registers.
618 if (PrevCopyOperands.Destination->isDead())
619 return false;
620 if (!isNopCopy(*PrevCopy, Src, Dst, TRI, TII, UseCopyInstr))
621 return false;
622
623 LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());
624
625 // Copy was redundantly redefining either Src or Dst. Remove earlier kill
626 // flags between Copy and PrevCopy because the value will be reused now.
627 DestSourcePair CopyOperands = *isCopyInstr(Copy, *TII, UseCopyInstr);
628
629 MCRegister CopyDst = getDstMCReg(CopyOperands);
630 assert(CopyDst == Src || CopyDst == Dst);
631 for (MachineInstr &MI :
632 make_range(PrevCopy->getIterator(), Copy.getIterator()))
633 MI.clearRegisterKills(CopyDst, TRI);
634
635 // Clear undef flag from remaining copy if needed.
636 if (!CopyOperands.Source->isUndef()) {
637 PrevCopy->getOperand(PrevCopyOperands.Source->getOperandNo())
638 .setIsUndef(false);
639 }
640
641 Copy.eraseFromParent();
642 Changed = true;
643 ++NumDeletes;
644 return true;
645}
646
647bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
648 const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
649 DestSourcePair CopyOperands = *isCopyInstr(Copy, *TII, UseCopyInstr);
650 MCRegister Dst = getDstMCReg(CopyOperands);
651
652 if (const TargetRegisterClass *URC =
653 UseI.getRegClassConstraint(UseIdx, TII, TRI))
654 return URC->contains(Dst);
655
656 // We don't process further if UseI is a COPY, since forward copy propagation
657 // should handle that.
658 return false;
659}
660
661bool MachineCopyPropagation::isBackwardPropagatableCopy(
662 const MachineInstr &Copy, const DestSourcePair &CopyOperands) {
663 auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
664
665 if (!Dst || !Src)
666 return false;
667
668 if (isNeverRedundant(Copy) || isNeverRedundant(Dst) || isNeverRedundant(Src))
669 return false;
670
671 return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
672}
673
674/// Decide whether we should forward the source of \param Copy to its use in
675/// \param UseI based on the physical register class constraints of the opcode
676/// and avoiding introducing more cross-class COPYs.
677bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
678 const MachineInstr &UseI,
679 unsigned UseIdx) {
680 DestSourcePair CopyOperands = *isCopyInstr(Copy, *TII, UseCopyInstr);
681 MCRegister CopySrc = getSrcMCReg(CopyOperands);
682
683 // If the new register meets the opcode register constraints, then allow
684 // forwarding.
685 if (const TargetRegisterClass *URC =
686 UseI.getRegClassConstraint(UseIdx, TII, TRI))
687 return URC->contains(CopySrc);
688
689 std::optional<DestSourcePair> UseICopyOperands =
690 isCopyInstr(UseI, *TII, UseCopyInstr);
691 if (!UseICopyOperands)
692 return false;
693
694 /// COPYs don't have register class constraints, so if the user instruction
695 /// is a COPY, we just try to avoid introducing additional cross-class
696 /// COPYs. For example:
697 ///
698 /// RegClassA = COPY RegClassB // Copy parameter
699 /// ...
700 /// RegClassB = COPY RegClassA // UseI parameter
701 ///
702 /// which after forwarding becomes
703 ///
704 /// RegClassA = COPY RegClassB
705 /// ...
706 /// RegClassB = COPY RegClassB
707 ///
708 /// so we have reduced the number of cross-class COPYs and potentially
709 /// introduced a nop COPY that can be removed.
710
711 // Allow forwarding if src and dst belong to any common class, so long as they
712 // don't belong to any (possibly smaller) common class that requires copies to
713 // go via a different class.
714 MCRegister UseDst = getDstMCReg(*UseICopyOperands);
715 bool Found = false;
716 bool IsCrossClass = false;
717 for (const TargetRegisterClass *RC : TRI->regclasses()) {
718 if (RC->contains(CopySrc) && RC->contains(UseDst)) {
719 Found = true;
720 if (TRI->getCrossCopyRegClass(RC) != RC) {
721 IsCrossClass = true;
722 break;
723 }
724 }
725 }
726 if (!Found)
727 return false;
728 if (!IsCrossClass)
729 return true;
730 // The forwarded copy would be cross-class. Only do this if the original copy
731 // was also cross-class.
732 MCRegister CopyDst = getDstMCReg(CopyOperands);
733 for (const TargetRegisterClass *RC : TRI->regclasses()) {
734 if (RC->contains(CopySrc) && RC->contains(CopyDst) &&
735 TRI->getCrossCopyRegClass(RC) != RC)
736 return true;
737 }
738 return false;
739}
740
741/// Check that \p MI does not have implicit uses that overlap with it's \p Use
742/// operand (the register being replaced), since these can sometimes be
743/// implicitly tied to other operands. For example, on AMDGPU:
744///
745/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
746///
747/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
748/// way of knowing we need to update the latter when updating the former.
749bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
750 const MachineOperand &Use) {
751 for (const MachineOperand &MIUse : MI.uses())
752 if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
753 MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
754 return true;
755
756 return false;
757}
758
759/// For an MI that has multiple definitions, check whether \p MI has
760/// a definition that overlaps with another of its definitions.
761/// For example, on ARM: umull r9, r9, lr, r0
762/// The umull instruction is unpredictable unless RdHi and RdLo are different.
763bool MachineCopyPropagation::hasOverlappingMultipleDef(
764 const MachineInstr &MI, const MachineOperand &MODef, MCRegister Def) {
765 for (const MachineOperand &MIDef : MI.all_defs()) {
766 if ((&MIDef != &MODef) && MIDef.isReg() &&
767 TRI->regsOverlap(Def, MIDef.getReg()))
768 return true;
769 }
770
771 return false;
772}
773
774/// Return true if it is safe to update all users of the \p CopySrc register
775/// in the given \p Copy instruction.
776bool MachineCopyPropagation::canUpdateSrcUsers(const MachineInstr &Copy,
777 const MachineOperand &CopySrc) {
778 assert(CopySrc.isReg() && "Expected a register operand");
779 for (auto *SrcUser : Tracker.getSrcUsers(CopySrc.getReg(), *TRI)) {
780 if (hasImplicitOverlap(*SrcUser, CopySrc))
781 return false;
782
783 for (MachineOperand &MO : SrcUser->uses()) {
784 if (!MO.isReg() || !MO.isUse() || MO.getReg() != CopySrc.getReg())
785 continue;
786 if (MO.isTied() || !MO.isRenamable() ||
787 !isBackwardPropagatableRegClassCopy(Copy, *SrcUser,
788 MO.getOperandNo()))
789 return false;
790 }
791 }
792 return true;
793}
794
/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit vreg uses that have an active COPY
  // instruction that defines the physical register allocated to them.
  // Replace the vreg with the source of the active COPY.
  // Note: getNumOperands() is cached because MOUse.setReg below does not
  // change the operand count of MI.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands since doing so can cause problems
    // with the machine verifier, since it doesn't treat undef reads as reads,
    // so we can end up with a live range that ends on an undef read, leading to
    // an error that the live range doesn't end on a read of the live range
    // register.
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    // Skip operands holding no register at all.
    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
                                               *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    DestSourcePair CopyOperands = *isCopyInstr(*Copy, *TII, UseCopyInstr);
    auto [CopyDst, CopySrc] = getDstSrcMCRegs(CopyOperands);
    const MachineOperand &CopySrcOperand = *CopyOperands.Source;

    MCRegister ForwardedReg = CopySrc;
    // MI might use a sub-register of the Copy destination, in which case the
    // forwarded register is the matching sub-register of the Copy source.
    if (MOUse.getReg() != CopyDst) {
      unsigned SubRegIdx = TRI->getSubRegIndex(CopyDst, MOUse.getReg());
      assert(SubRegIdx &&
             "MI source is not a sub-register of Copy destination");
      ForwardedReg = TRI->getSubReg(CopySrc, SubRegIdx);
      if (!ForwardedReg || TRI->isArtificial(ForwardedReg)) {
        LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
                          << TRI->getSubRegIndexName(SubRegIdx) << '\n');
        continue;
      }
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrc) && !MRI->isConstantPhysReg(CopySrc))
      continue;

    // Respect register-class constraints and avoid extra cross-class COPYs.
    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    // Implicit uses overlapping MOUse may be tied to it in ways we can't see.
    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (isCopyInstr(MI, *TII, UseCopyInstr) &&
        MI.modifiesRegister(CopySrc, TRI) &&
        !MI.definesRegister(CopySrc, /*TRI=*/nullptr)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    // Debug counter hook for bisecting miscompiles.
    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n with " << printReg(ForwardedReg, TRI)
                      << "\n in " << MI << " from " << *Copy);

    MOUse.setReg(ForwardedReg);

    // The renamable flag and undef-ness must follow the forwarded operand.
    if (!CopySrcOperand.isRenamable())
      MOUse.setIsRenamable(false);
    MOUse.setIsUndef(CopySrcOperand.isUndef());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrc, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}
896
// Walk MBB top-down, forwarding COPY sources into later uses (via
// forwardUses), tracking live copies in Tracker, and deleting copies whose
// destinations are provably dead at the end of the block.
// NOTE(review): this listing is a rendered source view (each line carries its
// original line number) and a few original lines are absent from the
// extraction (e.g. the assert whose message appears at original line 972);
// the code below is kept byte-identical to the listing.
897 void MachineCopyPropagation::forwardCopyPropagateBlock(MachineBasicBlock &MBB) {
898 LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
899 << "\n");
900
901 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
902 // Analyze copies (which don't overlap themselves).
903 std::optional<DestSourcePair> CopyOperands =
904 isCopyInstr(MI, *TII, UseCopyInstr);
905 if (CopyOperands) {
906 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
907 if (!TRI->regsOverlap(Dst, Src)) {
908 // The two copies cancel out and the source of the first copy
909 // hasn't been overridden, eliminate the second one. e.g.
910 // %ecx = COPY %eax
911 // ... nothing clobbered eax.
912 // %eax = COPY %ecx
913 // =>
914 // %ecx = COPY %eax
915 //
916 // or
917 //
918 // %ecx = COPY %eax
919 // ... nothing clobbered eax.
920 // %ecx = COPY %eax
921 // =>
922 // %ecx = COPY %eax
923 if (eraseIfRedundant(MI, Dst, Src) || eraseIfRedundant(MI, Src, Dst))
924 continue;
925 }
926 }
927
928 // Clobber any earlyclobber regs first.
929 for (const MachineOperand &MO : MI.operands())
930 if (MO.isReg() && MO.isEarlyClobber()) {
931 MCRegister Reg = MO.getReg().asMCReg();
932 // If we have a tied earlyclobber, that means it is also read by this
933 // instruction, so we need to make sure we don't remove it as dead
934 // later.
935 if (MO.isTied())
936 readRegister(Reg, MI, RegularUse);
937 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
938 }
939
940 forwardUses(MI);
941
942 // Attempt to canonicalize/optimize the instruction now its arguments have
943 // been mutated. This may convert MI from a non-copy to a copy instruction.
944 if (TII->simplifyInstruction(MI)) {
945 Changed = true;
946 LLVM_DEBUG(dbgs() << "MCP: After simplifyInstruction: " << MI);
947 }
948
949 CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
950 if (CopyOperands) {
951 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
952 if (!TRI->regsOverlap(Dst, Src)) {
953 // FIXME: Document why this does not consider `RegSrc`, similar to how
954 // `backwardCopyPropagateBlock` does.
955 if (!isNeverRedundant(MI) && !isNeverRedundant(Dst))
956 MaybeDeadCopies.insert(&MI);
957 }
958 }
959
961 const MachineOperand *RegMask = nullptr;
962 for (const MachineOperand &MO : MI.operands()) {
963 if (MO.isRegMask())
964 RegMask = &MO;
965 if (!MO.isReg())
966 continue;
967 Register Reg = MO.getReg();
968 if (!Reg)
969 continue;
970
972 "MachineCopyPropagation should be run after register allocation!");
973
974 if (MO.isDef() && !MO.isEarlyClobber()) {
975 // Skip invalidating constant registers.
976 if (!MRI->isConstantPhysReg(Reg)) {
977 Defs.push_back(Reg.asMCReg());
978 continue;
979 }
980 } else if (MO.readsReg()) {
981 readRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
982 }
983 }
984
985 // The instruction has a register mask operand which means that it clobbers
986 // a large set of registers. Treat clobbered registers the same way as
987 // defined registers.
988 if (RegMask) {
989 BitVector &PreservedRegUnits =
990 Tracker.getPreservedRegUnits(*RegMask, *TRI);
991
992 // Erase any MaybeDeadCopies whose destination register is clobbered.
993 for (SmallSetVector<MachineInstr *, 8>::iterator DI =
994 MaybeDeadCopies.begin();
995 DI != MaybeDeadCopies.end();) {
996 MachineInstr *MaybeDead = *DI;
997 std::optional<DestSourcePair> CopyOperands =
998 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
999 MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
1000 assert(!isNeverRedundant(*MaybeDead) && !isNeverRedundant(Reg));
1001
1002 if (!RegMask->clobbersPhysReg(Reg)) {
1003 ++DI;
1004 continue;
1005 }
1006
1007 // Invalidate all entries in the copy map which are not preserved by
1008 // this register mask.
1009 bool MIRefedinCopyInfo = false;
1010 for (MCRegUnit RegUnit : TRI->regunits(Reg)) {
1011 if (!PreservedRegUnits.test(static_cast<unsigned>(RegUnit)))
1012 Tracker.clobberRegUnit(RegUnit, *TRI, *TII, UseCopyInstr);
1013 else {
1014 if (MaybeDead == Tracker.findCopyForUnit(RegUnit, *TRI)) {
1015 MIRefedinCopyInfo = true;
1016 }
1017 }
1018 }
1019
1020 // erase() will return the next valid iterator pointing to the next
1021 // element after the erased one.
1022 DI = MaybeDeadCopies.erase(DI);
1023
1024 // Preserved by RegMask, DO NOT remove copy
1025 if (MIRefedinCopyInfo)
1026 continue;
1027
1028 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: "
1029 << *MaybeDead);
1030
1031 MaybeDead->eraseFromParent();
1032 Changed = true;
1033 ++NumDeletes;
1034 }
1035 }
1036
1037 // Any previous copy definition or reading the Defs is no longer available.
1038 for (MCRegister Reg : Defs)
1039 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1040
1041 if (CopyOperands) {
1042 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
1043 if (!TRI->regsOverlap(Dst, Src)) {
1044 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1045 }
1046 }
1047 }
1048
1049 bool TracksLiveness = MRI->tracksLiveness();
1050
1051 // If liveness is tracked, we can use the live-in lists to know which
1052 // copies aren't dead.
1053 if (TracksLiveness)
1054 readSuccessorLiveIns(MBB);
1055
1056 // If MBB doesn't have a successor, delete copies whose defs are not used.
1057 // If MBB does have successors, we can only delete copies if we are able to
1058 // use liveness information from successors to confirm they are really dead.
1059 if (MBB.succ_empty() || TracksLiveness) {
1060 for (MachineInstr *MaybeDead : MaybeDeadCopies) {
1061 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
1062 MaybeDead->dump());
1063
1064 DestSourcePair CopyOperands =
1065 *isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
1066
1067 auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
1068 assert(!isNeverRedundant(*MaybeDead) && !isNeverRedundant(Dst));
1069
1070 // Update matching debug values, if any.
1071 const auto &DbgUsers = CopyDbgUsers[MaybeDead];
1072 SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
1073 DbgUsers.end());
1074 MRI->updateDbgUsersToReg(Dst, Src, MaybeDeadDbgUsers);
1075
1076 MaybeDead->eraseFromParent();
1077 Changed = true;
1078 ++NumDeletes;
1079 }
1080 }
1081
1082 // Per-block state must not leak into the next block.
1083 MaybeDeadCopies.clear();
1084 CopyDbgUsers.clear();
1085 Tracker.clear();
1086
// Backward propagation: for each plain def in MI, if a later COPY (already
// seen, since the caller iterates the block in reverse) copies that def's
// register into another register, rewrite MI to define the COPY's destination
// directly and mark the COPY as possibly dead. Only renamable, non-tied,
// non-implicit defs are candidates.
// NOTE(review): code kept byte-identical to the rendered listing (embedded
// original line numbers included).
1087 void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
1088 if (!Tracker.hasAnyCopies())
1089 return;
1090
1091 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
1092 ++OpIdx) {
1093 MachineOperand &MODef = MI.getOperand(OpIdx);
1094
1095 if (!MODef.isReg() || MODef.isUse())
1096 continue;
1097
1098 // Ignore non-trivial cases.
1099 if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
1100 continue;
1101
1102 if (!MODef.getReg())
1103 continue;
1104
1105 // Only handle renamable defs, i.e. registers that originated from a vreg
1106 // before register allocation.
1107 if (!MODef.isRenamable())
1108 continue;
1109
1110 MachineInstr *Copy = Tracker.findAvailBackwardCopy(
1111 MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
1112 if (!Copy)
1113 continue;
1114
1115 DestSourcePair CopyOperands = *isCopyInstr(*Copy, *TII, UseCopyInstr);
1116 auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
1117
1118 if (MODef.getReg() != Src)
1119 continue;
1120
1121 if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
1122 continue;
1123
1124 if (hasImplicitOverlap(MI, MODef))
1125 continue;
1126
1127 if (hasOverlappingMultipleDef(MI, MODef, Dst))
1128 continue;
1129
1130 if (!canUpdateSrcUsers(*Copy, *CopyOperands.Source))
1131 continue;
1132
1133 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
1134 << "\n with " << printReg(Dst, TRI) << "\n in "
1135 << MI << " from " << *Copy);
1136
1137 MODef.setReg(Dst);
1138 MODef.setIsRenamable(CopyOperands.Destination->isRenamable());
1139
1140 // Rewrite intervening readers of Src (validated by canUpdateSrcUsers
1141 // above) so they read the new register as well.
1142 for (auto *SrcUser : Tracker.getSrcUsers(Src, *TRI)) {
1143 for (MachineOperand &MO : SrcUser->uses()) {
1144 if (!MO.isReg() || !MO.isUse() || MO.getReg() != Src)
1145 continue;
1146 MO.setReg(Dst);
1147 MO.setIsRenamable(CopyOperands.Destination->isRenamable());
1148 }
1149 }
1150
1151 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
1152 MaybeDeadCopies.insert(Copy);
1153 Changed = true;
1154 ++NumCopyBackwardPropagated;
1155 }
1154
// Walk MBB bottom-up, recording backward-propagatable COPYs in Tracker,
// calling propagateDefs on every instruction, and finally deleting the COPYs
// whose destinations were rewritten away (MaybeDeadCopies). Debug users of a
// deleted COPY's source are retargeted to its destination.
// NOTE(review): code kept byte-identical to the rendered listing (embedded
// original line numbers included).
1155 void MachineCopyPropagation::backwardCopyPropagateBlock(
1156 MachineBasicBlock &MBB) {
1157 LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
1158 << "\n");
1159
1160 for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
1161 // Ignore non-trivial COPYs.
1162 std::optional<DestSourcePair> CopyOperands =
1163 isCopyInstr(MI, *TII, UseCopyInstr);
1164 if (CopyOperands && MI.getNumImplicitOperands() == 0) {
1165 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
1166
1167 if (!TRI->regsOverlap(Dst, Src)) {
1168 // Unlike forward cp, we don't invoke propagateDefs here,
1169 // just let forward cp do COPY-to-COPY propagation.
1170 if (isBackwardPropagatableCopy(MI, *CopyOperands)) {
1171 Tracker.invalidateRegister(Src, *TRI, *TII, UseCopyInstr);
1172 Tracker.invalidateRegister(Dst, *TRI, *TII, UseCopyInstr);
1173 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1174 continue;
1175 }
1176 }
1177 }
1178
1179 // Invalidate any earlyclobber regs first.
1180 for (const MachineOperand &MO : MI.operands())
1181 if (MO.isReg() && MO.isEarlyClobber()) {
1182 MCRegister Reg = MO.getReg().asMCReg();
1183 if (!Reg)
1184 continue;
1185 Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
1186 }
1187
1188 propagateDefs(MI);
1189 for (const MachineOperand &MO : MI.operands()) {
1190 if (!MO.isReg())
1191 continue;
1192
1193 if (!MO.getReg())
1194 continue;
1195
1196 if (MO.isDef())
1197 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1198 UseCopyInstr);
1199
1200 if (MO.readsReg()) {
1201 if (MO.isDebug()) {
1202 // Check if the register in the debug instruction is utilized
1203 // in a copy instruction, so we can update the debug info if the
1204 // register is changed.
1205 for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
1206 if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
1207 CopyDbgUsers[Copy].insert(&MI);
1208 }
1209 }
1210 } else if (!Tracker.trackSrcUsers(MO.getReg().asMCReg(), MI, *TRI, *TII,
1211 UseCopyInstr)) {
1212 // If we can't track the source users, invalidate the register.
1213 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1214 UseCopyInstr);
1215 }
1216 }
1217 }
1218 }
1219
1220 // Delete the copies made dead by propagateDefs. Changed was already set
1221 // there when each copy was queued.
1220 for (auto *Copy : MaybeDeadCopies) {
1221 DestSourcePair CopyOperands = *isCopyInstr(*Copy, *TII, UseCopyInstr);
1222 auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
1223 const auto &DbgUsers = CopyDbgUsers[Copy];
1224 SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
1225 DbgUsers.end());
1226
1227 MRI->updateDbgUsersToReg(Src, Dst, MaybeDeadDbgUsers);
1228 Copy->eraseFromParent();
1229 ++NumDeletes;
1230 }
1231
1232 MaybeDeadCopies.clear();
1233 CopyDbgUsers.clear();
1234 Tracker.clear();
1235}
1236
// Debug-only helper: dump the spill chain (in reverse insertion order, i.e.
// outermost spill first) followed by the reload chain for the given chain
// leader. NOTE(review): the parameter lines (original lines 1238-1239, the
// two DenseMap chain arguments) are missing from this rendered listing; the
// remaining code is kept byte-identical.
1237 [[maybe_unused]] static void printSpillReloadChain(
1240 MachineInstr *Leader) {
1241 auto &SC = SpillChain[Leader];
1242 auto &RC = ReloadChain[Leader];
1243 for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1244 (*I)->dump();
1245 for (MachineInstr *MI : RC)
1246 MI->dump();
1247}
1248
1249// Remove spill-reload like copy chains. For example
1250// r0 = COPY r1
1251// r1 = COPY r2
1252// r2 = COPY r3
1253// r3 = COPY r4
1254// <def-use r4>
1255// r4 = COPY r3
1256// r3 = COPY r2
1257// r2 = COPY r1
1258// r1 = COPY r0
1259// will be folded into
1260// r0 = COPY r1
1261// r1 = COPY r4
1262// <def-use r4>
1263// r4 = COPY r1
1264// r1 = COPY r0
1265// TODO: Currently we don't track usage of r0 outside the chain, so we
1266// conservatively keep its value as it was before the rewrite.
1267//
1268// The algorithm is trying to keep
1269// property#1: No Dst of spill COPY in the chain is used or defined until the
1270// paired reload COPY in the chain uses the Dst.
1271//
1272// property#2: NO Source of COPY in the chain is used or defined until the next
1273// COPY in the chain defines the Source, except the innermost spill-reload
1274// pair.
1275//
1276 // The algorithm works by checking every COPY inside the MBB: assume
1277 // the COPY is a reload COPY, then try to find its paired spill COPY by
1278 // searching backward for the COPY that defines the Src of the reload COPY.
1279 // If such a pair is found, it either joins an existing chain or starts a new
1280 // chain, depending on whether the last available COPY uses the Dst of the reload COPY.
1281// Implementation notes, we use CopyTracker::findLastDefCopy(Reg, ...) to find
1282// out last COPY that defines Reg; we use CopyTracker::findLastUseCopy(Reg, ...)
1283// to find out last COPY that uses Reg. When we are encountered with a Non-COPY
1284// instruction, we check registers in the operands of this instruction. If this
1285// Reg is defined by a COPY, we untrack this Reg via
1286// CopyTracker::clobberRegister(Reg, ...).
// Detect and shorten spill-reload-like COPY chains (see the block comment
// above for the transformation and the two invariants, property#1 and
// property#2, it preserves).
// NOTE(review): code kept byte-identical to the rendered listing (embedded
// original line numbers included).
1287 void MachineCopyPropagation::eliminateSpillageCopies(MachineBasicBlock &MBB) {
1288 // ChainLeader maps MI inside a spill-reload chain to its innermost reload COPY.
1289 // Thus we can track if a MI belongs to an existing spill-reload chain.
1290 DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
1291 // SpillChain maps innermost reload COPY of a spill-reload chain to a sequence
1292 // of COPYs that forms spills of a spill-reload chain.
1293 // ReloadChain maps innermost reload COPY of a spill-reload chain to a
1294 // sequence of COPYs that forms reloads of a spill-reload chain.
1295 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
1296 // If a COPY's Source has use or def until next COPY defines the Source,
1297 // we put the COPY in this set to keep property#2.
1298 DenseSet<const MachineInstr *> CopySourceInvalid;
1299
1300 auto TryFoldSpillageCopies =
1301 [&, this](const SmallVectorImpl<MachineInstr *> &SC,
1302 const SmallVectorImpl<MachineInstr *> &RC) {
1303 assert(SC.size() == RC.size() && "Spill-reload should be paired");
1304
1305 // We need at least 3 pairs of copies for the transformation to apply,
1306 // because the first outermost pair cannot be removed since we don't
1307 // recolor outside of the chain and that we need at least one temporary
1308 // spill slot to shorten the chain. If we only have a chain of two
1309 // pairs, we already have the shortest sequence this code can handle:
1310 // the outermost pair for the temporary spill slot, and the pair that
1311 // use that temporary spill slot for the other end of the chain.
1312 // TODO: We might be able to simplify to one spill-reload pair if collecting
1313 // more information about the outermost COPY.
1314 if (SC.size() <= 2)
1315 return;
1316
1317 // If violate property#2, we don't fold the chain.
1318 for (const MachineInstr *Spill : drop_begin(SC))
1319 if (CopySourceInvalid.count(Spill))
1320 return;
1321
1322 for (const MachineInstr *Reload : drop_end(RC))
1323 if (CopySourceInvalid.count(Reload))
1324 return;
1325
1326 // Both rewritten endpoints must share a register class with their new
1327 // partner, otherwise the rewritten COPYs could be illegal.
1326 auto CheckCopyConstraint = [this](Register Dst, Register Src) {
1327 for (const TargetRegisterClass *RC : TRI->regclasses()) {
1328 if (RC->contains(Dst) && RC->contains(Src))
1329 return true;
1330 }
1331 return false;
1332 };
1333
1334 auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
1335 const MachineOperand *New) {
1336 for (MachineOperand &MO : MI->operands()) {
1337 if (&MO == Old)
1338 MO.setReg(New->getReg());
1339 }
1340 };
1341
1342 DestSourcePair InnerMostSpillCopy =
1343 *isCopyInstr(*SC[0], *TII, UseCopyInstr);
1344 DestSourcePair OuterMostSpillCopy =
1345 *isCopyInstr(*SC.back(), *TII, UseCopyInstr);
1346 DestSourcePair InnerMostReloadCopy =
1347 *isCopyInstr(*RC[0], *TII, UseCopyInstr);
1348 DestSourcePair OuterMostReloadCopy =
1349 *isCopyInstr(*RC.back(), *TII, UseCopyInstr);
1350 if (!CheckCopyConstraint(getSrcMCReg(OuterMostSpillCopy),
1351 getSrcMCReg(InnerMostSpillCopy)) ||
1352 !CheckCopyConstraint(getDstMCReg(InnerMostReloadCopy),
1353 getDstMCReg(OuterMostReloadCopy)))
1354 return;
1355
1356 SpillageChainsLength += SC.size() + RC.size();
1357 NumSpillageChains += 1;
1358 UpdateReg(SC[0], InnerMostSpillCopy.Destination,
1359 OuterMostSpillCopy.Source);
1360 UpdateReg(RC[0], InnerMostReloadCopy.Source,
1361 OuterMostReloadCopy.Destination);
1362
1363 // Keep the outermost pair (index SC.size()-1) and the rewritten
1364 // innermost pair (index 0); delete everything in between.
1363 for (size_t I = 1; I < SC.size() - 1; ++I) {
1364 SC[I]->eraseFromParent();
1365 RC[I]->eraseFromParent();
1366 NumDeletes += 2;
1367 }
1368 };
1369
1370 auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
1371 if (MaybeCopy.getNumImplicitOperands() > 0)
1372 return false;
1373 std::optional<DestSourcePair> CopyOperands =
1374 isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
1375 if (!CopyOperands)
1376 return false;
1377 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
1378 return Src && Dst && !TRI->regsOverlap(Src, Dst) &&
1379 CopyOperands->Source->isRenamable() &&
1380 CopyOperands->Destination->isRenamable();
1381 };
1382
1383 // A spill-reload pair is two foldable COPYs with swapped operands:
1384 // Spill: a = COPY b / Reload: b = COPY a.
1383 auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
1384 const MachineInstr &Reload) {
1385 if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
1386 return false;
1387 std::optional<DestSourcePair> SpillCopy =
1388 isCopyInstr(Spill, *TII, UseCopyInstr);
1389 std::optional<DestSourcePair> ReloadCopy =
1390 isCopyInstr(Reload, *TII, UseCopyInstr);
1391 if (!SpillCopy || !ReloadCopy)
1392 return false;
1393 return getSrcMCReg(*SpillCopy) == getDstMCReg(*ReloadCopy) &&
1394 getDstMCReg(*SpillCopy) == getSrcMCReg(*ReloadCopy);
1395 };
1396
1397 auto IsChainedCopy = [&, this](const MachineInstr &Prev,
1398 const MachineInstr &Current) {
1399 if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
1400 return false;
1401 std::optional<DestSourcePair> PrevCopy =
1402 isCopyInstr(Prev, *TII, UseCopyInstr);
1403 std::optional<DestSourcePair> CurrentCopy =
1404 isCopyInstr(Current, *TII, UseCopyInstr);
1405 if (!PrevCopy || !CurrentCopy)
1406 return false;
1407 return getSrcMCReg(*PrevCopy) == getDstMCReg(*CurrentCopy);
1408 };
1409
1410 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
1411 std::optional<DestSourcePair> CopyOperands =
1412 isCopyInstr(MI, *TII, UseCopyInstr);
1413
1414 // Update track information via non-copy instruction.
1415 SmallSet<Register, 8> RegsToClobber;
1416 if (!CopyOperands) {
1417 for (const MachineOperand &MO : MI.operands()) {
1418 if (!MO.isReg())
1419 continue;
1420 Register Reg = MO.getReg();
1421 if (!Reg)
1422 continue;
1423 MachineInstr *LastUseCopy =
1424 Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
1425 if (LastUseCopy) {
1426 LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
1427 LLVM_DEBUG(LastUseCopy->dump());
1428 LLVM_DEBUG(dbgs() << "might be invalidated by\n");
1429 LLVM_DEBUG(MI.dump());
1430 CopySourceInvalid.insert(LastUseCopy);
1431 }
1432 // Must be noted Tracker.clobberRegister(Reg, ...) removes tracking of
1433 // Reg, i.e, COPY that defines Reg is removed from the mapping as well
1434 // as marking COPYs that uses Reg unavailable.
1435 // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
1436 // defined by a previous COPY, since we don't want to make COPYs uses
1437 // Reg unavailable.
1438 if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
1439 UseCopyInstr))
1440 // Thus we can keep the property#1.
1441 RegsToClobber.insert(Reg);
1442 }
1443 for (Register Reg : RegsToClobber) {
1444 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1445 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
1446 << "\n");
1447 }
1448 continue;
1449 }
1450
1451 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
1452 // Check if we can find a pair spill-reload copy.
1453 LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
1454 LLVM_DEBUG(MI.dump());
1455 MachineInstr *MaybeSpill =
1456 Tracker.findLastSeenDefInCopy(MI, Src, *TRI, *TII, UseCopyInstr);
1457 bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
1458 if (!MaybeSpillIsChained && MaybeSpill &&
1459 IsSpillReloadPair(*MaybeSpill, MI)) {
1460 // Check if we already have an existing chain. Now we have a
1461 // spill-reload pair.
1462 // L2: r2 = COPY r3
1463 // L5: r3 = COPY r2
1464 // Looking for a valid COPY before L5 which uses r3.
1465 // This can be several cases.
1466 // Case #1:
1467 // No COPY is found, which can be r3 is def-use between (L2, L5), we
1468 // create a new chain for L2 and L5.
1469 // Case #2:
1470 // L2: r2 = COPY r3
1471 // L5: r3 = COPY r2
1472 // Such COPY is found and is L2, we create a new chain for L2 and L5.
1473 // Case #3:
1474 // L2: r2 = COPY r3
1475 // L3: r1 = COPY r3
1476 // L5: r3 = COPY r2
1477 // we create a new chain for L2 and L5.
1478 // Case #4:
1479 // L2: r2 = COPY r3
1480 // L3: r1 = COPY r3
1481 // L4: r3 = COPY r1
1482 // L5: r3 = COPY r2
1483 // Such COPY won't be found since L4 defines r3. we create a new chain
1484 // for L2 and L5.
1485 // Case #5:
1486 // L2: r2 = COPY r3
1487 // L3: r3 = COPY r1
1488 // L4: r1 = COPY r3
1489 // L5: r3 = COPY r2
1490 // COPY is found and is L4 which belongs to an existing chain, we add
1491 // L2 and L5 to this chain.
1492 LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1493 LLVM_DEBUG(MaybeSpill->dump());
1494 MachineInstr *MaybePrevReload = Tracker.findLastSeenUseInCopy(Dst, *TRI);
1495 auto Leader = ChainLeader.find(MaybePrevReload);
1496 MachineInstr *L = nullptr;
1497 if (Leader == ChainLeader.end() ||
1498 (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
1499 L = &MI;
1500 assert(!SpillChain.count(L) &&
1501 "SpillChain should not have contained newly found chain");
1502 } else {
1503 assert(MaybePrevReload &&
1504 "Found a valid leader through nullptr should not happened");
1505 L = Leader->second;
1506 assert(SpillChain[L].size() > 0 &&
1507 "Existing chain's length should be larger than zero");
1508 }
1509 assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
1510 "Newly found paired spill-reload should not belong to any chain "
1511 "at this point");
1512 ChainLeader.insert({MaybeSpill, L});
1513 ChainLeader.insert({&MI, L});
1514 SpillChain[L].push_back(MaybeSpill);
1515 ReloadChain[L].push_back(&MI);
1516 LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
1517 LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
1518 } else if (MaybeSpill && !MaybeSpillIsChained) {
1519 // MaybeSpill is unable to pair with MI. That's to say adding MI makes
1520 // the chain invalid.
1521 // The COPY defines Src is no longer considered as a candidate of a
1522 // valid chain. Since we expect the Dst of a spill copy isn't used by
1523 // any COPY instruction until a reload copy. For example:
1524 // L1: r1 = COPY r2
1525 // L2: r3 = COPY r1
1526 // If we later have
1527 // L1: r1 = COPY r2
1528 // L2: r3 = COPY r1
1529 // L3: r2 = COPY r1
1530 // L1 and L3 can't be a valid spill-reload pair.
1531 // Thus we keep the property#1.
1532 LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1533 LLVM_DEBUG(MaybeSpill->dump());
1534 LLVM_DEBUG(MI.dump());
1535 Tracker.clobberRegister(Src, *TRI, *TII, UseCopyInstr);
1536 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
1537 << "\n");
1538 }
1539 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1540 }
1541
1542 // Fold every collected chain.
1542 for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
1543 auto &SC = I->second;
1544 assert(ReloadChain.count(I->first) &&
1545 "Reload chain of the same leader should exist");
1546 auto &RC = ReloadChain[I->first];
1547 TryFoldSpillageCopies(SC, RC);
1548 }
1549
1550 MaybeDeadCopies.clear();
1551 CopyDbgUsers.clear();
1552 Tracker.clear();
1553}
1554
// Legacy pass-manager entry point: honor -opt-bisect/skipFunction, then
// delegate to the shared MachineCopyPropagation::run implementation.
1555 bool MachineCopyPropagationLegacy::runOnMachineFunction(MachineFunction &MF) {
1556 if (skipFunction(MF.getFunction()))
1557 return false;
1558
1559 return MachineCopyPropagation(UseCopyInstr).run(MF);
1560 }
1561
// New pass-manager entry point. Preserves all analyses when nothing changed;
// otherwise preserves the CFG set (this pass never alters block structure).
// NOTE(review): the rendered listing omits original lines 1563-1564 (the
// qualified function header/parameters) and 1568 (the declaration of PA,
// presumably from getMachineFunctionPassPreservedAnalyses()) — confirm
// against the real source; remaining code kept byte-identical.
1562 PreservedAnalyses
1565 MFPropsModifier _(*this, MF);
1566 if (!MachineCopyPropagation(UseCopyInstr).run(MF))
1567 return PreservedAnalyses::all();
1569 PA.preserveSet<CFGAnalyses>();
1570 return PA;
1571 }
1572
// Shared driver for both pass managers: decide whether spillage-copy
// elimination is enabled (command-line override wins, otherwise the
// subtarget hook), then run the three phases per block.
// NOTE(review): the rendered listing omits original line 1575 (the switch
// header over EnableSpillageCopyElimination), 1578 (the subtarget-hook
// expression) and 1590-1591 (the TRI initialization); code kept
// byte-identical to the listing.
1573 bool MachineCopyPropagation::run(MachineFunction &MF) {
1574 bool IsSpillageCopyElimEnabled = false;
1576 case cl::BOU_UNSET:
1577 IsSpillageCopyElimEnabled =
1579 break;
1580 case cl::BOU_TRUE:
1581 IsSpillageCopyElimEnabled = true;
1582 break;
1583 case cl::BOU_FALSE:
1584 IsSpillageCopyElimEnabled = false;
1585 break;
1586 }
1587
1588 Changed = false;
1589
1591 TII = MF.getSubtarget().getInstrInfo();
1592 MRI = &MF.getRegInfo();
1593
1594 for (MachineBasicBlock &MBB : MF) {
1595 if (IsSpillageCopyElimEnabled)
1596 eliminateSpillageCopies(MBB);
1597 backwardCopyPropagateBlock(MBB);
1598 forwardCopyPropagateBlock(MBB);
1599 }
1600
1601 return Changed;
1602 }
1603
// Factory for the legacy pass.
// NOTE(review): a default argument on a function *definition* (= false here)
// is ill-formed if the declaration in the header also has one — this looks
// like a rendering artifact of the doxygen listing; verify against the real
// source before changing. Code kept byte-identical.
1604 MachineFunctionPass *
1605 llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
1606 return new MachineCopyPropagationLegacy(UseCopyInstr);
1607 }
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock & MBB
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This file provides an implementation of debug counters.
#define DEBUG_COUNTER(VARNAME, COUNTERNAME, DESC)
This file defines the DenseMap class.
#define DEBUG_TYPE
const HexagonInstrInfo * TII
#define _
IRTranslator LLVM IR MI
#define I(x, y, z)
Definition MD5.cpp:57
static cl::opt< cl::boolOrDefault > EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden)
static void printSpillReloadChain(DenseMap< MachineInstr *, SmallVector< MachineInstr * > > &SpillChain, DenseMap< MachineInstr *, SmallVector< MachineInstr * > > &ReloadChain, MachineInstr *Leader)
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src, MCRegister Dst, const TargetRegisterInfo *TRI, const TargetInstrInfo *TII, bool UseCopyInstr)
Return true if PreviousCopy did copy register Src to register Dst.
static cl::opt< bool > MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false), cl::Hidden)
Register Reg
Register const TargetRegisterInfo * TRI
Promote Memory to Register
Definition Mem2Reg.cpp:110
MachineInstr unsigned OpIdx
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
Definition PassSupport.h:56
This file contains some templates that are useful if you are working with the STL at all.
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Definition Statistic.h:171
#define LLVM_DEBUG(...)
Definition Debug.h:114
LLVM_ABI void setPreservesCFG()
This function should be called by the pass, iff they do not:
Definition Pass.cpp:270
bool test(unsigned Idx) const
Definition BitVector.h:480
void resize(unsigned N, bool t=false)
resize - Grow or shrink the bitvector.
Definition BitVector.h:360
BitVector & set()
Definition BitVector.h:370
Represents analyses that only rely on functions' control flow.
Definition Analysis.h:73
static bool shouldExecute(CounterInfo &Counter)
iterator find(const_arg_type_t< KeyT > Val)
Definition DenseMap.h:178
iterator begin()
Definition DenseMap.h:78
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
Definition DenseMap.h:174
iterator end()
Definition DenseMap.h:81
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Definition DenseMap.h:241
Wrapper class representing physical registers. Should be passed by value.
Definition MCRegister.h:41
An RAII based helper class to modify MachineFunctionProperties when running pass.
iterator_range< succ_iterator > successors()
LLVM_ABI StringRef getName() const
Return the name of the corresponding LLVM basic block, or an empty string.
PreservedAnalyses run(MachineFunction &MF, MachineFunctionAnalysisManager &MFAM)
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
Function & getFunction()
Return the LLVM function that this machine code represents.
Representation of each machine instruction.
LLVM_ABI void dump() const
const MachineOperand & getOperand(unsigned i) const
LLVM_ABI MachineInstrBundleIterator< MachineInstr > eraseFromParent()
Unlink 'this' from the containing basic block and delete it.
LLVM_ABI const TargetRegisterClass * getRegClassConstraint(unsigned OpIdx, const TargetInstrInfo *TII, const TargetRegisterInfo *TRI) const
Compute the static register class constraint for operand OpIdx.
MachineOperand class - Representation of each machine instruction operand.
LLVM_ABI unsigned getOperandNo() const
Returns the index of this operand in the instruction that it belongs to.
LLVM_ABI void setIsRenamable(bool Val=true)
bool isReg() const
isReg - Tests if this is a MO_Register operand.
LLVM_ABI void setReg(Register Reg)
Change the register this operand corresponds to.
LLVM_ABI bool isRenamable() const
isRenamable - Returns true if this register may be renamed, i.e.
void setIsUndef(bool Val=true)
Register getReg() const
getReg - Returns the register number.
static bool clobbersPhysReg(const uint32_t *RegMask, MCRegister PhysReg)
clobbersPhysReg - Returns true if this RegMask clobbers PhysReg.
const uint32_t * getRegMask() const
getRegMask - Returns a bit mask of registers preserved by this RegMask operand.
bool tracksLiveness() const
tracksLiveness - Returns true when tracking register liveness accurately.
bool isReserved(MCRegister PhysReg) const
isReserved - Returns true when PhysReg is a reserved register.
LLVM_ABI bool isConstantPhysReg(MCRegister PhysReg) const
Returns true if PhysReg is unallocatable and constant throughout the function.
void updateDbgUsersToReg(MCRegister OldReg, MCRegister NewReg, ArrayRef< MachineInstr * > Users) const
updateDbgUsersToReg - Update a collection of debug instructions to refer to the designated register.
void dump() const
Definition Pass.cpp:146
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Definition Analysis.h:118
Wrapper class representing virtual and physical registers.
Definition Register.h:20
MCRegister asMCReg() const
Utility to check-convert this value to a MCRegister.
Definition Register.h:107
constexpr bool isPhysical() const
Return true if the specified register number is in the physical register namespace.
Definition Register.h:83
void insert_range(Range &&R)
Definition SmallSet.h:196
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
Definition SmallSet.h:184
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
TargetInstrInfo - Interface to description of machine instruction set.
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDes...
virtual bool enableSpillageCopyElimination() const
Enable spillage copy elimination in MachineCopyPropagation pass.
virtual const TargetInstrInfo * getInstrInfo() const
virtual const TargetRegisterInfo * getRegisterInfo() const =0
Return the target's register information.
std::pair< iterator, bool > insert(const ValueT &V)
Definition DenseSet.h:202
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
Definition DenseSet.h:180
reverse_self_iterator getReverseIterator()
Definition ilist_node.h:126
self_iterator getIterator()
Definition ilist_node.h:123
Changed
This provides a very simple, boring adaptor for a begin and end iterator into a range type.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
initializer< Ty > init(const Ty &Val)
PointerTypeMap run(const Module &M)
Compute the PointerTypeMap for the module M.
Value * readRegister(IRBuilder<> &IRB, StringRef Name)
NodeAddr< UseNode * > Use
Definition RDFGraph.h:385
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
Definition STLExtras.h:316
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
Definition STLExtras.h:1669
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
Definition STLExtras.h:634
AnalysisManager< MachineFunction > MachineFunctionAnalysisManager
LLVM_ABI PreservedAnalyses getMachineFunctionPassPreservedAnalyses()
Returns the minimum set of Analyses that all machine function passes must preserve.
auto reverse(ContainerTy &&C)
Definition STLExtras.h:408
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition Debug.cpp:207
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
auto drop_end(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the last N elements excluded.
Definition STLExtras.h:323
ArrayRef(const T &OneElt) -> ArrayRef< T >
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
Definition STLExtras.h:1947
LLVM_ABI MachineFunctionPass * createMachineCopyPropagationPass(bool UseCopyInstr)
LLVM_ABI Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.
LLVM_ABI char & MachineCopyPropagationID
MachineCopyPropagation - This pass performs copy propagation on machine instructions.
const MachineOperand * Source
const MachineOperand * Destination