LLVM 23.0.0git
MachineCopyPropagation.cpp
Go to the documentation of this file.
1//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This is an extremely simple MachineInstr-level copy propagation pass.
10//
11// This pass forwards the source of COPYs to the users of their destinations
12// when doing so is legal. For example:
13//
14// %reg1 = COPY %reg0
15// ...
16// ... = OP %reg1
17//
18// If
19// - %reg0 has not been clobbered by the time of the use of %reg1
20// - the register class constraints are satisfied
21// - the COPY def is the only value that reaches OP
22// then this pass replaces the above with:
23//
24// %reg1 = COPY %reg0
25// ...
26// ... = OP %reg0
27//
28// This pass also removes some redundant COPYs. For example:
29//
30// %R1 = COPY %R0
31// ... // No clobber of %R1
32// %R0 = COPY %R1 <<< Removed
33//
34// or
35//
36// %R1 = COPY %R0
37// ... // No clobber of %R0
38// %R1 = COPY %R0 <<< Removed
39//
40// or
41//
42// $R0 = OP ...
43// ... // No read/clobber of $R0 and $R1
44// $R1 = COPY $R0 // $R0 is killed
45// Replace $R0 with $R1 and remove the COPY
46// $R1 = OP ...
47// ...
48//
49//===----------------------------------------------------------------------===//
50
52#include "llvm/ADT/DenseMap.h"
53#include "llvm/ADT/STLExtras.h"
54#include "llvm/ADT/SetVector.h"
55#include "llvm/ADT/SmallSet.h"
57#include "llvm/ADT/Statistic.h"
69#include "llvm/MC/MCRegister.h"
71#include "llvm/Pass.h"
72#include "llvm/Support/Debug.h"
75#include <cassert>
76#include <iterator>
77
using namespace llvm;

#define DEBUG_TYPE "machine-cp"

// Pass-wide statistics, reported with -stats.
STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
STATISTIC(SpillageChainsLength, "Length of spillage chains");
STATISTIC(NumSpillageChains, "Number of spillage chains");
// Debug counter allowing -debug-counter bisection of which individual copy
// forwardings this pass performs.
DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
              "Controls which register COPYs are forwarded");
90static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
93 EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
94
95namespace {
96
97MCRegister asPhysMCReg(const MachineOperand *Operand) {
98 Register Reg = Operand->getReg();
99 assert(Reg.isPhysical() &&
100 "MachineCopyPropagation should be run after register allocation!");
101 return Reg;
102}
103
104MCRegister getDstMCReg(const DestSourcePair &DSP) {
105 return asPhysMCReg(DSP.Destination);
106}
107MCRegister getSrcMCReg(const DestSourcePair &DSP) {
108 return asPhysMCReg(DSP.Source);
109}
110std::pair<MCRegister, MCRegister> getDstSrcMCRegs(const DestSourcePair &DSP) {
111 return {getDstMCReg(DSP), getSrcMCReg(DSP)};
112}
113
114std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
115 const TargetInstrInfo &TII,
116 bool UseCopyInstr) {
117 if (UseCopyInstr)
118 return TII.isCopyInstr(MI);
119
120 if (MI.isCopy())
121 return DestSourcePair{MI.getOperand(0), MI.getOperand(1)};
122
123 return std::nullopt;
124}
125
/// Tracks, per register unit, the COPY (if any) that currently defines the
/// unit and the most recent COPY that read it, so redundant copies can be
/// found, forwarded, and erased.
class CopyTracker {
  struct CopyInfo {
    // Copy instruction whose destination covers this unit; null if the unit
    // is only known as a copy source.
    MachineInstr *MI = nullptr;
    // Most recent COPY that used this unit as (part of) its source.
    MachineInstr *LastSeenUseInCopy = nullptr;
    // Instructions recorded as readers of the tracked copy's source register
    // (populated by trackSrcUsers, consumed by backward propagation).
    SmallPtrSet<MachineInstr *, 4> SrcUsers;
    // True while the copy recorded in MI may still be forwarded.
    bool Avail = false;
  };

  DenseMap<MCRegUnit, CopyInfo> Copies;

  // Memoised sets of register units which are preserved by each register mask,
  // needed to efficiently remove copies which are invalidated by call
  // instructions.
  DenseMap<const uint32_t *, BitVector> RegMaskToPreservedRegUnits;

public:
  /// Get the set of register units which are preserved by RegMaskOp.
  /// Computed lazily per distinct mask and cached for reuse.
  BitVector &getPreservedRegUnits(const MachineOperand &RegMaskOp,
                                  const TargetRegisterInfo &TRI) {
    const uint32_t *RegMask = RegMaskOp.getRegMask();
    auto [It, Inserted] = RegMaskToPreservedRegUnits.try_emplace(RegMask);
    if (!Inserted)
      return It->second;
    BitVector &PreservedRegUnits = It->second;

    PreservedRegUnits.resize(TRI.getNumRegUnits());
    for (unsigned SafeReg = 0, E = TRI.getNumRegs(); SafeReg < E; ++SafeReg)
      if (!RegMaskOp.clobbersPhysReg(SafeReg))
        for (MCRegUnit SafeUnit : TRI.regunits(SafeReg))
          PreservedRegUnits.set(static_cast<unsigned>(SafeUnit));

    return PreservedRegUnits;
  }

  /// Mark all of the given registers and their subregisters as unavailable for
  /// copying.
  void markRegsUnavailable(ArrayRef<MCRegister> Regs,
                           const TargetRegisterInfo &TRI) {
    for (MCRegister Reg : Regs) {
      // Source of copy is no longer available for propagation.
      for (MCRegUnit Unit : TRI.regunits(Reg)) {
        auto CI = Copies.find(Unit);
        if (CI != Copies.end())
          CI->second.Avail = false;
      }
    }
  }

  /// Remove register from copy maps.
  void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                          const TargetInstrInfo &TII, bool UseCopyInstr) {
    // Since Reg might be a subreg of some registers, invalidating only Reg is
    // not enough: we have to find any COPY that defines Reg or registers
    // defined by Reg and invalidate all of them. Similarly, we must invalidate
    // all of the subregisters used in the source of the COPY.
    SmallSet<MCRegUnit, 8> RegUnitsToInvalidate;
    auto InvalidateCopy = [&](MachineInstr *MI) {
      DestSourcePair CopyOperands = *isCopyInstr(*MI, TII, UseCopyInstr);
      auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
      auto DstUnits = TRI.regunits(Dst);
      auto SrcUnits = TRI.regunits(Src);
      RegUnitsToInvalidate.insert_range(DstUnits);
      RegUnitsToInvalidate.insert_range(SrcUnits);
    };

    // Collect the units of every tracked copy that defines or reads Reg...
    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      auto I = Copies.find(Unit);
      if (I != Copies.end()) {
        if (MachineInstr *MI = I->second.MI)
          InvalidateCopy(MI);
        if (MachineInstr *MI = I->second.LastSeenUseInCopy)
          InvalidateCopy(MI);
      }
    }
    // ...then erase all of those units from the map.
    for (MCRegUnit Unit : RegUnitsToInvalidate)
      Copies.erase(Unit);
  }

  /// Clobber a single register unit, removing it from the tracker's copy maps.
  void clobberRegUnit(MCRegUnit Unit, const TargetRegisterInfo &TRI,
                      const TargetInstrInfo &TII, bool UseCopyInstr) {
    auto I = Copies.find(Unit);
    if (I != Copies.end()) {
      // When we clobber the source of a copy, we need to clobber everything
      // it defined.
      markRegsUnavailable(I->second.DefRegs, TRI);
      // When we clobber the destination of a copy, we need to clobber the
      // whole register it defined.
      if (MachineInstr *MI = I->second.MI) {
        DestSourcePair CopyOperands = *isCopyInstr(*MI, TII, UseCopyInstr);
        auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);

        markRegsUnavailable(Dst, TRI);

        // Since we clobber the destination of a copy, the semantic of Src's
        // "DefRegs" to contain Def is no longer effectual. We will also need
        // to remove the record from the copy maps that indicates Src defined
        // Def. Failing to do so might cause the target to miss some
        // opportunities to further eliminate redundant copy instructions.
        // Consider the following sequence during the
        // ForwardCopyPropagateBlock procedure:
        // L1: r0 = COPY r9 <- TrackMI
        // L2: r0 = COPY r8 <- TrackMI (Remove r9 defined r0 from tracker)
        // L3: use r0 <- Remove L2 from MaybeDeadCopies
        // L4: early-clobber r9 <- Clobber r9 (L2 is still valid in tracker)
        // L5: r0 = COPY r8 <- Remove NopCopy
        for (MCRegUnit SrcUnit : TRI.regunits(Src)) {
          auto SrcCopy = Copies.find(SrcUnit);
          if (SrcCopy != Copies.end() && SrcCopy->second.LastSeenUseInCopy) {
            // If SrcCopy defines multiple values, we only need
            // to erase the record for Def in DefRegs.
            // NOLINTNEXTLINE(llvm-qualified-auto)
            for (auto Itr = SrcCopy->second.DefRegs.begin();
                 Itr != SrcCopy->second.DefRegs.end(); Itr++) {
              if (*Itr == Dst) {
                SrcCopy->second.DefRegs.erase(Itr);
                // If DefRegs becomes empty after removal, we can remove the
                // SrcCopy from the tracker's copy maps. We only remove entries
                // that solely record that Def is defined by Src. If an
                // entry also contains the definition record of other Def
                // registers, it cannot be cleared.
                if (SrcCopy->second.DefRegs.empty() && !SrcCopy->second.MI) {
                  Copies.erase(SrcCopy);
                }
                break;
              }
            }
          }
        }
      }
      // Now we can erase the copy.
      Copies.erase(I);
    }
  }

  /// Clobber a single register, removing it from the tracker's copy maps.
  void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                       const TargetInstrInfo &TII, bool UseCopyInstr) {
    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      clobberRegUnit(Unit, TRI, TII, UseCopyInstr);
    }
  }

  /// Track copy's src users, and return false if that can't be done.
  /// We can only track if we have a COPY instruction whose source is
  /// the same as the Reg.
  bool trackSrcUsers(MCRegister Reg, MachineInstr &MI,
                     const TargetRegisterInfo &TRI, const TargetInstrInfo &TII,
                     bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
    if (!AvailCopy)
      return false;

    DestSourcePair CopyOperands = *isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    MCRegister Src = getSrcMCReg(CopyOperands);

    // Bail out, if the source of the copy is not the same as the Reg.
    if (Src != Reg)
      return false;

    auto I = Copies.find(RU);
    if (I == Copies.end())
      return false;

    I->second.SrcUsers.insert(&MI);
    return true;
  }

  /// Return the users for a given register.
  SmallPtrSet<MachineInstr *, 4> getSrcUsers(MCRegister Reg,
                                             const TargetRegisterInfo &TRI) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto I = Copies.find(RU);
    if (I == Copies.end())
      return {};
    return I->second.SrcUsers;
  }

  /// Add this copy's registers into the tracker's copy maps.
  void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
                 const TargetInstrInfo &TII, bool UseCopyInstr) {
    DestSourcePair CopyOperands = *isCopyInstr(*MI, TII, UseCopyInstr);
    auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);

    // Remember Dst is defined by the copy.
    for (MCRegUnit Unit : TRI.regunits(Dst))
      Copies[Unit] = {MI, nullptr, {}, {}, true};

    // Remember source that's copied to Dst. Once it's clobbered, then
    // it's no longer available for copy propagation.
    for (MCRegUnit Unit : TRI.regunits(Src)) {
      auto &Copy = Copies[Unit];
      if (!is_contained(Copy.DefRegs, Dst))
        Copy.DefRegs.push_back(Dst);
      Copy.LastSeenUseInCopy = MI;
    }
  }

  /// True if at least one copy is currently being tracked.
  bool hasAnyCopies() {
    return !Copies.empty();
  }

  /// Return the copy (if any) recorded as defining \p RegUnit; with
  /// \p MustBeAvailable set, only return it if it is still forwardable.
  MachineInstr *findCopyForUnit(MCRegUnit RegUnit,
                                const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }

  /// Follow \p RegUnit, a unit of some copy's source, to the copy it feeds,
  /// but only when it feeds exactly one defined register.
  MachineInstr *findCopyDefViaUnit(MCRegUnit RegUnit,
                                   const TargetRegisterInfo &TRI) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
    return findCopyForUnit(RU, TRI, true);
  }

  /// Find a tracked copy whose source fully covers \p Reg and that is usable
  /// at \p I for backward propagation: no regmask in between may clobber
  /// either of the copy's operands.
  MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);

    if (!AvailCopy)
      return nullptr;

    DestSourcePair CopyOperands = *isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    auto [AvailDst, AvailSrc] = getDstSrcMCRegs(CopyOperands);
    if (!TRI.isSubRegisterEq(AvailSrc, Reg))
      return nullptr;

    for (const MachineInstr &MI :
         make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDst?
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDst))
            return nullptr;

    return AvailCopy;
  }

  /// Find a tracked, still-available copy whose destination fully covers
  /// \p Reg and that is usable at \p DestCopy for forward propagation.
  MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
                              const TargetRegisterInfo &TRI,
                              const TargetInstrInfo &TII, bool UseCopyInstr) {
    // We check the first RegUnit here, since we'll only be interested in the
    // copy if it copies the entire register anyway.
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy =
        findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);

    if (!AvailCopy)
      return nullptr;

    DestSourcePair CopyOperands = *isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    auto [AvailDst, AvailSrc] = getDstSrcMCRegs(CopyOperands);
    if (!TRI.isSubRegisterEq(AvailDst, Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
    for (const MachineInstr &MI :
         make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDst))
            return nullptr;

    return AvailCopy;
  }

  // Find last COPY that defines Reg before Current MachineInstr.
  MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
                                      MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end() || !CI->second.Avail)
      return nullptr;

    MachineInstr *DefCopy = CI->second.MI;
    DestSourcePair CopyOperands = *isCopyInstr(*DefCopy, TII, UseCopyInstr);
    MCRegister Dst = getDstMCReg(CopyOperands);
    if (!TRI.isSubRegisterEq(Dst, Reg))
      return nullptr;

    return DefCopy;
  }

  /// Drop every tracked copy whose units are clobbered by a regmask that
  /// preserves only \p PreservedRegUnits.
  void clobberNonPreservedRegs(const BitVector &PreservedRegUnits,
                               const TargetRegisterInfo &TRI,
                               const TargetInstrInfo &TII) {
    SmallVector<MCRegUnit, 8> UnitsToClobber;
    for (auto &[Unit, _] : Copies)
      if (!PreservedRegUnits.test(static_cast<unsigned>(Unit)))
        UnitsToClobber.push_back(Unit);

    for (MCRegUnit Unit : UnitsToClobber) {
      // If we clobber the RegUnit, it will mark all the DefReg Units
      // as unavailable, which leads to issues if the Destination Reg Unit is
      // preserved, and used later. As such, only mark them as unavailable if
      // they are not preserved.
      auto RegUnitInfo = Copies.find(Unit);
      if (RegUnitInfo == Copies.end())
        continue;

      for (MCRegister DstReg : RegUnitInfo->second.DefRegs) {
        for (MCRegUnit DstUnit : TRI.regunits(DstReg)) {
          if (!PreservedRegUnits.test(static_cast<unsigned>(DstUnit))) {
            if (auto CI = Copies.find(DstUnit); CI != Copies.end()) {
              CI->second.Avail = false;
            }
          }
        }
      }
      Copies.erase(RegUnitInfo);
    }
  }

  // Find last COPY that uses Reg.
  MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
                                      const TargetRegisterInfo &TRI) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end())
      return nullptr;
    return CI->second.LastSeenUseInCopy;
  }

  /// Forget all tracked copies (called at basic-block boundaries).
  void clear() {
    Copies.clear();
  }
};
471
/// Implementation of the machine copy-propagation transformation, shared by
/// the legacy and new pass-manager entry points via run().
class MachineCopyPropagation {
  const TargetRegisterInfo *TRI = nullptr;
  const TargetInstrInfo *TII = nullptr;
  const MachineRegisterInfo *MRI = nullptr;

  // When true, recognize target-specific copy-like instructions via
  // TargetInstrInfo::isCopyInstr in addition to plain COPYs.
  bool UseCopyInstr;

public:
  MachineCopyPropagation(bool CopyInstr = false)
      : UseCopyInstr(CopyInstr || MCPUseCopyInstr) {}

  bool run(MachineFunction &MF);

private:
  // Distinguishes reads by debug instructions (recorded in CopyDbgUsers for
  // later debug-value updating) from regular reads (which keep the defining
  // copy out of MaybeDeadCopies).
  typedef enum { DebugUse = false, RegularUse = true } DebugType;

  void readRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
  void readSuccessorLiveIns(const MachineBasicBlock &MBB);
  void forwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void backwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void eliminateSpillageCopies(MachineBasicBlock &MBB);
  bool eraseIfRedundant(MachineInstr &Copy, MCRegister Dst, MCRegister Src);
  void forwardUses(MachineInstr &MI);
  void propagateDefs(MachineInstr &MI);
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool isBackwardPropagatableCopy(const MachineInstr &Copy,
                                  const DestSourcePair &CopyOperands);
  /// Returns true iff a copy instruction having operand @p CopyOperand must
  /// never be eliminated as redundant.
  bool isNeverRedundant(MCRegister CopyOperand) {
    // Avoid eliminating a copy from/to a reserved registers as we cannot
    // predict the value (Example: The sparc zero register is writable but stays
    // zero).
    return MRI->isReserved(CopyOperand);
  }
  /// Returns true iff the @p Copy instruction must never be eliminated as
  /// redundant. This overload does not consider the operands of @p Copy.
  bool isNeverRedundant(const MachineInstr &Copy) {
    return Copy.getFlag(MachineInstr::FrameSetup) ||
  }
  bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
  bool hasOverlappingMultipleDef(const MachineInstr &MI,
                                 const MachineOperand &MODef, MCRegister Def);
  bool canUpdateSrcUsers(const MachineInstr &Copy,
                         const MachineOperand &CopySrc);

  /// Candidates for deletion.
  SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;

  /// Multimap tracking debug users in current BB
  DenseMap<MachineInstr *, SmallPtrSet<MachineInstr *, 2>> CopyDbgUsers;

  CopyTracker Tracker;

  bool Changed = false;
};
534
/// Legacy pass-manager wrapper around the MachineCopyPropagation
/// implementation above.
class MachineCopyPropagationLegacy : public MachineFunctionPass {
  // Forwarded to MachineCopyPropagation: also treat target-specific
  // copy-like instructions as copies.
  bool UseCopyInstr;

public:
  static char ID; // pass identification

  MachineCopyPropagationLegacy(bool UseCopyInstr = false)
      : MachineFunctionPass(ID), UseCopyInstr(UseCopyInstr) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  // This pass runs post-RA: the function must contain no virtual registers.
  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().setNoVRegs();
  }
};
555
556} // end anonymous namespace
557
char MachineCopyPropagationLegacy::ID = 0;

// Public handle to the pass ID so other code can schedule this pass.
char &llvm::MachineCopyPropagationID = MachineCopyPropagationLegacy::ID;

INITIALIZE_PASS(MachineCopyPropagationLegacy, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)
564
565void MachineCopyPropagation::readRegister(MCRegister Reg, MachineInstr &Reader,
566 DebugType DT) {
567 // If 'Reg' is defined by a copy, the copy is no longer a candidate
568 // for elimination. If a copy is "read" by a debug user, record the user
569 // for propagation.
570 for (MCRegUnit Unit : TRI->regunits(Reg)) {
571 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
572 if (DT == RegularUse) {
573 LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
574 MaybeDeadCopies.remove(Copy);
575 } else {
576 CopyDbgUsers[Copy].insert(&Reader);
577 }
578 }
579 }
580}
581
582void MachineCopyPropagation::readSuccessorLiveIns(
583 const MachineBasicBlock &MBB) {
584 if (MaybeDeadCopies.empty())
585 return;
586
587 // If a copy result is livein to a successor, it is not dead.
588 for (const MachineBasicBlock *Succ : MBB.successors()) {
589 for (const auto &LI : Succ->liveins()) {
590 for (MCRegUnitMaskIterator U(LI.PhysReg, TRI); U.isValid(); ++U) {
591 auto [Unit, Mask] = *U;
592 if ((Mask & LI.LaneMask).any()) {
593 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI))
594 MaybeDeadCopies.remove(Copy);
595 }
596 }
597 }
598 }
599}
600
/// Return true if \p PreviousCopy did copy register \p Src to register \p Dst.
/// This fact may have been obscured by sub register usage or may not be true at
/// all even though Src and Dst are subregisters of the registers used in
/// PreviousCopy. e.g.
/// isNopCopy("ecx = COPY eax", AX, CX) == true
/// isNopCopy("ecx = COPY eax", AH, CL) == false
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
                      const TargetInstrInfo *TII, bool UseCopyInstr) {

  DestSourcePair CopyOperands = *isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
  auto [PreviousDst, PreviousSrc] = getDstSrcMCRegs(CopyOperands);
  // Exact match: the previous copy moved precisely Src into Dst.
  if (Src == PreviousSrc && Dst == PreviousDst)
    return true;
  // Otherwise Src/Dst may be sub-registers of the previous copy's operands.
  // The previous copy is a nop for them only when they occupy the same
  // sub-register position within the previous source and destination.
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDst, Dst);
}
620
/// Remove instruction \p Copy if there exists a previous copy that copies the
/// register \p Src to the register \p Dst; This may happen indirectly by
/// copying the super registers.
bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
                                              MCRegister Dst, MCRegister Src) {
  // Copies involving registers we cannot reason about (reserved registers,
  // frame-setup copies) are never redundant.
  if (isNeverRedundant(Copy) || isNeverRedundant(Src) || isNeverRedundant(Dst))
    return false;

  // Search for an existing copy.
  MachineInstr *PrevCopy =
      Tracker.findAvailCopy(Copy, Dst, *TRI, *TII, UseCopyInstr);
  if (!PrevCopy)
    return false;

  DestSourcePair PrevCopyOperands = *isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
  // Check that the existing copy uses the correct sub registers.
  if (PrevCopyOperands.Destination->isDead())
    return false;
  if (!isNopCopy(*PrevCopy, Src, Dst, TRI, TII, UseCopyInstr))
    return false;

  LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());

  // Copy was redundantly redefining either Src or Dst. Remove earlier kill
  // flags between Copy and PrevCopy because the value will be reused now.
  DestSourcePair CopyOperands = *isCopyInstr(Copy, *TII, UseCopyInstr);

  // The caller may pass (Dst, Src) in either orientation, so the erased
  // copy's destination must be one of the two.
  MCRegister CopyDst = getDstMCReg(CopyOperands);
  assert(CopyDst == Src || CopyDst == Dst);
  for (MachineInstr &MI :
       make_range(PrevCopy->getIterator(), Copy.getIterator()))
    MI.clearRegisterKills(CopyDst, TRI);

  // Clear undef flag from remaining copy if needed.
  if (!CopyOperands.Source->isUndef()) {
    PrevCopy->getOperand(PrevCopyOperands.Source->getOperandNo())
        .setIsUndef(false);
  }

  Copy.eraseFromParent();
  Changed = true;
  ++NumDeletes;
  return true;
}
665
666bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
667 const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
668 DestSourcePair CopyOperands = *isCopyInstr(Copy, *TII, UseCopyInstr);
669 MCRegister Dst = getDstMCReg(CopyOperands);
670
671 if (const TargetRegisterClass *URC =
672 UseI.getRegClassConstraint(UseIdx, TII, TRI))
673 return URC->contains(Dst);
674
675 // We don't process further if UseI is a COPY, since forward copy propagation
676 // should handle that.
677 return false;
678}
679
680bool MachineCopyPropagation::isBackwardPropagatableCopy(
681 const MachineInstr &Copy, const DestSourcePair &CopyOperands) {
682 auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
683
684 if (!Dst || !Src)
685 return false;
686
687 if (isNeverRedundant(Copy) || isNeverRedundant(Dst) || isNeverRedundant(Src))
688 return false;
689
690 return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
691}
692
693/// Decide whether we should forward the source of \param Copy to its use in
694/// \param UseI based on the physical register class constraints of the opcode
695/// and avoiding introducing more cross-class COPYs.
696bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
697 const MachineInstr &UseI,
698 unsigned UseIdx) {
699 DestSourcePair CopyOperands = *isCopyInstr(Copy, *TII, UseCopyInstr);
700 MCRegister CopySrc = getSrcMCReg(CopyOperands);
701
702 // If the new register meets the opcode register constraints, then allow
703 // forwarding.
704 if (const TargetRegisterClass *URC =
705 UseI.getRegClassConstraint(UseIdx, TII, TRI))
706 return URC->contains(CopySrc);
707
708 std::optional<DestSourcePair> UseICopyOperands =
709 isCopyInstr(UseI, *TII, UseCopyInstr);
710 if (!UseICopyOperands)
711 return false;
712
713 /// COPYs don't have register class constraints, so if the user instruction
714 /// is a COPY, we just try to avoid introducing additional cross-class
715 /// COPYs. For example:
716 ///
717 /// RegClassA = COPY RegClassB // Copy parameter
718 /// ...
719 /// RegClassB = COPY RegClassA // UseI parameter
720 ///
721 /// which after forwarding becomes
722 ///
723 /// RegClassA = COPY RegClassB
724 /// ...
725 /// RegClassB = COPY RegClassB
726 ///
727 /// so we have reduced the number of cross-class COPYs and potentially
728 /// introduced a nop COPY that can be removed.
729
730 // Allow forwarding if src and dst belong to any common class, so long as they
731 // don't belong to any (possibly smaller) common class that requires copies to
732 // go via a different class.
733 MCRegister UseDst = getDstMCReg(*UseICopyOperands);
734 bool Found = false;
735 bool IsCrossClass = false;
736 for (const TargetRegisterClass *RC : TRI->regclasses()) {
737 if (RC->contains(CopySrc) && RC->contains(UseDst)) {
738 Found = true;
739 if (TRI->getCrossCopyRegClass(RC) != RC) {
740 IsCrossClass = true;
741 break;
742 }
743 }
744 }
745 if (!Found)
746 return false;
747 if (!IsCrossClass)
748 return true;
749 // The forwarded copy would be cross-class. Only do this if the original copy
750 // was also cross-class.
751 MCRegister CopyDst = getDstMCReg(CopyOperands);
752 for (const TargetRegisterClass *RC : TRI->regclasses()) {
753 if (RC->contains(CopySrc) && RC->contains(CopyDst) &&
754 TRI->getCrossCopyRegClass(RC) != RC)
755 return true;
756 }
757 return false;
758}
759
760/// Check that \p MI does not have implicit uses that overlap with it's \p Use
761/// operand (the register being replaced), since these can sometimes be
762/// implicitly tied to other operands. For example, on AMDGPU:
763///
764/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
765///
766/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
767/// way of knowing we need to update the latter when updating the former.
768bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
769 const MachineOperand &Use) {
770 for (const MachineOperand &MIUse : MI.uses())
771 if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
772 MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
773 return true;
774
775 return false;
776}
777
778/// For an MI that has multiple definitions, check whether \p MI has
779/// a definition that overlaps with another of its definitions.
780/// For example, on ARM: umull r9, r9, lr, r0
781/// The umull instruction is unpredictable unless RdHi and RdLo are different.
782bool MachineCopyPropagation::hasOverlappingMultipleDef(
783 const MachineInstr &MI, const MachineOperand &MODef, MCRegister Def) {
784 for (const MachineOperand &MIDef : MI.all_defs()) {
785 if ((&MIDef != &MODef) && MIDef.isReg() &&
786 TRI->regsOverlap(Def, MIDef.getReg()))
787 return true;
788 }
789
790 return false;
791}
792
793/// Return true if it is safe to update all users of the \p CopySrc register
794/// in the given \p Copy instruction.
795bool MachineCopyPropagation::canUpdateSrcUsers(const MachineInstr &Copy,
796 const MachineOperand &CopySrc) {
797 assert(CopySrc.isReg() && "Expected a register operand");
798 for (auto *SrcUser : Tracker.getSrcUsers(CopySrc.getReg(), *TRI)) {
799 if (hasImplicitOverlap(*SrcUser, CopySrc))
800 return false;
801
802 for (MachineOperand &MO : SrcUser->uses()) {
803 if (!MO.isReg() || !MO.isUse() || MO.getReg() != CopySrc.getReg())
804 continue;
805 if (MO.isTied() || !MO.isRenamable() ||
806 !isBackwardPropagatableRegClassCopy(Copy, *SrcUser,
807 MO.getOperandNo()))
808 return false;
809 }
810 }
811 return true;
812}
813
/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit register uses that have an active COPY
  // instruction defining them, and replace each use with the source of that
  // active COPY.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands since doing so can cause problems
    // with the machine verifier, since it doesn't treat undef reads as reads,
    // so we can end up with a live range that ends on an undef read, leading to
    // an error that the live range doesn't end on a read of the live range
    // register.
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
                                               *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    DestSourcePair CopyOperands = *isCopyInstr(*Copy, *TII, UseCopyInstr);
    auto [CopyDst, CopySrc] = getDstSrcMCRegs(CopyOperands);
    const MachineOperand &CopySrcOperand = *CopyOperands.Source;

    MCRegister ForwardedReg = CopySrc;
    // MI might use a sub-register of the Copy destination, in which case the
    // forwarded register is the matching sub-register of the Copy source.
    if (MOUse.getReg() != CopyDst) {
      unsigned SubRegIdx = TRI->getSubRegIndex(CopyDst, MOUse.getReg());
      assert(SubRegIdx &&
             "MI source is not a sub-register of Copy destination");
      ForwardedReg = TRI->getSubReg(CopySrc, SubRegIdx);
      // The copy source may simply not have this sub-register, or it may be
      // an artificial register; bail out on this operand in either case.
      if (!ForwardedReg || TRI->isArtificial(ForwardedReg)) {
        LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
                          << TRI->getSubRegIndexName(SubRegIdx) << '\n');
        continue;
      }
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrc) && !MRI->isConstantPhysReg(CopySrc))
      continue;

    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (isCopyInstr(MI, *TII, UseCopyInstr) &&
        MI.modifiesRegister(CopySrc, TRI) &&
        !MI.definesRegister(CopySrc, /*TRI=*/nullptr)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    // Honor -debug-counter bisection of individual forwardings.
    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n with " << printReg(ForwardedReg, TRI)
                      << "\n in " << MI << " from " << *Copy);

    MOUse.setReg(ForwardedReg);

    // Propagate the source operand's renamable/undef state onto the
    // rewritten use so operand flags remain accurate.
    if (!CopySrcOperand.isRenamable())
      MOUse.setIsRenamable(false);
    MOUse.setIsUndef(CopySrcOperand.isUndef());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrc, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}
915
916void MachineCopyPropagation::forwardCopyPropagateBlock(MachineBasicBlock &MBB) {
917 LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
918 << "\n");
919
920 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
921 // Analyze copies (which don't overlap themselves).
922 std::optional<DestSourcePair> CopyOperands =
923 isCopyInstr(MI, *TII, UseCopyInstr);
924 if (CopyOperands) {
925 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
926 if (!TRI->regsOverlap(Dst, Src)) {
927 // The two copies cancel out and the source of the first copy
928 // hasn't been overridden, eliminate the second one. e.g.
929 // %ecx = COPY %eax
930 // ... nothing clobbered eax.
931 // %eax = COPY %ecx
932 // =>
933 // %ecx = COPY %eax
934 //
935 // or
936 //
937 // %ecx = COPY %eax
938 // ... nothing clobbered eax.
939 // %ecx = COPY %eax
940 // =>
941 // %ecx = COPY %eax
942 if (eraseIfRedundant(MI, Dst, Src) || eraseIfRedundant(MI, Src, Dst))
943 continue;
944 }
945 }
946
947 // Clobber any earlyclobber regs first.
948 for (const MachineOperand &MO : MI.operands())
949 if (MO.isReg() && MO.isEarlyClobber()) {
950 MCRegister Reg = MO.getReg().asMCReg();
951 // If we have a tied earlyclobber, that means it is also read by this
952 // instruction, so we need to make sure we don't remove it as dead
953 // later.
954 if (MO.isTied())
955 readRegister(Reg, MI, RegularUse);
956 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
957 }
958
959 forwardUses(MI);
960
961 // Attempt to canonicalize/optimize the instruction now its arguments have
962 // been mutated. This may convert MI from a non-copy to a copy instruction.
963 if (TII->simplifyInstruction(MI)) {
964 Changed = true;
965 LLVM_DEBUG(dbgs() << "MCP: After simplifyInstruction: " << MI);
966 }
967
968 CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
969 if (CopyOperands) {
970 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
971 if (!TRI->regsOverlap(Dst, Src)) {
972 // FIXME: Document why this does not consider `RegSrc`, similar to how
973 // `backwardCopyPropagateBlock` does.
974 if (!isNeverRedundant(MI) && !isNeverRedundant(Dst))
975 MaybeDeadCopies.insert(&MI);
976 }
977 }
978
980 const MachineOperand *RegMask = nullptr;
981 for (const MachineOperand &MO : MI.operands()) {
982 if (MO.isRegMask())
983 RegMask = &MO;
984 if (!MO.isReg())
985 continue;
986 Register Reg = MO.getReg();
987 if (!Reg)
988 continue;
989
991 "MachineCopyPropagation should be run after register allocation!");
992
993 if (MO.isDef() && !MO.isEarlyClobber()) {
994 // Skip invalidating constant registers.
995 if (!MRI->isConstantPhysReg(Reg)) {
996 Defs.push_back(Reg.asMCReg());
997 continue;
998 }
999 } else if (MO.readsReg()) {
1000 readRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
1001 }
1002 }
1003
1004 // The instruction has a register mask operand which means that it clobbers
1005 // a large set of registers. Treat clobbered registers the same way as
1006 // defined registers.
1007 if (RegMask) {
1008 BitVector &PreservedRegUnits =
1009 Tracker.getPreservedRegUnits(*RegMask, *TRI);
1010
1011 // Erase any MaybeDeadCopies whose destination register is clobbered.
1012 for (SmallSetVector<MachineInstr *, 8>::iterator DI =
1013 MaybeDeadCopies.begin();
1014 DI != MaybeDeadCopies.end();) {
1015 MachineInstr *MaybeDead = *DI;
1016 std::optional<DestSourcePair> CopyOperands =
1017 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
1018 MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
1019 assert(!isNeverRedundant(*MaybeDead) && !isNeverRedundant(Reg));
1020
1021 if (!RegMask->clobbersPhysReg(Reg)) {
1022 ++DI;
1023 continue;
1024 }
1025
1026 // Invalidate all entries in the copy map which are not preserved by
1027 // this register mask.
1028 bool MIRefedinCopyInfo = false;
1029 for (MCRegUnit RegUnit : TRI->regunits(Reg)) {
1030 if (!PreservedRegUnits.test(static_cast<unsigned>(RegUnit)))
1031 Tracker.clobberRegUnit(RegUnit, *TRI, *TII, UseCopyInstr);
1032 else {
1033 if (MaybeDead == Tracker.findCopyForUnit(RegUnit, *TRI)) {
1034 MIRefedinCopyInfo = true;
1035 }
1036 }
1037 }
1038
1039 // erase() will return the next valid iterator pointing to the next
1040 // element after the erased one.
1041 DI = MaybeDeadCopies.erase(DI);
1042
1043 // Preserved by RegMask, DO NOT remove copy
1044 if (MIRefedinCopyInfo)
1045 continue;
1046
1047 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: "
1048 << *MaybeDead);
1049
1050 MaybeDead->eraseFromParent();
1051 Changed = true;
1052 ++NumDeletes;
1053 }
1054 }
1055
1056 // Any previous copy definition or reading the Defs is no longer available.
1057 for (MCRegister Reg : Defs)
1058 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1059
1060 if (CopyOperands) {
1061 auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
1062 if (!TRI->regsOverlap(Dst, Src)) {
1063 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1064 }
1065 }
1066 }
1067
1068 bool TracksLiveness = MRI->tracksLiveness();
1069
1070 // If liveness is tracked, we can use the live-in lists to know which
1071 // copies aren't dead.
1072 if (TracksLiveness)
1073 readSuccessorLiveIns(MBB);
1074
1075 // If MBB doesn't have succesor, delete copies whose defs are not used.
1076 // If MBB does have successors, we can only delete copies if we are able to
1077 // use liveness information from successors to confirm they are really dead.
1078 if (MBB.succ_empty() || TracksLiveness) {
1079 for (MachineInstr *MaybeDead : MaybeDeadCopies) {
1080 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
1081 MaybeDead->dump());
1082
1083 DestSourcePair CopyOperands =
1084 *isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
1085
1086 auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
1087 assert(!isNeverRedundant(*MaybeDead) && !isNeverRedundant(Dst));
1088
1089 // Update matching debug values, if any.
1090 const auto &DbgUsers = CopyDbgUsers[MaybeDead];
1091 SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
1092 DbgUsers.end());
1093 MRI->updateDbgUsersToReg(Dst, Src, MaybeDeadDbgUsers);
1094
1095 MaybeDead->eraseFromParent();
1096 Changed = true;
1097 ++NumDeletes;
1098 }
1099 }
1100
1101 MaybeDeadCopies.clear();
1102 CopyDbgUsers.clear();
1103 Tracker.clear();
1104}
1105
/// Backward-propagation worker for a single instruction.
///
/// For each explicit, renamable, non-tied def of \p MI that exactly matches
/// the source of a copy the tracker has recorded as available backward
/// (i.e. a later `Dst = COPY Src` in the block), rewrite the def — and any
/// tracked intermediate readers of Src — to write Dst directly, then mark
/// the copy as a deletion candidate in MaybeDeadCopies.
void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);

    // Only explicit register defs are candidates.
    if (!MODef.isReg() || MODef.isUse())
      continue;

    // Ignore non-trivial cases.
    if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
      continue;

    if (!MODef.getReg())
      continue;

    // We only handle if the register comes from a vreg.
    if (!MODef.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailBackwardCopy(
        MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    DestSourcePair CopyOperands = *isCopyInstr(*Copy, *TII, UseCopyInstr);
    auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);

    // Require an exact register match; sub-register cases are not handled.
    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MODef))
      continue;

    // Renaming the def must not collide with another def of Dst in MI.
    if (hasOverlappingMultipleDef(MI, MODef, Dst))
      continue;

    // All tracked readers of Src between MI and the copy must be rewritable.
    if (!canUpdateSrcUsers(*Copy, *CopyOperands.Source))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n     with " << printReg(Dst, TRI) << "\n     in "
                      << MI << "     from " << *Copy);

    MODef.setReg(Dst);
    MODef.setIsRenamable(CopyOperands.Destination->isRenamable());

    // Rewrite every tracked intermediate use of Src to read Dst instead, so
    // the renamed def still feeds them.
    for (auto *SrcUser : Tracker.getSrcUsers(Src, *TRI)) {
      for (MachineOperand &MO : SrcUser->uses()) {
        if (!MO.isReg() || !MO.isUse() || MO.getReg() != Src)
          continue;
        MO.setReg(Dst);
        MO.setIsRenamable(CopyOperands.Destination->isRenamable());
      }
    }

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
    // The copy now copies Dst into itself and is a deletion candidate.
    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}
1173
/// Propagate copies through \p MBB in reverse program order.
///
/// Trivial backward-propagatable copies (no implicit operands, destination
/// and source disjoint) are recorded in the tracker; propagateDefs() then
/// rewrites earlier defs of a copy's source to define its destination
/// directly. Copies made dead this way are erased at the end of the walk,
/// after retargeting any debug users.
void MachineCopyPropagation::backwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
    // Ignore non-trivial COPYs.
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands && MI.getNumImplicitOperands() == 0) {
      auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);

      if (!TRI->regsOverlap(Dst, Src)) {
        // Unlike forward cp, we don't invoke propagateDefs here,
        // just let forward cp do COPY-to-COPY propagation.
        if (isBackwardPropagatableCopy(MI, *CopyOperands)) {
          Tracker.invalidateRegister(Src, *TRI, *TII, UseCopyInstr);
          Tracker.invalidateRegister(Dst, *TRI, *TII, UseCopyInstr);
          Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
          continue;
        }
      }
    }

    // Invalidate any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    // Try to rewrite this instruction's defs using tracked copies, then
    // update tracker state for its operands.
    propagateDefs(MI);
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;

      if (!MO.getReg())
        continue;

      // A def kills any tracked copy involving this register.
      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                   UseCopyInstr);

      if (MO.readsReg()) {
        if (MO.isDebug()) {
          // Check if the register in the debug instruction is utilized
          // in a copy instruction, so we can update the debug info if the
          // register is changed.
          for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
            if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
              CopyDbgUsers[Copy].insert(&MI);
            }
          }
        } else if (!Tracker.trackSrcUsers(MO.getReg().asMCReg(), MI, *TRI,
                                          *TII, UseCopyInstr)) {
          // If we can't track the source users, invalidate the register.
          Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
        }
      }
    }
  }

  // Erase copies made dead by propagateDefs, retargeting their debug users
  // from the copy's source to its destination first.
  for (auto *Copy : MaybeDeadCopies) {
    DestSourcePair CopyOperands = *isCopyInstr(*Copy, *TII, UseCopyInstr);
    auto [Dst, Src] = getDstSrcMCRegs(CopyOperands);
    const auto &DbgUsers = CopyDbgUsers[Copy];
    SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
                                                  DbgUsers.end());

    MRI->updateDbgUsersToReg(Src, Dst, MaybeDeadDbgUsers);
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
1255
1256[[maybe_unused]] static void printSpillReloadChain(
1259 MachineInstr *Leader) {
1260 auto &SC = SpillChain[Leader];
1261 auto &RC = ReloadChain[Leader];
1262 for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1263 (*I)->dump();
1264 for (MachineInstr *MI : RC)
1265 MI->dump();
1266}
1267
1268// Remove spill-reload like copy chains. For example
1269// r0 = COPY r1
1270// r1 = COPY r2
1271// r2 = COPY r3
1272// r3 = COPY r4
1273// <def-use r4>
1274// r4 = COPY r3
1275// r3 = COPY r2
1276// r2 = COPY r1
1277// r1 = COPY r0
1278// will be folded into
1279// r0 = COPY r1
1280// r1 = COPY r4
1281// <def-use r4>
1282// r4 = COPY r1
1283// r1 = COPY r0
1284// TODO: Currently we don't track usage of r0 outside the chain, so we
1285// conservatively keep its value as it was before the rewrite.
1286//
1287// The algorithm is trying to keep
1288// property#1: No Dst of spill COPY in the chain is used or defined until the
1289// paired reload COPY in the chain uses the Dst.
1290//
1291// property#2: NO Source of COPY in the chain is used or defined until the next
1292// COPY in the chain defines the Source, except the innermost spill-reload
1293// pair.
1294//
1295// The algorithm is conducted by checking every COPY inside the MBB, assuming
1296// the COPY is a reload COPY, then try to find paired spill COPY by searching
// backward for the COPY that defines the Src of the reload COPY. If such a
// pair is found, it is added either to an existing chain or to a new chain,
// depending on whether the last available COPY that uses the Dst of the
// reload COPY already belongs to a chain.
1300// Implementation notes, we use CopyTracker::findLastDefCopy(Reg, ...) to find
1301// out last COPY that defines Reg; we use CopyTracker::findLastUseCopy(Reg, ...)
// to find out the last COPY that uses Reg. When we encounter a non-COPY
// instruction, we check the registers in its operands. If such a register
// Reg is defined by a COPY, we untrack Reg via
// CopyTracker::clobberRegister(Reg, ...).
/// Fold spill-reload shaped copy chains in \p MBB (see the file comment block
/// above for the overall algorithm and the two properties it maintains).
void MachineCopyPropagation::eliminateSpillageCopies(MachineBasicBlock &MBB) {
  // ChainLeader maps MI inside a spill-reload chain to its innermost reload
  // COPY. Thus we can track if a MI belongs to an existing spill-reload chain.
  DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
  // SpillChain maps innermost reload COPY of a spill-reload chain to a
  // sequence of COPYs that forms spills of a spill-reload chain.
  // ReloadChain maps innermost reload COPY of a spill-reload chain to a
  // sequence of COPYs that forms reloads of a spill-reload chain.
  DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
  // If a COPY's Source has use or def until next COPY defines the Source,
  // we put the COPY in this set to keep property#2.
  DenseSet<const MachineInstr *> CopySourceInvalid;

  // Rewrite one collected chain: redirect the innermost spill to read the
  // outermost spill's source, redirect the innermost reload to write the
  // outermost reload's destination, and delete the copies in between.
  auto TryFoldSpillageCopies =
      [&, this](const SmallVectorImpl<MachineInstr *> &SC,
                const SmallVectorImpl<MachineInstr *> &RC) {
        assert(SC.size() == RC.size() && "Spill-reload should be paired");

        // We need at least 3 pairs of copies for the transformation to apply,
        // because the first outermost pair cannot be removed since we don't
        // recolor outside of the chain and that we need at least one temporary
        // spill slot to shorten the chain. If we only have a chain of two
        // pairs, we already have the shortest sequence this code can handle:
        // the outermost pair for the temporary spill slot, and the pair that
        // use that temporary spill slot for the other end of the chain.
        // TODO: We might be able to simplify to one spill-reload pair if
        // collecting more information about the outermost COPY.
        if (SC.size() <= 2)
          return;

        // If we violate property#2, we don't fold the chain.
        for (const MachineInstr *Spill : drop_begin(SC))
          if (CopySourceInvalid.count(Spill))
            return;

        for (const MachineInstr *Reload : drop_end(RC))
          if (CopySourceInvalid.count(Reload))
            return;

        // The two registers being joined must share a register class.
        auto CheckCopyConstraint = [this](Register Dst, Register Src) {
          return TRI->getCommonMinimalPhysRegClass(Dst, Src);
        };

        // Replace the operand identified by Old in MI with New's register.
        auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
                            const MachineOperand *New) {
          for (MachineOperand &MO : MI->operands()) {
            if (&MO == Old)
              MO.setReg(New->getReg());
          }
        };

        DestSourcePair InnerMostSpillCopy =
            *isCopyInstr(*SC[0], *TII, UseCopyInstr);
        DestSourcePair OuterMostSpillCopy =
            *isCopyInstr(*SC.back(), *TII, UseCopyInstr);
        DestSourcePair InnerMostReloadCopy =
            *isCopyInstr(*RC[0], *TII, UseCopyInstr);
        DestSourcePair OuterMostReloadCopy =
            *isCopyInstr(*RC.back(), *TII, UseCopyInstr);
        if (!CheckCopyConstraint(getSrcMCReg(OuterMostSpillCopy),
                                 getSrcMCReg(InnerMostSpillCopy)) ||
            !CheckCopyConstraint(getDstMCReg(InnerMostReloadCopy),
                                 getDstMCReg(OuterMostReloadCopy)))
          return;

        SpillageChainsLength += SC.size() + RC.size();
        NumSpillageChains += 1;
        UpdateReg(SC[0], InnerMostSpillCopy.Destination,
                  OuterMostSpillCopy.Source);
        UpdateReg(RC[0], InnerMostReloadCopy.Source,
                  OuterMostReloadCopy.Destination);

        // Delete the now-bypassed middle of the chain; the outermost pair
        // and the rewritten innermost pair remain.
        for (size_t I = 1; I < SC.size() - 1; ++I) {
          SC[I]->eraseFromParent();
          RC[I]->eraseFromParent();
          NumDeletes += 2;
        }
      };

  // A COPY is foldable only if it has no implicit operands and both its
  // registers are renamable and do not overlap.
  auto GetFoldableCopy =
      [this](const MachineInstr &MaybeCopy) -> std::optional<DestSourcePair> {
    if (MaybeCopy.getNumImplicitOperands() > 0)
      return std::nullopt;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
    if (!CopyOperands)
      return std::nullopt;
    auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
    if (Src && Dst && !TRI->regsOverlap(Src, Dst) &&
        CopyOperands->Source->isRenamable() &&
        CopyOperands->Destination->isRenamable())
      return CopyOperands;

    return std::nullopt;
  };

  // Spill and Reload pair up when they exchange the same two registers
  // in opposite directions.
  auto IsSpillReloadPair = [&](const MachineInstr &Spill,
                               const MachineInstr &Reload) {
    std::optional<DestSourcePair> FoldableSpillCopy = GetFoldableCopy(Spill);
    if (!FoldableSpillCopy)
      return false;
    std::optional<DestSourcePair> FoldableReloadCopy = GetFoldableCopy(Reload);
    if (!FoldableReloadCopy)
      return false;
    return FoldableSpillCopy->Source->getReg() ==
               FoldableReloadCopy->Destination->getReg() &&
           FoldableSpillCopy->Destination->getReg() ==
               FoldableReloadCopy->Source->getReg();
  };

  // Current extends the chain ending at Prev when Prev's source register
  // is Current's destination register.
  auto IsChainedCopy = [&](const MachineInstr &Prev,
                           const MachineInstr &Current) {
    std::optional<DestSourcePair> FoldablePrevCopy = GetFoldableCopy(Prev);
    if (!FoldablePrevCopy)
      return false;
    std::optional<DestSourcePair> FoldableCurrentCopy =
        GetFoldableCopy(Current);
    if (!FoldableCurrentCopy)
      return false;
    return FoldablePrevCopy->Source->getReg() ==
           FoldableCurrentCopy->Destination->getReg();
  };

  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);

    // Update track information via non-copy instruction.
    SmallSet<Register, 8> RegsToClobber;
    if (!CopyOperands) {
      for (const MachineOperand &MO : MI.operands()) {
        if (MO.isRegMask()) {
          BitVector &PreservedRegUnits =
              Tracker.getPreservedRegUnits(MO, *TRI);
          Tracker.clobberNonPreservedRegs(PreservedRegUnits, *TRI, *TII);
          continue;
        }
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        MachineInstr *LastUseCopy =
            Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
        if (LastUseCopy) {
          LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
          LLVM_DEBUG(LastUseCopy->dump());
          LLVM_DEBUG(dbgs() << "might be invalidated by\n");
          LLVM_DEBUG(MI.dump());
          CopySourceInvalid.insert(LastUseCopy);
        }
        // Must be noted Tracker.clobberRegister(Reg, ...) removes tracking of
        // Reg, i.e, COPY that defines Reg is removed from the mapping as well
        // as marking COPYs that uses Reg unavailable.
        // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
        // defined by a previous COPY, since we don't want to make COPYs uses
        // Reg unavailable.
        if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
                                          UseCopyInstr))
          // Thus we can keep the property#1.
          RegsToClobber.insert(Reg);
      }
      for (Register Reg : RegsToClobber) {
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
        LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
                          << "\n");
      }
      continue;
    }

    auto [Dst, Src] = getDstSrcMCRegs(*CopyOperands);
    // Check if we can find a pair spill-reload copy.
    LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
    LLVM_DEBUG(MI.dump());
    MachineInstr *MaybeSpill =
        Tracker.findLastSeenDefInCopy(MI, Src, *TRI, *TII, UseCopyInstr);
    bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
    if (!MaybeSpillIsChained && MaybeSpill &&
        IsSpillReloadPair(*MaybeSpill, MI)) {
      // Check if we already have an existing chain. Now we have a
      // spill-reload pair.
      // L2: r2 = COPY r3
      // L5: r3 = COPY r2
      // Looking for a valid COPY before L5 which uses r3.
      // This can be several cases.
      // Case #1:
      // No COPY is found, which can be r3 is def-use between (L2, L5), we
      // create a new chain for L2 and L5.
      // Case #2:
      // L2: r2 = COPY r3
      // L5: r3 = COPY r2
      // Such COPY is found and is L2, we create a new chain for L2 and L5.
      // Case #3:
      // L2: r2 = COPY r3
      // L3: r1 = COPY r3
      // L5: r3 = COPY r2
      // we create a new chain for L2 and L5.
      // Case #4:
      // L2: r2 = COPY r3
      // L3: r1 = COPY r3
      // L4: r3 = COPY r1
      // L5: r3 = COPY r2
      // Such COPY won't be found since L4 defines r3. we create a new chain
      // for L2 and L5.
      // Case #5:
      // L2: r2 = COPY r3
      // L3: r3 = COPY r1
      // L4: r1 = COPY r3
      // L5: r3 = COPY r2
      // COPY is found and is L4 which belongs to an existing chain, we add
      // L2 and L5 to this chain.
      LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
      LLVM_DEBUG(MaybeSpill->dump());
      MachineInstr *MaybePrevReload = Tracker.findLastSeenUseInCopy(Dst, *TRI);
      auto Leader = ChainLeader.find(MaybePrevReload);
      MachineInstr *L = nullptr;
      if (Leader == ChainLeader.end() ||
          (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
        L = &MI;
        assert(!SpillChain.count(L) &&
               "SpillChain should not have contained newly found chain");
      } else {
        assert(MaybePrevReload &&
               "Found a valid leader through nullptr should not happend");
        L = Leader->second;
        assert(SpillChain[L].size() > 0 &&
               "Existing chain's length should be larger than zero");
      }
      assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
             "Newly found paired spill-reload should not belong to any chain "
             "at this point");
      ChainLeader.insert({MaybeSpill, L});
      ChainLeader.insert({&MI, L});
      SpillChain[L].push_back(MaybeSpill);
      ReloadChain[L].push_back(&MI);
      LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
      LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
    } else if (MaybeSpill && !MaybeSpillIsChained) {
      // MaybeSpill is unable to pair with MI. That's to say adding MI makes
      // the chain invalid.
      // The COPY defines Src is no longer considered as a candidate of a
      // valid chain. Since we expect the Dst of a spill copy isn't used by
      // any COPY instruction until a reload copy. For example:
      // L1: r1 = COPY r2
      // L2: r3 = COPY r1
      // If we later have
      // L1: r1 = COPY r2
      // L2: r3 = COPY r1
      // L3: r2 = COPY r1
      // L1 and L3 can't be a valid spill-reload pair.
      // Thus we keep the property#1.
      LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
      LLVM_DEBUG(MaybeSpill->dump());
      LLVM_DEBUG(MI.dump());
      Tracker.clobberRegister(Src, *TRI, *TII, UseCopyInstr);
      LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
                        << "\n");
    }
    Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
  }

  // Fold every collected chain (chains shorter than 3 pairs are rejected
  // inside TryFoldSpillageCopies).
  for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
    auto &SC = I->second;
    assert(ReloadChain.count(I->first) &&
           "Reload chain of the same leader should exist");
    auto &RC = ReloadChain[I->first];
    TryFoldSpillageCopies(SC, RC);
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
1578
1579bool MachineCopyPropagationLegacy::runOnMachineFunction(MachineFunction &MF) {
1580 if (skipFunction(MF.getFunction()))
1581 return false;
1582
1583 return MachineCopyPropagation(UseCopyInstr).run(MF);
1584}
1585
1586PreservedAnalyses
1589 MFPropsModifier _(*this, MF);
1590 if (!MachineCopyPropagation(UseCopyInstr).run(MF))
1591 return PreservedAnalyses::all();
1593 PA.preserveSet<CFGAnalyses>();
1594 return PA;
1595}
1596
1597bool MachineCopyPropagation::run(MachineFunction &MF) {
1598 bool IsSpillageCopyElimEnabled = false;
1600 case cl::BOU_UNSET:
1601 IsSpillageCopyElimEnabled =
1603 break;
1604 case cl::BOU_TRUE:
1605 IsSpillageCopyElimEnabled = true;
1606 break;
1607 case cl::BOU_FALSE:
1608 IsSpillageCopyElimEnabled = false;
1609 break;
1610 }
1611
1612 Changed = false;
1613
1615 TII = MF.getSubtarget().getInstrInfo();
1616 MRI = &MF.getRegInfo();
1617
1618 for (MachineBasicBlock &MBB : MF) {
1619 if (IsSpillageCopyElimEnabled)
1620 eliminateSpillageCopies(MBB);
1621 backwardCopyPropagateBlock(MBB);
1622 forwardCopyPropagateBlock(MBB);
1623 }
1624
1625 return Changed;
1626}
1627
/// Factory for the legacy machine copy propagation pass.
/// \p UseCopyInstr selects whether target-specific copy-like instructions
/// (recognized via isCopyInstr) are treated as copies in addition to plain
/// COPYs. Note the default argument lives on this definition.
MachineFunctionPass *
llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
  return new MachineCopyPropagationLegacy(UseCopyInstr);
}
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock & MBB
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This file provides an implementation of debug counters.
#define DEBUG_COUNTER(VARNAME, COUNTERNAME, DESC)
This file defines the DenseMap class.
#define DEBUG_TYPE
const HexagonInstrInfo * TII
#define _
IRTranslator LLVM IR MI
#define I(x, y, z)
Definition MD5.cpp:57
static cl::opt< cl::boolOrDefault > EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden)
static void printSpillReloadChain(DenseMap< MachineInstr *, SmallVector< MachineInstr * > > &SpillChain, DenseMap< MachineInstr *, SmallVector< MachineInstr * > > &ReloadChain, MachineInstr *Leader)
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src, MCRegister Dst, const TargetRegisterInfo *TRI, const TargetInstrInfo *TII, bool UseCopyInstr)
Return true if PreviousCopy did copy register Src to register Dst.
static cl::opt< bool > MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false), cl::Hidden)
Register Reg
Register const TargetRegisterInfo * TRI
Promote Memory to Register
Definition Mem2Reg.cpp:110
MachineInstr unsigned OpIdx
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
Definition PassSupport.h:56
This file contains some templates that are useful if you are working with the STL at all.
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Definition Statistic.h:171
#define LLVM_DEBUG(...)
Definition Debug.h:114
LLVM_ABI void setPreservesCFG()
This function should be called by the pass, iff they do not:
Definition Pass.cpp:270
bool test(unsigned Idx) const
Definition BitVector.h:480
void resize(unsigned N, bool t=false)
resize - Grow or shrink the bitvector.
Definition BitVector.h:360
BitVector & set()
Definition BitVector.h:370
Represents analyses that only rely on functions' control flow.
Definition Analysis.h:73
static bool shouldExecute(CounterInfo &Counter)
iterator find(const_arg_type_t< KeyT > Val)
Definition DenseMap.h:178
iterator begin()
Definition DenseMap.h:78
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
Definition DenseMap.h:174
iterator end()
Definition DenseMap.h:81
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Definition DenseMap.h:241
Wrapper class representing physical registers. Should be passed by value.
Definition MCRegister.h:41
An RAII based helper class to modify MachineFunctionProperties when running pass.
iterator_range< succ_iterator > successors()
LLVM_ABI StringRef getName() const
Return the name of the corresponding LLVM basic block, or an empty string.
PreservedAnalyses run(MachineFunction &MF, MachineFunctionAnalysisManager &MFAM)
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
Function & getFunction()
Return the LLVM function that this machine code represents.
Representation of each machine instruction.
LLVM_ABI void dump() const
const MachineOperand & getOperand(unsigned i) const
LLVM_ABI MachineInstrBundleIterator< MachineInstr > eraseFromParent()
Unlink 'this' from the containing basic block and delete it.
LLVM_ABI const TargetRegisterClass * getRegClassConstraint(unsigned OpIdx, const TargetInstrInfo *TII, const TargetRegisterInfo *TRI) const
Compute the static register class constraint for operand OpIdx.
MachineOperand class - Representation of each machine instruction operand.
LLVM_ABI unsigned getOperandNo() const
Returns the index of this operand in the instruction that it belongs to.
LLVM_ABI void setIsRenamable(bool Val=true)
bool isReg() const
isReg - Tests if this is a MO_Register operand.
LLVM_ABI void setReg(Register Reg)
Change the register this operand corresponds to.
LLVM_ABI bool isRenamable() const
isRenamable - Returns true if this register may be renamed, i.e.
void setIsUndef(bool Val=true)
Register getReg() const
getReg - Returns the register number.
static bool clobbersPhysReg(const uint32_t *RegMask, MCRegister PhysReg)
clobbersPhysReg - Returns true if this RegMask clobbers PhysReg.
const uint32_t * getRegMask() const
getRegMask - Returns a bit mask of registers preserved by this RegMask operand.
bool tracksLiveness() const
tracksLiveness - Returns true when tracking register liveness accurately.
bool isReserved(MCRegister PhysReg) const
isReserved - Returns true when PhysReg is a reserved register.
LLVM_ABI bool isConstantPhysReg(MCRegister PhysReg) const
Returns true if PhysReg is unallocatable and constant throughout the function.
void updateDbgUsersToReg(MCRegister OldReg, MCRegister NewReg, ArrayRef< MachineInstr * > Users) const
updateDbgUsersToReg - Update a collection of debug instructions to refer to the designated register.
void dump() const
Definition Pass.cpp:146
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Definition Analysis.h:118
Wrapper class representing virtual and physical registers.
Definition Register.h:20
MCRegister asMCReg() const
Utility to check-convert this value to a MCRegister.
Definition Register.h:107
constexpr bool isPhysical() const
Return true if the specified register number is in the physical register namespace.
Definition Register.h:83
void insert_range(Range &&R)
Definition SmallSet.h:196
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
Definition SmallSet.h:184
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
TargetInstrInfo - Interface to description of machine instruction set.
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDesc objects that represent all of the machine registers that the target has.
virtual bool enableSpillageCopyElimination() const
Enable spillage copy elimination in MachineCopyPropagation pass.
virtual const TargetInstrInfo * getInstrInfo() const
virtual const TargetRegisterInfo * getRegisterInfo() const =0
Return the target's register information.
std::pair< iterator, bool > insert(const ValueT &V)
Definition DenseSet.h:202
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
Definition DenseSet.h:180
reverse_self_iterator getReverseIterator()
Definition ilist_node.h:126
self_iterator getIterator()
Definition ilist_node.h:123
Changed
This provides a very simple, boring adaptor for a begin and end iterator into a range type.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
initializer< Ty > init(const Ty &Val)
PointerTypeMap run(const Module &M)
Compute the PointerTypeMap for the module M.
Value * readRegister(IRBuilder<> &IRB, StringRef Name)
NodeAddr< UseNode * > Use
Definition RDFGraph.h:385
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
Definition STLExtras.h:316
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
Definition STLExtras.h:1669
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting iterators.
Definition STLExtras.h:634
AnalysisManager< MachineFunction > MachineFunctionAnalysisManager
LLVM_ABI PreservedAnalyses getMachineFunctionPassPreservedAnalyses()
Returns the minimum set of Analyses that all machine function passes must preserve.
auto reverse(ContainerTy &&C)
Definition STLExtras.h:408
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition Debug.cpp:207
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference sizeof(SmallVector&lt;T, 0&gt;).
auto drop_end(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the last N elements excluded.
Definition STLExtras.h:323
ArrayRef(const T &OneElt) -> ArrayRef< T >
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
Definition STLExtras.h:1947
LLVM_ABI MachineFunctionPass * createMachineCopyPropagationPass(bool UseCopyInstr)
LLVM_ABI Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.
LLVM_ABI char & MachineCopyPropagationID
MachineCopyPropagation - This pass performs copy propagation on machine instructions.
const MachineOperand * Source
const MachineOperand * Destination