MachineCopyPropagation.cpp
1//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This is an extremely simple MachineInstr-level copy propagation pass.
10//
11// This pass forwards the source of COPYs to the users of their destinations
12// when doing so is legal. For example:
13//
14// %reg1 = COPY %reg0
15// ...
16// ... = OP %reg1
17//
18// If
19// - %reg0 has not been clobbered by the time of the use of %reg1
20// - the register class constraints are satisfied
21// - the COPY def is the only value that reaches OP
22// then this pass replaces the above with:
23//
24// %reg1 = COPY %reg0
25// ...
26// ... = OP %reg0
27//
28// This pass also removes some redundant COPYs. For example:
29//
30// %R1 = COPY %R0
31// ... // No clobber of %R1
32// %R0 = COPY %R1 <<< Removed
33//
34// or
35//
36// %R1 = COPY %R0
37// ... // No clobber of %R0
38// %R1 = COPY %R0 <<< Removed
39//
40// or
41//
42// $R0 = OP ...
43// ... // No read/clobber of $R0 and $R1
44// $R1 = COPY $R0 // $R0 is killed
45// Replace $R0 with $R1 and remove the COPY
46// $R1 = OP ...
47// ...
48//
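// In addition, when spillage-copy elimination is enabled (see the
// EnableSpillageCopyElimination option below), the pass shortens spill-reload
// COPY chains; a worked example is given above EliminateSpillageCopies().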
49//===----------------------------------------------------------------------===//
50
51#include "llvm/ADT/DenseMap.h"
52#include "llvm/ADT/STLExtras.h"
53#include "llvm/ADT/SetVector.h"
54#include "llvm/ADT/SmallSet.h"
55#include "llvm/ADT/SmallVector.h"
56#include "llvm/ADT/Statistic.h"
69#include "llvm/Pass.h"
70#include "llvm/Support/Debug.h"
71#include "llvm/Support/DebugCounter.h"
72#include "llvm/Support/raw_ostream.h"
73#include <cassert>
74#include <iterator>
75
76using namespace llvm;
77
78#define DEBUG_TYPE "machine-cp"
79
80STATISTIC(NumDeletes, "Number of dead copies deleted");
81STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
82STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
83STATISTIC(SpillageChainsLength, "Length of spillage chains");
84STATISTIC(NumSpillageChains, "Number of spillage chains");
85DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
86 "Controls which register COPYs are forwarded");
87
88static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
89 cl::Hidden);
90static cl::opt<cl::boolOrDefault>
91 EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
92
93namespace {
94
95static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
96 const TargetInstrInfo &TII,
97 bool UseCopyInstr) {
98 if (UseCopyInstr)
99 return TII.isCopyInstr(MI);
100
101 if (MI.isCopy())
102 return std::optional<DestSourcePair>(
103 DestSourcePair{MI.getOperand(0), MI.getOperand(1)});
104
105 return std::nullopt;
106}
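// Illustrative note (not part of the original source): with UseCopyInstr set,
// a target-specific move such as the AArch64 instruction
//   $w0 = ORRWrs $wzr, $w1, 0
// is reported by TII.isCopyInstr() as "$w0 = COPY $w1", whereas plain
// MI.isCopy() only recognizes the generic COPY opcode.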
107
108class CopyTracker {
109 struct CopyInfo {
110 MachineInstr *MI, *LastSeenUseInCopy;
111 SmallVector<MCRegister, 4> DefRegs;
112 bool Avail;
113 };
114
115 DenseMap<MCRegister, CopyInfo> Copies;
116
117public:
118 /// Mark all of the given registers and their subregisters as unavailable for
119 /// copying.
120 void markRegsUnavailable(ArrayRef<MCRegister> Regs,
121 const TargetRegisterInfo &TRI) {
122 for (MCRegister Reg : Regs) {
123 // Source of copy is no longer available for propagation.
124 for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
125 auto CI = Copies.find(*RUI);
126 if (CI != Copies.end())
127 CI->second.Avail = false;
128 }
129 }
130 }
131
132 /// Remove register from copy maps.
133 void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
134 const TargetInstrInfo &TII, bool UseCopyInstr) {
135 // Since Reg might be a subreg of some registers, invalidating Reg alone is
136 // not enough. We have to find the COPYs that define Reg or registers defined
137 // by Reg and invalidate all of them.
138 SmallSet<MCRegister, 8> RegsToInvalidate;
139 RegsToInvalidate.insert(Reg);
140 for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
141 auto I = Copies.find(*RUI);
142 if (I != Copies.end()) {
143 if (MachineInstr *MI = I->second.MI) {
144 std::optional<DestSourcePair> CopyOperands =
145 isCopyInstr(*MI, TII, UseCopyInstr);
146 assert(CopyOperands && "Expect copy");
147
148 RegsToInvalidate.insert(
149 CopyOperands->Destination->getReg().asMCReg());
150 RegsToInvalidate.insert(CopyOperands->Source->getReg().asMCReg());
151 }
152 RegsToInvalidate.insert(I->second.DefRegs.begin(),
153 I->second.DefRegs.end());
154 }
155 }
156 for (MCRegister InvalidReg : RegsToInvalidate)
157 for (MCRegUnitIterator RUI(InvalidReg, &TRI); RUI.isValid(); ++RUI)
158 Copies.erase(*RUI);
159 }
160
161 /// Clobber a single register, removing it from the tracker's copy maps.
162 void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
163 const TargetInstrInfo &TII, bool UseCopyInstr) {
164 for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
165 auto I = Copies.find(*RUI);
166 if (I != Copies.end()) {
167 // When we clobber the source of a copy, we need to clobber everything
168 // it defined.
169 markRegsUnavailable(I->second.DefRegs, TRI);
170 // When we clobber the destination of a copy, we need to clobber the
171 // whole register it defined.
172 if (MachineInstr *MI = I->second.MI) {
173 std::optional<DestSourcePair> CopyOperands =
174 isCopyInstr(*MI, TII, UseCopyInstr);
175 markRegsUnavailable({CopyOperands->Destination->getReg().asMCReg()},
176 TRI);
177 }
178 // Now we can erase the copy.
179 Copies.erase(I);
180 }
181 }
182 }
183
184 /// Add this copy's registers into the tracker's copy maps.
185 void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
186 const TargetInstrInfo &TII, bool UseCopyInstr) {
187 std::optional<DestSourcePair> CopyOperands =
188 isCopyInstr(*MI, TII, UseCopyInstr);
189 assert(CopyOperands && "Tracking non-copy?");
190
191 MCRegister Src = CopyOperands->Source->getReg().asMCReg();
192 MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
193
194 // Remember Def is defined by the copy.
195 for (MCRegUnitIterator RUI(Def, &TRI); RUI.isValid(); ++RUI)
196 Copies[*RUI] = {MI, nullptr, {}, true};
197
198 // Remember source that's copied to Def. Once it's clobbered, then
199 // it's no longer available for copy propagation.
200 for (MCRegUnitIterator RUI(Src, &TRI); RUI.isValid(); ++RUI) {
201 auto I = Copies.insert({*RUI, {nullptr, nullptr, {}, false}});
202 auto &Copy = I.first->second;
203 if (!is_contained(Copy.DefRegs, Def))
204 Copy.DefRegs.push_back(Def);
205 Copy.LastSeenUseInCopy = MI;
206 }
207 }
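  // Illustrative sketch (register names are hypothetical): after trackCopy()
  // sees
  //   $d1 = COPY $d0
  // every register unit of $d1 maps to this COPY with Avail = true, and every
  // unit of $d0 records $d1 in its DefRegs list, so a later clobber of $d0 can
  // mark the copy unavailable via markRegsUnavailable().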
208
209 bool hasAnyCopies() {
210 return !Copies.empty();
211 }
212
213 MachineInstr *findCopyForUnit(MCRegister RegUnit,
214 const TargetRegisterInfo &TRI,
215 bool MustBeAvailable = false) {
216 auto CI = Copies.find(RegUnit);
217 if (CI == Copies.end())
218 return nullptr;
219 if (MustBeAvailable && !CI->second.Avail)
220 return nullptr;
221 return CI->second.MI;
222 }
223
224 MachineInstr *findCopyDefViaUnit(MCRegister RegUnit,
225 const TargetRegisterInfo &TRI) {
226 auto CI = Copies.find(RegUnit);
227 if (CI == Copies.end())
228 return nullptr;
229 if (CI->second.DefRegs.size() != 1)
230 return nullptr;
231 MCRegUnitIterator RUI(CI->second.DefRegs[0], &TRI);
232 return findCopyForUnit(*RUI, TRI, true);
233 }
234
235 MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
236 const TargetRegisterInfo &TRI,
237 const TargetInstrInfo &TII,
238 bool UseCopyInstr) {
239 MCRegUnitIterator RUI(Reg, &TRI);
240 MachineInstr *AvailCopy = findCopyDefViaUnit(*RUI, TRI);
241
242 if (!AvailCopy)
243 return nullptr;
244
245 std::optional<DestSourcePair> CopyOperands =
246 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
247 Register AvailSrc = CopyOperands->Source->getReg();
248 Register AvailDef = CopyOperands->Destination->getReg();
249 if (!TRI.isSubRegisterEq(AvailSrc, Reg))
250 return nullptr;
251
252 for (const MachineInstr &MI :
253 make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
254 for (const MachineOperand &MO : MI.operands())
255 if (MO.isRegMask())
256 // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
257 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
258 return nullptr;
259
260 return AvailCopy;
261 }
262
263 MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
264 const TargetRegisterInfo &TRI,
265 const TargetInstrInfo &TII, bool UseCopyInstr) {
266 // We check the first RegUnit here, since we'll only be interested in the
267 // copy if it copies the entire register anyway.
268 MCRegUnitIterator RUI(Reg, &TRI);
269 MachineInstr *AvailCopy =
270 findCopyForUnit(*RUI, TRI, /*MustBeAvailable=*/true);
271
272 if (!AvailCopy)
273 return nullptr;
274
275 std::optional<DestSourcePair> CopyOperands =
276 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
277 Register AvailSrc = CopyOperands->Source->getReg();
278 Register AvailDef = CopyOperands->Destination->getReg();
279 if (!TRI.isSubRegisterEq(AvailDef, Reg))
280 return nullptr;
281
282 // Check that the available copy isn't clobbered by any regmasks between
283 // itself and the destination.
284 for (const MachineInstr &MI :
285 make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
286 for (const MachineOperand &MO : MI.operands())
287 if (MO.isRegMask())
288 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
289 return nullptr;
290
291 return AvailCopy;
292 }
293
294 // Find last COPY that defines Reg before Current MachineInstr.
295 MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
296 MCRegister Reg,
297 const TargetRegisterInfo &TRI,
298 const TargetInstrInfo &TII,
299 bool UseCopyInstr) {
300 MCRegUnitIterator RUI(Reg, &TRI);
301 auto CI = Copies.find(*RUI);
302 if (CI == Copies.end() || !CI->second.Avail)
303 return nullptr;
304
305 MachineInstr *DefCopy = CI->second.MI;
306 std::optional<DestSourcePair> CopyOperands =
307 isCopyInstr(*DefCopy, TII, UseCopyInstr);
308 Register Def = CopyOperands->Destination->getReg();
309 if (!TRI.isSubRegisterEq(Def, Reg))
310 return nullptr;
311
312 for (const MachineInstr &MI :
313 make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
314 Current.getIterator()))
315 for (const MachineOperand &MO : MI.operands())
316 if (MO.isRegMask())
317 if (MO.clobbersPhysReg(Def)) {
318 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
319 << printReg(Def, &TRI) << "\n");
320 return nullptr;
321 }
322
323 return DefCopy;
324 }
325
326 // Find last COPY that uses Reg.
327 MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
328 const TargetRegisterInfo &TRI) {
329 MCRegUnitIterator RUI(Reg, &TRI);
330 auto CI = Copies.find(*RUI);
331 if (CI == Copies.end())
332 return nullptr;
333 return CI->second.LastSeenUseInCopy;
334 }
335
336 void clear() {
337 Copies.clear();
338 }
339};
340
341class MachineCopyPropagation : public MachineFunctionPass {
342 const TargetRegisterInfo *TRI;
343 const TargetInstrInfo *TII;
344 const MachineRegisterInfo *MRI;
345
346 // If true, also recognize target copy-like instructions (via TargetInstrInfo::isCopyInstr).
347 bool UseCopyInstr;
348
349public:
350 static char ID; // Pass identification, replacement for typeid
351
352 MachineCopyPropagation(bool CopyInstr = false)
353 : MachineFunctionPass(ID), UseCopyInstr(CopyInstr || MCPUseCopyInstr) {
354 initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
355 }
356
357 void getAnalysisUsage(AnalysisUsage &AU) const override {
358 AU.setPreservesCFG();
359 MachineFunctionPass::getAnalysisUsage(AU);
360 }
361
362 bool runOnMachineFunction(MachineFunction &MF) override;
363
364 MachineFunctionProperties getRequiredProperties() const override {
365 return MachineFunctionProperties().set(
366 MachineFunctionProperties::Property::NoVRegs);
367 }
368
369private:
370 typedef enum { DebugUse = false, RegularUse = true } DebugType;
371
372 void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
373 void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
374 void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
375 void EliminateSpillageCopies(MachineBasicBlock &MBB);
376 bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
377 void forwardUses(MachineInstr &MI);
378 void propagateDefs(MachineInstr &MI);
379 bool isForwardableRegClassCopy(const MachineInstr &Copy,
380 const MachineInstr &UseI, unsigned UseIdx);
381 bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
382 const MachineInstr &UseI,
383 unsigned UseIdx);
384 bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
385 bool hasOverlappingMultipleDef(const MachineInstr &MI,
386 const MachineOperand &MODef, Register Def);
387
388 /// Candidates for deletion.
389 SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;
390
391 /// Multimap tracking debug users in current BB
392 DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;
393
394 CopyTracker Tracker;
395
396 bool Changed;
397};
398
399} // end anonymous namespace
400
401char MachineCopyPropagation::ID = 0;
402
403char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;
404
405INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
406 "Machine Copy Propagation Pass", false, false)
407
408void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
409 DebugType DT) {
410 // If 'Reg' is defined by a copy, the copy is no longer a candidate
411 // for elimination. If a copy is "read" by a debug user, record the user
412 // for propagation.
413 for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI) {
414 if (MachineInstr *Copy = Tracker.findCopyForUnit(*RUI, *TRI)) {
415 if (DT == RegularUse) {
416 LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
417 MaybeDeadCopies.remove(Copy);
418 } else {
419 CopyDbgUsers[Copy].insert(&Reader);
420 }
421 }
422 }
423}
424
425/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
426/// This fact may have been obscured by sub register usage or may not be true at
427/// all even though Src and Def are subregisters of the registers used in
428/// PreviousCopy. e.g.
429/// isNopCopy("ecx = COPY eax", AX, CX) == true
430/// isNopCopy("ecx = COPY eax", AH, CL) == false
431static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
432 MCRegister Def, const TargetRegisterInfo *TRI,
433 const TargetInstrInfo *TII, bool UseCopyInstr) {
434
435 std::optional<DestSourcePair> CopyOperands =
436 isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
437 MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
438 MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
439 if (Src == PreviousSrc && Def == PreviousDef)
440 return true;
441 if (!TRI->isSubRegister(PreviousSrc, Src))
442 return false;
443 unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
444 return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
445}
446
447/// Remove instruction \p Copy if there exists a previous copy that copies the
448/// register \p Src to the register \p Def. This may happen indirectly by
449/// copying the super registers.
450bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
451 MCRegister Src, MCRegister Def) {
452 // Avoid eliminating a copy from/to a reserved register as we cannot predict
453 // the value (Example: The sparc zero register is writable but stays zero).
454 if (MRI->isReserved(Src) || MRI->isReserved(Def))
455 return false;
456
457 // Search for an existing copy.
458 MachineInstr *PrevCopy =
459 Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
460 if (!PrevCopy)
461 return false;
462
463 auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
464 // Check that the existing copy uses the correct sub registers.
465 if (PrevCopyOperands->Destination->isDead())
466 return false;
467 if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
468 return false;
469
470 LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());
471
472 // Copy was redundantly redefining either Src or Def. Remove earlier kill
473 // flags between Copy and PrevCopy because the value will be reused now.
474 std::optional<DestSourcePair> CopyOperands =
475 isCopyInstr(Copy, *TII, UseCopyInstr);
476 assert(CopyOperands);
477
478 Register CopyDef = CopyOperands->Destination->getReg();
479 assert(CopyDef == Src || CopyDef == Def);
480 for (MachineInstr &MI :
481 make_range(PrevCopy->getIterator(), Copy.getIterator()))
482 MI.clearRegisterKills(CopyDef, TRI);
483
484 Copy.eraseFromParent();
485 Changed = true;
486 ++NumDeletes;
487 return true;
488}
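// Illustrative example (hypothetical MIR): given
//   $ecx = COPY $eax
//   ... = ADD32rr ..., killed $eax   ; kill flag cleared by the loop above
//   $eax = COPY $ecx                 ; erased as a NOP copy
// later code keeps reading $eax, so kill flags on $eax between the two COPYs
// would be stale and are removed before the redundant COPY is erased.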
489
490bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
491 const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
492 std::optional<DestSourcePair> CopyOperands =
493 isCopyInstr(Copy, *TII, UseCopyInstr);
494 Register Def = CopyOperands->Destination->getReg();
495
496 if (const TargetRegisterClass *URC =
497 UseI.getRegClassConstraint(UseIdx, TII, TRI))
498 return URC->contains(Def);
499
500 // We don't process further if UseI is a COPY, since forward copy propagation
501 // should handle that.
502 return false;
503}
504
505/// Decide whether we should forward the source of \param Copy to its use in
506/// \param UseI based on the physical register class constraints of the opcode
507/// and avoiding introducing more cross-class COPYs.
508bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
509 const MachineInstr &UseI,
510 unsigned UseIdx) {
511 std::optional<DestSourcePair> CopyOperands =
512 isCopyInstr(Copy, *TII, UseCopyInstr);
513 Register CopySrcReg = CopyOperands->Source->getReg();
514
515 // If the new register meets the opcode register constraints, then allow
516 // forwarding.
517 if (const TargetRegisterClass *URC =
518 UseI.getRegClassConstraint(UseIdx, TII, TRI))
519 return URC->contains(CopySrcReg);
520
521 auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
522 if (!UseICopyOperands)
523 return false;
524
525 /// COPYs don't have register class constraints, so if the user instruction
526 /// is a COPY, we just try to avoid introducing additional cross-class
527 /// COPYs. For example:
528 ///
529 /// RegClassA = COPY RegClassB // Copy parameter
530 /// ...
531 /// RegClassB = COPY RegClassA // UseI parameter
532 ///
533 /// which after forwarding becomes
534 ///
535 /// RegClassA = COPY RegClassB
536 /// ...
537 /// RegClassB = COPY RegClassB
538 ///
539 /// so we have reduced the number of cross-class COPYs and potentially
540 /// introduced a nop COPY that can be removed.
541
542 // Allow forwarding if src and dst belong to any common class, so long as they
543 // don't belong to any (possibly smaller) common class that requires copies to
544 // go via a different class.
545 Register UseDstReg = UseICopyOperands->Destination->getReg();
546 bool Found = false;
547 bool IsCrossClass = false;
548 for (const TargetRegisterClass *RC : TRI->regclasses()) {
549 if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
550 Found = true;
551 if (TRI->getCrossCopyRegClass(RC) != RC) {
552 IsCrossClass = true;
553 break;
554 }
555 }
556 }
557 if (!Found)
558 return false;
559 if (!IsCrossClass)
560 return true;
561 // The forwarded copy would be cross-class. Only do this if the original copy
562 // was also cross-class.
563 Register CopyDstReg = CopyOperands->Destination->getReg();
564 for (const TargetRegisterClass *RC : TRI->regclasses()) {
565 if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
566 TRI->getCrossCopyRegClass(RC) != RC)
567 return true;
568 }
569 return false;
570}
571
572/// Check that \p MI does not have implicit uses that overlap with its \p Use
573/// operand (the register being replaced), since these can sometimes be
574/// implicitly tied to other operands. For example, on AMDGPU:
575///
576/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
577///
578/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
579/// way of knowing we need to update the latter when updating the former.
580bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
581 const MachineOperand &Use) {
582 for (const MachineOperand &MIUse : MI.uses())
583 if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
584 MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
585 return true;
586
587 return false;
588}
589
590/// For an MI that has multiple definitions, check whether \p MI has
591/// a definition that overlaps with another of its definitions.
592/// For example, on ARM: umull r9, r9, lr, r0
593/// The umull instruction is unpredictable unless RdHi and RdLo are different.
594bool MachineCopyPropagation::hasOverlappingMultipleDef(
595 const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
596 for (const MachineOperand &MIDef : MI.defs()) {
597 if ((&MIDef != &MODef) && MIDef.isReg() &&
598 TRI->regsOverlap(Def, MIDef.getReg()))
599 return true;
600 }
601
602 return false;
603}
604
605/// Look for available copies whose destination register is used by \p MI and
606/// replace the use in \p MI with the copy's source register.
607void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
608 if (!Tracker.hasAnyCopies())
609 return;
610
611 // Look for non-tied explicit vreg uses that have an active COPY
612 // instruction that defines the physical register allocated to them.
613 // Replace the vreg with the source of the active COPY.
614 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
615 ++OpIdx) {
616 MachineOperand &MOUse = MI.getOperand(OpIdx);
617 // Don't forward into undef use operands, since doing so can cause problems
618 // with the machine verifier: it doesn't treat undef reads as reads, so we
619 // can end up with a live range that ends on an undef read, leading to an
620 // error that the live range doesn't end on a read of the live range
621 // register.
622 if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
623 MOUse.isImplicit())
624 continue;
625
626 if (!MOUse.getReg())
627 continue;
628
629 // Check that the register is marked 'renamable' so we know it is safe to
630 // rename it without violating any constraints that aren't expressed in the
631 // IR (e.g. ABI or opcode requirements).
632 if (!MOUse.isRenamable())
633 continue;
634
635 MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
636 *TRI, *TII, UseCopyInstr);
637 if (!Copy)
638 continue;
639
640 std::optional<DestSourcePair> CopyOperands =
641 isCopyInstr(*Copy, *TII, UseCopyInstr);
642 Register CopyDstReg = CopyOperands->Destination->getReg();
643 const MachineOperand &CopySrc = *CopyOperands->Source;
644 Register CopySrcReg = CopySrc.getReg();
645
646 Register ForwardedReg = CopySrcReg;
647 // MI might use a sub-register of the Copy destination, in which case the
648 // forwarded register is the matching sub-register of the Copy source.
649 if (MOUse.getReg() != CopyDstReg) {
650 unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
651 assert(SubRegIdx &&
652 "MI source is not a sub-register of Copy destination");
653 ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
654 if (!ForwardedReg) {
655 LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
656 << TRI->getSubRegIndexName(SubRegIdx) << '\n');
657 continue;
658 }
659 }
660
661 // Don't forward COPYs of reserved regs unless they are constant.
662 if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
663 continue;
664
665 if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
666 continue;
667
668 if (hasImplicitOverlap(MI, MOUse))
669 continue;
670
671 // Check that the instruction is not a copy that partially overwrites the
672 // original copy source that we are about to use. The tracker mechanism
673 // cannot cope with that.
674 if (isCopyInstr(MI, *TII, UseCopyInstr) &&
675 MI.modifiesRegister(CopySrcReg, TRI) &&
676 !MI.definesRegister(CopySrcReg)) {
677 LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
678 continue;
679 }
680
681 if (!DebugCounter::shouldExecute(FwdCounter)) {
682 LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
683 << MI);
684 continue;
685 }
686
687 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
688 << "\n with " << printReg(ForwardedReg, TRI)
689 << "\n in " << MI << " from " << *Copy);
690
691 MOUse.setReg(ForwardedReg);
692
693 if (!CopySrc.isRenamable())
694 MOUse.setIsRenamable(false);
695 MOUse.setIsUndef(CopySrc.isUndef());
696
697 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
698
699 // Clear kill markers that may have been invalidated.
700 for (MachineInstr &KMI :
701 make_range(Copy->getIterator(), std::next(MI.getIterator())))
702 KMI.clearRegisterKills(CopySrcReg, TRI);
703
704 ++NumCopyForwards;
705 Changed = true;
706 }
707}
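// Illustrative sub-register case (hypothetical registers): given
//   $q1 = COPY $q0
//   ... = OP killed $d1              ; $d1 is the low half of $q1
// the use of $d1 is rewritten to $d0, the matching sub-register of the COPY
// source, provided that sub-register exists, the operand is renamable, and no
// clobber of $q0 intervenes.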
708
709void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
710 LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
711 << "\n");
712
713 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
714 // Analyze copies (which don't overlap themselves).
715 std::optional<DestSourcePair> CopyOperands =
716 isCopyInstr(MI, *TII, UseCopyInstr);
717 if (CopyOperands) {
718
719 Register RegSrc = CopyOperands->Source->getReg();
720 Register RegDef = CopyOperands->Destination->getReg();
721
722 if (!TRI->regsOverlap(RegDef, RegSrc)) {
723 assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
724 "MachineCopyPropagation should be run after register allocation!");
725
726 MCRegister Def = RegDef.asMCReg();
727 MCRegister Src = RegSrc.asMCReg();
728
729 // The two copies cancel out and the source of the first copy
730 // hasn't been overridden, eliminate the second one. e.g.
731 // %ecx = COPY %eax
732 // ... nothing clobbered eax.
733 // %eax = COPY %ecx
734 // =>
735 // %ecx = COPY %eax
736 //
737 // or
738 //
739 // %ecx = COPY %eax
740 // ... nothing clobbered eax.
741 // %ecx = COPY %eax
742 // =>
743 // %ecx = COPY %eax
744 if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
745 continue;
746
747 forwardUses(MI);
748
749 // Src may have been changed by forwardUses()
750 CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
751 Src = CopyOperands->Source->getReg().asMCReg();
752
753 // If Src is defined by a previous copy, the previous copy cannot be
754 // eliminated.
755 ReadRegister(Src, MI, RegularUse);
756 for (const MachineOperand &MO : MI.implicit_operands()) {
757 if (!MO.isReg() || !MO.readsReg())
758 continue;
759 MCRegister Reg = MO.getReg().asMCReg();
760 if (!Reg)
761 continue;
762 ReadRegister(Reg, MI, RegularUse);
763 }
764
765 LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI.dump());
766
767 // Copy is now a candidate for deletion.
768 if (!MRI->isReserved(Def))
769 MaybeDeadCopies.insert(&MI);
770
771 // If 'Def' was previously the source of another copy, then that earlier copy's
772 // source is no longer available. e.g.
773 // %xmm9 = copy %xmm2
774 // ...
775 // %xmm2 = copy %xmm0
776 // ...
777 // %xmm2 = copy %xmm9
778 Tracker.clobberRegister(Def, *TRI, *TII, UseCopyInstr);
779 for (const MachineOperand &MO : MI.implicit_operands()) {
780 if (!MO.isReg() || !MO.isDef())
781 continue;
782 MCRegister Reg = MO.getReg().asMCReg();
783 if (!Reg)
784 continue;
785 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
786 }
787
788 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
789
790 continue;
791 }
792 }
793
794 // Clobber any earlyclobber regs first.
795 for (const MachineOperand &MO : MI.operands())
796 if (MO.isReg() && MO.isEarlyClobber()) {
797 MCRegister Reg = MO.getReg().asMCReg();
798 // If we have a tied earlyclobber, that means it is also read by this
799 // instruction, so we need to make sure we don't remove it as dead
800 // later.
801 if (MO.isTied())
802 ReadRegister(Reg, MI, RegularUse);
803 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
804 }
805
806 forwardUses(MI);
807
808 // Not a copy.
809 SmallVector<MCRegister, 4> Defs;
810 const MachineOperand *RegMask = nullptr;
811 for (const MachineOperand &MO : MI.operands()) {
812 if (MO.isRegMask())
813 RegMask = &MO;
814 if (!MO.isReg())
815 continue;
816 Register Reg = MO.getReg();
817 if (!Reg)
818 continue;
819
820 assert(!Reg.isVirtual() &&
821 "MachineCopyPropagation should be run after register allocation!");
822
823 if (MO.isDef() && !MO.isEarlyClobber()) {
824 Defs.push_back(Reg.asMCReg());
825 continue;
826 } else if (MO.readsReg())
827 ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
828 }
829
830 // The instruction has a register mask operand which means that it clobbers
831 // a large set of registers. Treat clobbered registers the same way as
832 // defined registers.
833 if (RegMask) {
834 // Erase any MaybeDeadCopies whose destination register is clobbered.
835 for (SmallSetVector<MachineInstr *, 8>::iterator DI =
836 MaybeDeadCopies.begin();
837 DI != MaybeDeadCopies.end();) {
838 MachineInstr *MaybeDead = *DI;
839 std::optional<DestSourcePair> CopyOperands =
840 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
841 MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
842 assert(!MRI->isReserved(Reg));
843
844 if (!RegMask->clobbersPhysReg(Reg)) {
845 ++DI;
846 continue;
847 }
848
849 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
850 MaybeDead->dump());
851
852 // Make sure we invalidate any entries in the copy maps before erasing
853 // the instruction.
854 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
855
856 // erase() will return the next valid iterator pointing to the next
857 // element after the erased one.
858 DI = MaybeDeadCopies.erase(DI);
859 MaybeDead->eraseFromParent();
860 Changed = true;
861 ++NumDeletes;
862 }
863 }
864
865 // Any previous copy definition or reading the Defs is no longer available.
866 for (MCRegister Reg : Defs)
867 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
868 }
869
870 // If MBB doesn't have successors, delete the copies whose defs are not used.
871 // If MBB does have successors, then conservatively assume the defs are live-out
872 // since we don't want to trust live-in lists.
873 if (MBB.succ_empty()) {
874 for (MachineInstr *MaybeDead : MaybeDeadCopies) {
875 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
876 MaybeDead->dump());
877
878 std::optional<DestSourcePair> CopyOperands =
879 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
880 assert(CopyOperands);
881
882 Register SrcReg = CopyOperands->Source->getReg();
883 Register DestReg = CopyOperands->Destination->getReg();
884 assert(!MRI->isReserved(DestReg));
885
886 // Update matching debug values, if any.
887 SmallVector<MachineInstr *> MaybeDeadDbgUsers(
888 CopyDbgUsers[MaybeDead].begin(), CopyDbgUsers[MaybeDead].end());
889 MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
890 MaybeDeadDbgUsers);
891
892 MaybeDead->eraseFromParent();
893 Changed = true;
894 ++NumDeletes;
895 }
896 }
897
898 MaybeDeadCopies.clear();
899 CopyDbgUsers.clear();
900 Tracker.clear();
901}
902
903static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
904 const MachineRegisterInfo &MRI,
905 const TargetInstrInfo &TII) {
906 Register Def = CopyOperands.Destination->getReg();
907 Register Src = CopyOperands.Source->getReg();
908
909 if (!Def || !Src)
910 return false;
911
912 if (MRI.isReserved(Def) || MRI.isReserved(Src))
913 return false;
914
915 return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
916}
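// Illustrative example (hypothetical registers): for
//   $r1 = OP ...
//   ...                              ; no read or clobber of $r0 and $r1
//   $r0 = COPY killed renamable $r1
// the COPY is backward propagatable: propagateDefs() can rewrite OP to define
// $r0 directly and record the COPY as a deletion candidate.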
917
918void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
919 if (!Tracker.hasAnyCopies())
920 return;
921
922 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
923 ++OpIdx) {
924 MachineOperand &MODef = MI.getOperand(OpIdx);
925
926 if (!MODef.isReg() || MODef.isUse())
927 continue;
928
929 // Ignore non-trivial cases.
930 if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
931 continue;
932
933 if (!MODef.getReg())
934 continue;
935
936 // We only handle registers that come from a vreg, i.e. renamable ones.
937 if (!MODef.isRenamable())
938 continue;
939
940 MachineInstr *Copy = Tracker.findAvailBackwardCopy(
941 MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
942 if (!Copy)
943 continue;
944
945 std::optional<DestSourcePair> CopyOperands =
946 isCopyInstr(*Copy, *TII, UseCopyInstr);
947 Register Def = CopyOperands->Destination->getReg();
948 Register Src = CopyOperands->Source->getReg();
949
950 if (MODef.getReg() != Src)
951 continue;
952
953 if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
954 continue;
955
956 if (hasImplicitOverlap(MI, MODef))
957 continue;
958
959 if (hasOverlappingMultipleDef(MI, MODef, Def))
960 continue;
961
962 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
963 << "\n with " << printReg(Def, TRI) << "\n in "
964 << MI << " from " << *Copy);
965
966 MODef.setReg(Def);
967 MODef.setIsRenamable(CopyOperands->Destination->isRenamable());
968
969 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
970 MaybeDeadCopies.insert(Copy);
971 Changed = true;
972 ++NumCopyBackwardPropagated;
973 }
974}
975
976void MachineCopyPropagation::BackwardCopyPropagateBlock(
977 MachineBasicBlock &MBB) {
978 LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
979 << "\n");
980
981 for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
982 // Ignore non-trivial COPYs.
983 std::optional<DestSourcePair> CopyOperands =
984 isCopyInstr(MI, *TII, UseCopyInstr);
985 if (CopyOperands && MI.getNumOperands() == 2) {
986 Register DefReg = CopyOperands->Destination->getReg();
987 Register SrcReg = CopyOperands->Source->getReg();
988
989 if (!TRI->regsOverlap(DefReg, SrcReg)) {
990 // Unlike forward cp, we don't invoke propagateDefs here,
991 // just let forward cp do COPY-to-COPY propagation.
992 if (isBackwardPropagatableCopy(*CopyOperands, *MRI, *TII)) {
993 Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
994 UseCopyInstr);
995 Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
996 UseCopyInstr);
997 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
998 continue;
999 }
1000 }
1001 }
1002
1003 // Invalidate any earlyclobber regs first.
1004 for (const MachineOperand &MO : MI.operands())
1005 if (MO.isReg() && MO.isEarlyClobber()) {
1006 MCRegister Reg = MO.getReg().asMCReg();
1007 if (!Reg)
1008 continue;
1009 Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
1010 }
1011
1012 propagateDefs(MI);
1013 for (const MachineOperand &MO : MI.operands()) {
1014 if (!MO.isReg())
1015 continue;
1016
1017 if (!MO.getReg())
1018 continue;
1019
1020 if (MO.isDef())
1021 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1022 UseCopyInstr);
1023
1024 if (MO.readsReg()) {
1025 if (MO.isDebug()) {
1026 // Check if the register in the debug instruction is utilized
1027 // in a copy instruction, so we can update the debug info if the
1028 // register is changed.
1029 for (MCRegUnitIterator RUI(MO.getReg().asMCReg(), TRI); RUI.isValid();
1030 ++RUI) {
1031 if (auto *Copy = Tracker.findCopyDefViaUnit(*RUI, *TRI)) {
1032 CopyDbgUsers[Copy].insert(&MI);
1033 }
1034 }
1035 } else {
1036 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1037 UseCopyInstr);
1038 }
1039 }
1040 }
1041 }
1042
1043 for (auto *Copy : MaybeDeadCopies) {
1044 std::optional<DestSourcePair> CopyOperands =
1045 isCopyInstr(*Copy, *TII, UseCopyInstr);
1046 Register Src = CopyOperands->Source->getReg();
1047 Register Def = CopyOperands->Destination->getReg();
1048 SmallVector<MachineInstr *> MaybeDeadDbgUsers(CopyDbgUsers[Copy].begin(),
1049 CopyDbgUsers[Copy].end());
1050
1051 MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
1052 Copy->eraseFromParent();
1053 ++NumDeletes;
1054 }
1055
1056 MaybeDeadCopies.clear();
1057 CopyDbgUsers.clear();
1058 Tracker.clear();
1059}
1060
1061static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
1062 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
1063 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
1064 MachineInstr *Leader) {
1065 auto &SC = SpillChain[Leader];
1066 auto &RC = ReloadChain[Leader];
1067 for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1068 (*I)->dump();
1069 for (MachineInstr *MI : RC)
1070 MI->dump();
1071}
1072
1073// Remove spill-reload like copy chains. For example
1074// r0 = COPY r1
1075// r1 = COPY r2
1076// r2 = COPY r3
1077// r3 = COPY r4
1078// <def-use r4>
1079// r4 = COPY r3
1080// r3 = COPY r2
1081// r2 = COPY r1
1082// r1 = COPY r0
1083// will be folded into
1084// r0 = COPY r1
1085// r1 = COPY r4
1086// <def-use r4>
1087// r4 = COPY r1
1088// r1 = COPY r0
1089// TODO: Currently we don't track usage of r0 outside the chain, so we
1090// conservatively keep its value as it was before the rewrite.
1091//
1092// The algorithm is trying to keep
1093// property#1: No Def of spill COPY in the chain is used or defined until the
1094// paired reload COPY in the chain uses the Def.
1095//
1096// property#2: No Source of a COPY in the chain is used or defined until the next
1097// COPY in the chain defines the Source, except the innermost spill-reload
1098// pair.
1099//
1100// The algorithm works by checking every COPY inside the MBB: assume the COPY
1101// is a reload COPY, then search backward for the paired spill COPY, i.e. the
1102// COPY that defines the Src of the reload COPY. If such a pair is found, it is
1103// added either to an existing chain or to a new chain, depending on whether
1104// the last available COPY uses the Def of the reload COPY.
1105// Implementation notes: we use CopyTracker::findLastSeenDefInCopy(Reg, ...) to
1106// find the last COPY that defines Reg; we use
1107// CopyTracker::findLastSeenUseInCopy(Reg, ...) to find the last COPY that uses
1108// Reg. When we encounter a non-COPY instruction, we check the registers in its
1109// operands. If such a Reg is defined by a COPY, we untrack it via
1110// CopyTracker::clobberRegister(Reg, ...).
1111void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
1112 // ChainLeader maps MI inside a spill-reload chain to its innermost reload COPY.
1113 // Thus we can track if a MI belongs to an existing spill-reload chain.
1114 DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
1115 // SpillChain maps innermost reload COPY of a spill-reload chain to a sequence
1116 // of COPYs that forms spills of a spill-reload chain.
1117 // ReloadChain maps innermost reload COPY of a spill-reload chain to a
1118 // sequence of COPYs that forms reloads of a spill-reload chain.
1119 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
1120 // If a COPY's Source has a use or def before the next COPY defines the Source,
1121 // we put the COPY in this set to keep property#2.
1122 DenseSet<const MachineInstr *> CopySourceInvalid;
1123
1124 auto TryFoldSpillageCopies =
1125 [&, this](const SmallVectorImpl<MachineInstr *> &SC,
1126 const SmallVectorImpl<MachineInstr *> &RC) {
1127 assert(SC.size() == RC.size() && "Spill-reload should be paired");
1128
1129 // We need at least 3 pairs of copies for the transformation to apply,
1130 // because the first outermost pair cannot be removed since we don't
1131 // recolor outside of the chain and we need at least one temporary
1132 // spill slot to shorten the chain. If we only have a chain of two
1133 // pairs, we already have the shortest sequence this code can handle:
1134 // the outermost pair for the temporary spill slot, and the pair that
1135 // uses that temporary spill slot for the other end of the chain.
1136 // TODO: We might be able to simplify to one spill-reload pair by collecting
1137 // more information about the outermost COPY.
1138 if (SC.size() <= 2)
1139 return;
1140
1141 // If property#2 is violated, we don't fold the chain.
1142 for (const MachineInstr *Spill : make_range(SC.begin() + 1, SC.end()))
1143 if (CopySourceInvalid.count(Spill))
1144 return;
1145
1146 for (const MachineInstr *Reload : make_range(RC.begin(), RC.end() - 1))
1147 if (CopySourceInvalid.count(Reload))
1148 return;
1149
1150 auto CheckCopyConstraint = [this](Register Def, Register Src) {
1151 for (const TargetRegisterClass *RC : TRI->regclasses()) {
1152 if (RC->contains(Def) && RC->contains(Src))
1153 return true;
1154 }
1155 return false;
1156 };
1157
1158 auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
1159 const MachineOperand *New) {
1160 for (MachineOperand &MO : MI->operands()) {
1161 if (&MO == Old)
1162 MO.setReg(New->getReg());
1163 }
1164 };
1165
1166 std::optional<DestSourcePair> InnerMostSpillCopy =
1167 isCopyInstr(*SC[0], *TII, UseCopyInstr);
1168 std::optional<DestSourcePair> OuterMostSpillCopy =
1169 isCopyInstr(*SC.back(), *TII, UseCopyInstr);
1170 std::optional<DestSourcePair> InnerMostReloadCopy =
1171 isCopyInstr(*RC[0], *TII, UseCopyInstr);
1172 std::optional<DestSourcePair> OuterMostReloadCopy =
1173 isCopyInstr(*RC.back(), *TII, UseCopyInstr);
1174 if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
1175 InnerMostSpillCopy->Source->getReg()) ||
1176 !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
1177 OuterMostReloadCopy->Destination->getReg()))
1178 return;
1179
1180 SpillageChainsLength += SC.size() + RC.size();
1181 NumSpillageChains += 1;
1182 UpdateReg(SC[0], InnerMostSpillCopy->Destination,
1183 OuterMostSpillCopy->Source);
1184 UpdateReg(RC[0], InnerMostReloadCopy->Source,
1185 OuterMostReloadCopy->Destination);
1186
1187 for (size_t I = 1; I < SC.size() - 1; ++I) {
1188 SC[I]->eraseFromParent();
1189 RC[I]->eraseFromParent();
1190 NumDeletes += 2;
1191 }
1192 };
1193
1194 auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
1195 if (MaybeCopy.getNumImplicitOperands() > 0)
1196 return false;
1197 std::optional<DestSourcePair> CopyOperands =
1198 isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
1199 if (!CopyOperands)
1200 return false;
1201 Register Src = CopyOperands->Source->getReg();
1202 Register Def = CopyOperands->Destination->getReg();
1203 return Src && Def && !TRI->regsOverlap(Src, Def) &&
1204 CopyOperands->Source->isRenamable() &&
1205 CopyOperands->Destination->isRenamable();
1206 };
1207
1208 auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
1209 const MachineInstr &Reload) {
1210 if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
1211 return false;
1212 std::optional<DestSourcePair> SpillCopy =
1213 isCopyInstr(Spill, *TII, UseCopyInstr);
1214 std::optional<DestSourcePair> ReloadCopy =
1215 isCopyInstr(Reload, *TII, UseCopyInstr);
1216 if (!SpillCopy || !ReloadCopy)
1217 return false;
1218 return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
1219 SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
1220 };
1221
1222 auto IsChainedCopy = [&, this](const MachineInstr &Prev,
1223 const MachineInstr &Current) {
1224 if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
1225 return false;
1226 std::optional<DestSourcePair> PrevCopy =
1227 isCopyInstr(Prev, *TII, UseCopyInstr);
1228 std::optional<DestSourcePair> CurrentCopy =
1229 isCopyInstr(Current, *TII, UseCopyInstr);
1230 if (!PrevCopy || !CurrentCopy)
1231 return false;
1232 return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
1233 };
1234
1235 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
1236 std::optional<DestSourcePair> CopyOperands =
1237 isCopyInstr(MI, *TII, UseCopyInstr);
1238
1239 // Update track information via non-copy instruction.
1240 SmallSet<Register, 8> RegsToClobber;
1241 if (!CopyOperands) {
1242 for (const MachineOperand &MO : MI.operands()) {
1243 if (!MO.isReg())
1244 continue;
1245 Register Reg = MO.getReg();
1246 if (!Reg)
1247 continue;
1248 MachineInstr *LastUseCopy =
1249 Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
1250 if (LastUseCopy) {
1251 LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
1252 LLVM_DEBUG(LastUseCopy->dump());
1253 LLVM_DEBUG(dbgs() << "might be invalidated by\n");
1254 LLVM_DEBUG(MI.dump());
1255 CopySourceInvalid.insert(LastUseCopy);
1256 }
1257 // Note that Tracker.clobberRegister(Reg, ...) removes tracking of
1258 // Reg, i.e., the COPY that defines Reg is removed from the mapping, and
1259 // COPYs that use Reg are marked unavailable.
1260 // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
1261 // defined by a previous COPY, since we don't want to make COPYs that use
1262 // Reg unavailable.
1263 if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
1264 UseCopyInstr))
1265 // Thus we can keep the property#1.
1266 RegsToClobber.insert(Reg);
1267 }
1268 for (Register Reg : RegsToClobber) {
1269 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1270 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
1271 << "\n");
1272 }
1273 continue;
1274 }
1275
1276 Register Src = CopyOperands->Source->getReg();
1277 Register Def = CopyOperands->Destination->getReg();
1278 // Check if we can find a paired spill-reload copy.
1279 LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
1280 LLVM_DEBUG(MI.dump());
1281 MachineInstr *MaybeSpill =
1282 Tracker.findLastSeenDefInCopy(MI, Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1283 bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
1284 if (!MaybeSpillIsChained && MaybeSpill &&
1285 IsSpillReloadPair(*MaybeSpill, MI)) {
1286 // Check if we already have an existing chain. Now we have a
1287 // spill-reload pair.
1288 // L2: r2 = COPY r3
1289 // L5: r3 = COPY r2
1290 // We look for a valid COPY before L5 which uses r3.
1291 // There are several cases.
1292 // Case #1:
1293 // No COPY is found, which can happen if r3 is defined or used between
1294 // (L2, L5); we create a new chain for L2 and L5.
1295 // Case #2:
1296 // L2: r2 = COPY r3
1297 // L5: r3 = COPY r2
1298 // Such a COPY is found and it is L2; we create a new chain for L2 and L5.
1299 // Case #3:
1300 // L2: r2 = COPY r3
1301 // L3: r1 = COPY r3
1302 // L5: r3 = COPY r2
1303 // we create a new chain for L2 and L5.
1304 // Case #4:
1305 // L2: r2 = COPY r3
1306 // L3: r1 = COPY r3
1307 // L4: r3 = COPY r1
1308 // L5: r3 = COPY r2
1309 // Such a COPY won't be found since L4 defines r3. We create a new chain
1310 // for L2 and L5.
1311 // Case #5:
1312 // L2: r2 = COPY r3
1313 // L3: r3 = COPY r1
1314 // L4: r1 = COPY r3
1315 // L5: r3 = COPY r2
1316 // A COPY is found and it is L4, which belongs to an existing chain; we add
1317 // L2 and L5 to this chain.
1318 LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1319 LLVM_DEBUG(MaybeSpill->dump());
1320 MachineInstr *MaybePrevReload =
1321 Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
1322 auto Leader = ChainLeader.find(MaybePrevReload);
1323 MachineInstr *L = nullptr;
1324 if (Leader == ChainLeader.end() ||
1325 (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
1326 L = &MI;
1327 assert(!SpillChain.count(L) &&
1328 "SpillChain should not have contained newly found chain");
1329 } else {
1330 assert(MaybePrevReload &&
1331 "Found a valid leader through nullptr should not happend");
1332 L = Leader->second;
1333 assert(SpillChain[L].size() > 0 &&
1334 "Existing chain's length should be larger than zero");
1335 }
1336 assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
1337 "Newly found paired spill-reload should not belong to any chain "
1338 "at this point");
1339 ChainLeader.insert({MaybeSpill, L});
1340 ChainLeader.insert({&MI, L});
1341 SpillChain[L].push_back(MaybeSpill);
1342 ReloadChain[L].push_back(&MI);
1343 LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
1344 LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
1345 } else if (MaybeSpill && !MaybeSpillIsChained) {
1346 // MaybeSpill is unable to pair with MI. That is to say, adding MI makes
1347 // the chain invalid.
1348 // The COPY that defines Src is no longer considered a candidate for a
1349 // valid chain, since we expect the Def of a spill copy not to be used by
1350 // any COPY instruction until the paired reload copy. For example:
1351 // L1: r1 = COPY r2
1352 // L2: r3 = COPY r1
1353 // If we later have
1354 // L1: r1 = COPY r2
1355 // L2: r3 = COPY r1
1356 // L3: r2 = COPY r1
1357 // L1 and L3 can't be a valid spill-reload pair.
1358 // Thus we keep the property#1.
1359 LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1360 LLVM_DEBUG(MaybeSpill->dump());
1361 LLVM_DEBUG(MI.dump());
1362 Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1363 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
1364 << "\n");
1365 }
1366 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1367 }
1368
1369 for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
1370 auto &SC = I->second;
1371 assert(ReloadChain.count(I->first) &&
1372 "Reload chain of the same leader should exist");
1373 auto &RC = ReloadChain[I->first];
1374 TryFoldSpillageCopies(SC, RC);
1375 }
1376
1377 MaybeDeadCopies.clear();
1378 CopyDbgUsers.clear();
1379 Tracker.clear();
1380}
1381
1382bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
1383 if (skipFunction(MF.getFunction()))
1384 return false;
1385
1386 bool isSpillageCopyElimEnabled = false;
1387 switch (EnableSpillageCopyElimination) {
1388 case cl::BOU_UNSET:
1389 isSpillageCopyElimEnabled =
1390 MF.getSubtarget().enableSpillageCopyElimination();
1391 break;
1392 case cl::BOU_TRUE:
1393 isSpillageCopyElimEnabled = true;
1394 break;
1395 case cl::BOU_FALSE:
1396 isSpillageCopyElimEnabled = false;
1397 break;
1398 }
1399
1400 Changed = false;
1401
1402 TRI = MF.getSubtarget().getRegisterInfo();
1403 TII = MF.getSubtarget().getInstrInfo();
1404 MRI = &MF.getRegInfo();
1405
1406 for (MachineBasicBlock &MBB : MF) {
1407 if (isSpillageCopyElimEnabled)
1408 EliminateSpillageCopies(MBB);
1409 BackwardCopyPropagateBlock(MBB);
1410 ForwardCopyPropagateBlock(MBB);
1411 }
1412
1413 return Changed;
1414}
1415
1416MachineFunctionPass *
1417llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
1418 return new MachineCopyPropagation(UseCopyInstr);
1419}
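// Usage sketch (an assumption, mirroring how in-tree targets typically wire
// the pass into their pass pipeline after register allocation):
//   addPass(createMachineCopyPropagationPass(/*UseCopyInstr=*/true));
// Passing true additionally treats target-specific copy-like instructions
// (recognized via TargetInstrInfo::isCopyInstr) as COPYs.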