MachineCopyPropagation.cpp
1//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This is an extremely simple MachineInstr-level copy propagation pass.
10//
11// This pass forwards the source of COPYs to the users of their destinations
12// when doing so is legal. For example:
13//
14// %reg1 = COPY %reg0
15// ...
16// ... = OP %reg1
17//
18// If
19// - %reg0 has not been clobbered by the time of the use of %reg1
20// - the register class constraints are satisfied
21// - the COPY def is the only value that reaches OP
22// then this pass replaces the above with:
23//
24// %reg1 = COPY %reg0
25// ...
26// ... = OP %reg0
27//
28// This pass also removes some redundant COPYs. For example:
29//
30// %R1 = COPY %R0
31// ... // No clobber of %R1
32// %R0 = COPY %R1 <<< Removed
33//
34// or
35//
36// %R1 = COPY %R0
37// ... // No clobber of %R0
38// %R1 = COPY %R0 <<< Removed
39//
40// or
41//
42// $R0 = OP ...
43// ... // No read/clobber of $R0 and $R1
44// $R1 = COPY $R0 // $R0 is killed
45// Replace $R0 with $R1 and remove the COPY
46// $R1 = OP ...
47// ...
48//
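// In addition, when spillage copy elimination is enabled, chains of
// spill/reload-like COPYs are shortened; see EliminateSpillageCopies below
// for a worked example.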
49//===----------------------------------------------------------------------===//
50
52#include "llvm/ADT/DenseMap.h"
53#include "llvm/ADT/STLExtras.h"
54#include "llvm/ADT/SetVector.h"
55#include "llvm/ADT/SmallSet.h"
57#include "llvm/ADT/Statistic.h"
69#include "llvm/MC/MCRegister.h"
71#include "llvm/Pass.h"
72#include "llvm/Support/Debug.h"
75#include <cassert>
76#include <iterator>
77
78using namespace llvm;
79
80#define DEBUG_TYPE "machine-cp"
81
82STATISTIC(NumDeletes, "Number of dead copies deleted");
83STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
84STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
85STATISTIC(SpillageChainsLength, "Length of spillage chains");
86STATISTIC(NumSpillageChains, "Number of spillage chains");
87DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
88 "Controls which register COPYs are forwarded");
89
90static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
91 cl::Hidden);
92static cl::opt<cl::boolOrDefault>
93 EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
94
95namespace {
96
97static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
98 const TargetInstrInfo &TII,
99 bool UseCopyInstr) {
100 if (UseCopyInstr)
101 return TII.isCopyInstr(MI);
102
103 if (MI.isCopy())
104 return std::optional<DestSourcePair>(
105 DestSourcePair{MI.getOperand(0), MI.getOperand(1)});
106
107 return std::nullopt;
108}
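// Illustrative usage sketch (not part of the original source): with
// UseCopyInstr set, target-specific copy-like instructions (for example, an
// AArch64 register move implemented as ORR with the zero register) are also
// reported as copies through TII.isCopyInstr(); with it unset, only plain
// COPYs match.
//
//   if (std::optional<DestSourcePair> DS = isCopyInstr(MI, TII, UseCopyInstr))
//     dbgs() << "copy: " << printReg(DS->Destination->getReg(), &TRI)
//            << " <- " << printReg(DS->Source->getReg(), &TRI) << '\n';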
109
110class CopyTracker {
111 struct CopyInfo {
112 MachineInstr *MI = nullptr;
113 MachineInstr *LastSeenUseInCopy = nullptr;
114 SmallPtrSet<MachineInstr *, 4> SrcUsers;
115 SmallVector<MCRegister, 4> DefRegs;
116 bool Avail = false;
117 };
118
119 DenseMap<MCRegUnit, CopyInfo> Copies;
120
121 // Memoised sets of register units which are preserved by each register mask,
122 // needed to efficiently remove copies which are invalidated by call
123 // instructions.
124 DenseMap<const uint32_t *, BitVector> RegMaskToPreservedRegUnits;
125
126public:
127 /// Get the set of register units which are preserved by RegMaskOp.
128 BitVector &getPreservedRegUnits(const MachineOperand &RegMaskOp,
129 const TargetRegisterInfo &TRI) {
130 const uint32_t *RegMask = RegMaskOp.getRegMask();
131 auto [It, Inserted] = RegMaskToPreservedRegUnits.try_emplace(RegMask);
132 if (!Inserted) {
133 return It->second;
134 } else {
135 BitVector &PreservedRegUnits = It->second;
136
137 PreservedRegUnits.resize(TRI.getNumRegUnits());
138 for (unsigned SafeReg = 0, E = TRI.getNumRegs(); SafeReg < E; ++SafeReg)
139 if (!RegMaskOp.clobbersPhysReg(SafeReg))
140 for (auto SafeUnit : TRI.regunits(SafeReg))
141 PreservedRegUnits.set(SafeUnit);
142
143 return PreservedRegUnits;
144 }
145 }
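 // Illustrative example (assuming an x86-64-like calling convention): for the
 // regmask of a typical call, the register units of callee-saved registers
 // such as $rbx are preserved, so a tracked "$rbx = COPY $rdi" survives the
 // call, while a copy into a caller-saved register such as $rcx would be
 // invalidated.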
146
147 /// Mark all of the given registers and their subregisters as unavailable for
148 /// copying.
149 void markRegsUnavailable(ArrayRef<MCRegister> Regs,
150 const TargetRegisterInfo &TRI) {
151 for (MCRegister Reg : Regs) {
152 // Source of copy is no longer available for propagation.
153 for (MCRegUnit Unit : TRI.regunits(Reg)) {
154 auto CI = Copies.find(Unit);
155 if (CI != Copies.end())
156 CI->second.Avail = false;
157 }
158 }
159 }
160
161 /// Remove register from copy maps.
162 void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
163 const TargetInstrInfo &TII, bool UseCopyInstr) {
164 // Since Reg might be a subreg of some registers, invalidating only Reg
165 // is not enough. We have to find the COPYs that define Reg or registers
166 // defined by Reg and invalidate all of them. Similarly, we must
167 // invalidate all of the subregisters used in the source of the COPY.
168 SmallSet<MCRegUnit, 8> RegUnitsToInvalidate;
169 auto InvalidateCopy = [&](MachineInstr *MI) {
170 std::optional<DestSourcePair> CopyOperands =
171 isCopyInstr(*MI, TII, UseCopyInstr);
172 assert(CopyOperands && "Expect copy");
173
174 auto Dest = TRI.regunits(CopyOperands->Destination->getReg().asMCReg());
175 auto Src = TRI.regunits(CopyOperands->Source->getReg().asMCReg());
176 RegUnitsToInvalidate.insert_range(Dest);
177 RegUnitsToInvalidate.insert_range(Src);
178 };
179
180 for (MCRegUnit Unit : TRI.regunits(Reg)) {
181 auto I = Copies.find(Unit);
182 if (I != Copies.end()) {
183 if (MachineInstr *MI = I->second.MI)
184 InvalidateCopy(MI);
185 if (MachineInstr *MI = I->second.LastSeenUseInCopy)
186 InvalidateCopy(MI);
187 }
188 }
189 for (MCRegUnit Unit : RegUnitsToInvalidate)
190 Copies.erase(Unit);
191 }
192
193 /// Clobber a single register unit, removing it from the tracker's copy maps.
194 void clobberRegUnit(MCRegUnit Unit, const TargetRegisterInfo &TRI,
195 const TargetInstrInfo &TII, bool UseCopyInstr) {
196 auto I = Copies.find(Unit);
197 if (I != Copies.end()) {
198 // When we clobber the source of a copy, we need to clobber everything
199 // it defined.
200 markRegsUnavailable(I->second.DefRegs, TRI);
201 // When we clobber the destination of a copy, we need to clobber the
202 // whole register it defined.
203 if (MachineInstr *MI = I->second.MI) {
204 std::optional<DestSourcePair> CopyOperands =
205 isCopyInstr(*MI, TII, UseCopyInstr);
206
207 MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
208 MCRegister Src = CopyOperands->Source->getReg().asMCReg();
209
210 markRegsUnavailable(Def, TRI);
211
212 // Since we clobber the destination of a copy, it no longer makes sense
213 // for Src's "DefRegs" to contain Def. We also need to remove the record
214 // from the copy maps that indicates Src defined Def. Failing to do so
215 // might cause the target to miss some opportunities to further eliminate
216 // redundant copy instructions.
217 // Consider the following sequence during the
218 // ForwardCopyPropagateBlock procedure:
219 // L1: r0 = COPY r9 <- TrackMI
220 // L2: r0 = COPY r8 <- TrackMI (Remove r9 defined r0 from tracker)
221 // L3: use r0 <- Remove L2 from MaybeDeadCopies
222 // L4: early-clobber r9 <- Clobber r9 (L2 is still valid in tracker)
223 // L5: r0 = COPY r8 <- Remove NopCopy
224 for (MCRegUnit SrcUnit : TRI.regunits(Src)) {
225 auto SrcCopy = Copies.find(SrcUnit);
226 if (SrcCopy != Copies.end() && SrcCopy->second.LastSeenUseInCopy) {
227 // If SrcCopy defines multiple values, we only need
228 // to erase the record for Def in DefRegs.
229 for (auto itr = SrcCopy->second.DefRegs.begin();
230 itr != SrcCopy->second.DefRegs.end(); itr++) {
231 if (*itr == Def) {
232 SrcCopy->second.DefRegs.erase(itr);
233 // If DefRegs becomes empty after the removal, we can remove SrcCopy
234 // from the tracker's copy maps. We only remove those entries that
235 // solely record that Def is defined by Src. If an entry also contains
236 // the definition records of other registers, it cannot be
237 // cleared.
238 if (SrcCopy->second.DefRegs.empty() && !SrcCopy->second.MI) {
239 Copies.erase(SrcCopy);
240 }
241 break;
242 }
243 }
244 }
245 }
246 }
247 // Now we can erase the copy.
248 Copies.erase(I);
249 }
250 }
251
252 /// Clobber a single register, removing it from the tracker's copy maps.
253 void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
254 const TargetInstrInfo &TII, bool UseCopyInstr) {
255 for (MCRegUnit Unit : TRI.regunits(Reg)) {
256 clobberRegUnit(Unit, TRI, TII, UseCopyInstr);
257 }
258 }
259
260 /// Track a copy's source users, and return false if that can't be done.
261 /// We can only track the users if we have a COPY instruction whose source
262 /// is the same as Reg.
263 bool trackSrcUsers(MCRegister Reg, MachineInstr &MI,
264 const TargetRegisterInfo &TRI, const TargetInstrInfo &TII,
265 bool UseCopyInstr) {
266 MCRegUnit RU = *TRI.regunits(Reg).begin();
267 MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
268 if (!AvailCopy)
269 return false;
270
271 std::optional<DestSourcePair> CopyOperands =
272 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
273 Register Src = CopyOperands->Source->getReg();
274
275 // Bail out, if the source of the copy is not the same as the Reg.
276 if (Src != Reg)
277 return false;
278
279 auto I = Copies.find(RU);
280 if (I == Copies.end())
281 return false;
282
283 I->second.SrcUsers.insert(&MI);
284 return true;
285 }
286
287 /// Return the users for a given register.
288 SmallPtrSet<MachineInstr *, 4> getSrcUsers(MCRegister Reg,
289 const TargetRegisterInfo &TRI) {
290 MCRegUnit RU = *TRI.regunits(Reg).begin();
291 auto I = Copies.find(RU);
292 if (I == Copies.end())
293 return {};
294 return I->second.SrcUsers;
295 }
296
297 /// Add this copy's registers into the tracker's copy maps.
298 void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
299 const TargetInstrInfo &TII, bool UseCopyInstr) {
300 std::optional<DestSourcePair> CopyOperands =
301 isCopyInstr(*MI, TII, UseCopyInstr);
302 assert(CopyOperands && "Tracking non-copy?");
303
304 MCRegister Src = CopyOperands->Source->getReg().asMCReg();
305 MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
306
307 // Remember Def is defined by the copy.
308 for (MCRegUnit Unit : TRI.regunits(Def))
309 Copies[Unit] = {MI, nullptr, {}, {}, true};
310
311 // Remember source that's copied to Def. Once it's clobbered, then
312 // it's no longer available for copy propagation.
313 for (MCRegUnit Unit : TRI.regunits(Src)) {
314 auto &Copy = Copies[Unit];
315 if (!is_contained(Copy.DefRegs, Def))
316 Copy.DefRegs.push_back(Def);
317 Copy.LastSeenUseInCopy = MI;
318 }
319 }
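 // Illustrative example: after trackCopy() sees "$rax = COPY $rbx", every
 // register unit of $rax maps to {MI, Avail = true}, and every unit of $rbx
 // records $rax in DefRegs and MI as LastSeenUseInCopy. If $rbx is clobbered
 // later, markRegsUnavailable() marks the copy of $rax as no longer available
 // for forwarding.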
320
321 bool hasAnyCopies() {
322 return !Copies.empty();
323 }
324
325 MachineInstr *findCopyForUnit(MCRegUnit RegUnit,
326 const TargetRegisterInfo &TRI,
327 bool MustBeAvailable = false) {
328 auto CI = Copies.find(RegUnit);
329 if (CI == Copies.end())
330 return nullptr;
331 if (MustBeAvailable && !CI->second.Avail)
332 return nullptr;
333 return CI->second.MI;
334 }
335
336 MachineInstr *findCopyDefViaUnit(MCRegUnit RegUnit,
337 const TargetRegisterInfo &TRI) {
338 auto CI = Copies.find(RegUnit);
339 if (CI == Copies.end())
340 return nullptr;
341 if (CI->second.DefRegs.size() != 1)
342 return nullptr;
343 MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
344 return findCopyForUnit(RU, TRI, true);
345 }
346
347 MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
348 const TargetRegisterInfo &TRI,
349 const TargetInstrInfo &TII,
350 bool UseCopyInstr) {
351 MCRegUnit RU = *TRI.regunits(Reg).begin();
352 MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
353
354 if (!AvailCopy)
355 return nullptr;
356
357 std::optional<DestSourcePair> CopyOperands =
358 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
359 Register AvailSrc = CopyOperands->Source->getReg();
360 Register AvailDef = CopyOperands->Destination->getReg();
361 if (!TRI.isSubRegisterEq(AvailSrc, Reg))
362 return nullptr;
363
364 for (const MachineInstr &MI :
365 make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
366 for (const MachineOperand &MO : MI.operands())
367 if (MO.isRegMask())
368 // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
369 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
370 return nullptr;
371
372 return AvailCopy;
373 }
374
375 MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
376 const TargetRegisterInfo &TRI,
377 const TargetInstrInfo &TII, bool UseCopyInstr) {
378 // We check the first RegUnit here, since we'll only be interested in the
379 // copy if it copies the entire register anyway.
380 MCRegUnit RU = *TRI.regunits(Reg).begin();
381 MachineInstr *AvailCopy =
382 findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);
383
384 if (!AvailCopy)
385 return nullptr;
386
387 std::optional<DestSourcePair> CopyOperands =
388 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
389 Register AvailSrc = CopyOperands->Source->getReg();
390 Register AvailDef = CopyOperands->Destination->getReg();
391 if (!TRI.isSubRegisterEq(AvailDef, Reg))
392 return nullptr;
393
394 // Check that the available copy isn't clobbered by any regmasks between
395 // itself and the destination.
396 for (const MachineInstr &MI :
397 make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
398 for (const MachineOperand &MO : MI.operands())
399 if (MO.isRegMask())
400 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
401 return nullptr;
402
403 return AvailCopy;
404 }
405
406 // Find last COPY that defines Reg before Current MachineInstr.
407 MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
408 MCRegister Reg,
409 const TargetRegisterInfo &TRI,
410 const TargetInstrInfo &TII,
411 bool UseCopyInstr) {
412 MCRegUnit RU = *TRI.regunits(Reg).begin();
413 auto CI = Copies.find(RU);
414 if (CI == Copies.end() || !CI->second.Avail)
415 return nullptr;
416
417 MachineInstr *DefCopy = CI->second.MI;
418 std::optional<DestSourcePair> CopyOperands =
419 isCopyInstr(*DefCopy, TII, UseCopyInstr);
420 Register Def = CopyOperands->Destination->getReg();
421 if (!TRI.isSubRegisterEq(Def, Reg))
422 return nullptr;
423
424 for (const MachineInstr &MI :
425 make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
426 Current.getIterator()))
427 for (const MachineOperand &MO : MI.operands())
428 if (MO.isRegMask())
429 if (MO.clobbersPhysReg(Def)) {
430 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
431 << printReg(Def, &TRI) << "\n");
432 return nullptr;
433 }
434
435 return DefCopy;
436 }
437
438 // Find last COPY that uses Reg.
439 MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
440 const TargetRegisterInfo &TRI) {
441 MCRegUnit RU = *TRI.regunits(Reg).begin();
442 auto CI = Copies.find(RU);
443 if (CI == Copies.end())
444 return nullptr;
445 return CI->second.LastSeenUseInCopy;
446 }
447
448 void clear() {
449 Copies.clear();
450 }
451};
452
453class MachineCopyPropagation {
454 const TargetRegisterInfo *TRI = nullptr;
455 const TargetInstrInfo *TII = nullptr;
456 const MachineRegisterInfo *MRI = nullptr;
457
458 // Whether to also recognize target-specific copy-like instructions via TargetInstrInfo::isCopyInstr, not just plain COPYs.
459 bool UseCopyInstr;
460
461public:
462 MachineCopyPropagation(bool CopyInstr = false)
463 : UseCopyInstr(CopyInstr || MCPUseCopyInstr) {}
464
465 bool run(MachineFunction &MF);
466
467private:
468 typedef enum { DebugUse = false, RegularUse = true } DebugType;
469
470 void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
471 void readSuccessorLiveIns(const MachineBasicBlock &MBB);
472 void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
473 void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
474 void EliminateSpillageCopies(MachineBasicBlock &MBB);
475 bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
476 void forwardUses(MachineInstr &MI);
477 void propagateDefs(MachineInstr &MI);
478 bool isForwardableRegClassCopy(const MachineInstr &Copy,
479 const MachineInstr &UseI, unsigned UseIdx);
480 bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
481 const MachineInstr &UseI,
482 unsigned UseIdx);
483 bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
484 bool hasOverlappingMultipleDef(const MachineInstr &MI,
485 const MachineOperand &MODef, Register Def);
486 bool canUpdateSrcUsers(const MachineInstr &Copy,
487 const MachineOperand &CopySrc);
488
489 /// Candidates for deletion.
490 SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;
491
492 /// Multimap tracking debug users in current BB
493 DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;
494
495 CopyTracker Tracker;
496
497 bool Changed = false;
498};
499
500class MachineCopyPropagationLegacy : public MachineFunctionPass {
501 bool UseCopyInstr;
502
503public:
504 static char ID; // pass identification
505
506 MachineCopyPropagationLegacy(bool UseCopyInstr = false)
507 : MachineFunctionPass(ID), UseCopyInstr(UseCopyInstr) {}
508
509 void getAnalysisUsage(AnalysisUsage &AU) const override {
510 AU.setPreservesCFG();
511 MachineFunctionPass::getAnalysisUsage(AU);
512 }
513
514 bool runOnMachineFunction(MachineFunction &MF) override;
515
516 MachineFunctionProperties getRequiredProperties() const override {
517 return MachineFunctionProperties().setNoVRegs();
518 }
519};
520
521} // end anonymous namespace
522
523char MachineCopyPropagationLegacy::ID = 0;
524
525char &llvm::MachineCopyPropagationID = MachineCopyPropagationLegacy::ID;
526
527INITIALIZE_PASS(MachineCopyPropagationLegacy, DEBUG_TYPE,
528 "Machine Copy Propagation Pass", false, false)
529
530void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
531 DebugType DT) {
532 // If 'Reg' is defined by a copy, the copy is no longer a candidate
533 // for elimination. If a copy is "read" by a debug user, record the user
534 // for propagation.
535 for (MCRegUnit Unit : TRI->regunits(Reg)) {
536 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
537 if (DT == RegularUse) {
538 LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
539 MaybeDeadCopies.remove(Copy);
540 } else {
541 CopyDbgUsers[Copy].insert(&Reader);
542 }
543 }
544 }
545}
546
547void MachineCopyPropagation::readSuccessorLiveIns(
548 const MachineBasicBlock &MBB) {
549 if (MaybeDeadCopies.empty())
550 return;
551
552 // If a copy result is livein to a successor, it is not dead.
553 for (const MachineBasicBlock *Succ : MBB.successors()) {
554 for (const auto &LI : Succ->liveins()) {
555 for (MCRegUnitMaskIterator U(LI.PhysReg, TRI); U.isValid(); ++U) {
556 auto [Unit, Mask] = *U;
557 if ((Mask & LI.LaneMask).any()) {
558 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI))
559 MaybeDeadCopies.remove(Copy);
560 }
561 }
562 }
563 }
564}
565
566/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
567/// This fact may have been obscured by sub register usage or may not be true at
568/// all even though Src and Def are subregisters of the registers used in
569/// PreviousCopy. e.g.
570/// isNopCopy("ecx = COPY eax", AX, CX) == true
571/// isNopCopy("ecx = COPY eax", AH, CL) == false
572static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
573 MCRegister Def, const TargetRegisterInfo *TRI,
574 const TargetInstrInfo *TII, bool UseCopyInstr) {
575
576 std::optional<DestSourcePair> CopyOperands =
577 isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
578 MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
579 MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
580 if (Src == PreviousSrc && Def == PreviousDef)
581 return true;
582 if (!TRI->isSubRegister(PreviousSrc, Src))
583 return false;
584 unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
585 return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
586}
587
588/// Remove instruction \p Copy if there exists a previous copy that copies the
589/// register \p Src to the register \p Def; This may happen indirectly by
590/// copying the super registers.
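/// For example (illustrative):
///   $eax = COPY $ecx
///   ...             // No clobber of $eax or $ecx.
///   $ax  = COPY $cx // Redundant: already implied by the wider copy above.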
591bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
592 MCRegister Src, MCRegister Def) {
593 // Avoid eliminating a copy from/to a reserved register, as we cannot predict
594 // its value (example: the SPARC zero register is writable but stays zero).
595 if (MRI->isReserved(Src) || MRI->isReserved(Def))
596 return false;
597
598 // Search for an existing copy.
599 MachineInstr *PrevCopy =
600 Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
601 if (!PrevCopy)
602 return false;
603
604 auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
605 // Check that the existing copy uses the correct sub registers.
606 if (PrevCopyOperands->Destination->isDead())
607 return false;
608 if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
609 return false;
610
611 LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());
612
613 // Copy was redundantly redefining either Src or Def. Remove earlier kill
614 // flags between Copy and PrevCopy because the value will be reused now.
615 std::optional<DestSourcePair> CopyOperands =
616 isCopyInstr(Copy, *TII, UseCopyInstr);
617 assert(CopyOperands);
618
619 Register CopyDef = CopyOperands->Destination->getReg();
620 assert(CopyDef == Src || CopyDef == Def);
621 for (MachineInstr &MI :
622 make_range(PrevCopy->getIterator(), Copy.getIterator()))
623 MI.clearRegisterKills(CopyDef, TRI);
624
625 // Clear undef flag from remaining copy if needed.
626 if (!CopyOperands->Source->isUndef()) {
627 PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
628 .setIsUndef(false);
629 }
630
631 Copy.eraseFromParent();
632 Changed = true;
633 ++NumDeletes;
634 return true;
635}
636
637bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
638 const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
639 std::optional<DestSourcePair> CopyOperands =
640 isCopyInstr(Copy, *TII, UseCopyInstr);
641 Register Def = CopyOperands->Destination->getReg();
642
643 if (const TargetRegisterClass *URC =
644 UseI.getRegClassConstraint(UseIdx, TII, TRI))
645 return URC->contains(Def);
646
647 // We don't process further if UseI is a COPY, since forward copy propagation
648 // should handle that.
649 return false;
650}
651
652/// Decide whether we should forward the source of \param Copy to its use in
653/// \param UseI based on the physical register class constraints of the opcode
654/// and avoiding introducing more cross-class COPYs.
655bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
656 const MachineInstr &UseI,
657 unsigned UseIdx) {
658 std::optional<DestSourcePair> CopyOperands =
659 isCopyInstr(Copy, *TII, UseCopyInstr);
660 Register CopySrcReg = CopyOperands->Source->getReg();
661
662 // If the new register meets the opcode register constraints, then allow
663 // forwarding.
664 if (const TargetRegisterClass *URC =
665 UseI.getRegClassConstraint(UseIdx, TII, TRI))
666 return URC->contains(CopySrcReg);
667
668 auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
669 if (!UseICopyOperands)
670 return false;
671
672 /// COPYs don't have register class constraints, so if the user instruction
673 /// is a COPY, we just try to avoid introducing additional cross-class
674 /// COPYs. For example:
675 ///
676 /// RegClassA = COPY RegClassB // Copy parameter
677 /// ...
678 /// RegClassB = COPY RegClassA // UseI parameter
679 ///
680 /// which after forwarding becomes
681 ///
682 /// RegClassA = COPY RegClassB
683 /// ...
684 /// RegClassB = COPY RegClassB
685 ///
686 /// so we have reduced the number of cross-class COPYs and potentially
687 /// introduced a nop COPY that can be removed.
688
689 // Allow forwarding if src and dst belong to any common class, so long as they
690 // don't belong to any (possibly smaller) common class that requires copies to
691 // go via a different class.
692 Register UseDstReg = UseICopyOperands->Destination->getReg();
693 bool Found = false;
694 bool IsCrossClass = false;
695 for (const TargetRegisterClass *RC : TRI->regclasses()) {
696 if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
697 Found = true;
698 if (TRI->getCrossCopyRegClass(RC) != RC) {
699 IsCrossClass = true;
700 break;
701 }
702 }
703 }
704 if (!Found)
705 return false;
706 if (!IsCrossClass)
707 return true;
708 // The forwarded copy would be cross-class. Only do this if the original copy
709 // was also cross-class.
710 Register CopyDstReg = CopyOperands->Destination->getReg();
711 for (const TargetRegisterClass *RC : TRI->regclasses()) {
712 if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
713 TRI->getCrossCopyRegClass(RC) != RC)
714 return true;
715 }
716 return false;
717}
718
719/// Check that \p MI does not have implicit uses that overlap with its \p Use
720/// operand (the register being replaced), since these can sometimes be
721/// implicitly tied to other operands. For example, on AMDGPU:
722///
723/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
724///
725/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
726/// way of knowing we need to update the latter when updating the former.
727bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
728 const MachineOperand &Use) {
729 for (const MachineOperand &MIUse : MI.uses())
730 if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
731 MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
732 return true;
733
734 return false;
735}
736
737/// For an MI that has multiple definitions, check whether \p MI has
738/// a definition that overlaps with another of its definitions.
739/// For example, on ARM: umull r9, r9, lr, r0
740/// The umull instruction is unpredictable unless RdHi and RdLo are different.
741bool MachineCopyPropagation::hasOverlappingMultipleDef(
742 const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
743 for (const MachineOperand &MIDef : MI.all_defs()) {
744 if ((&MIDef != &MODef) && MIDef.isReg() &&
745 TRI->regsOverlap(Def, MIDef.getReg()))
746 return true;
747 }
748
749 return false;
750}
751
752/// Return true if it is safe to update all users of the \p CopySrc register
753/// in the given \p Copy instruction.
754bool MachineCopyPropagation::canUpdateSrcUsers(const MachineInstr &Copy,
755 const MachineOperand &CopySrc) {
756 assert(CopySrc.isReg() && "Expected a register operand");
757 for (auto *SrcUser : Tracker.getSrcUsers(CopySrc.getReg(), *TRI)) {
758 if (hasImplicitOverlap(*SrcUser, CopySrc))
759 return false;
760
761 for (MachineOperand &MO : SrcUser->uses()) {
762 if (!MO.isReg() || !MO.isUse() || MO.getReg() != CopySrc.getReg())
763 continue;
764 if (MO.isTied() || !MO.isRenamable() ||
765 !isBackwardPropagatableRegClassCopy(Copy, *SrcUser,
766 MO.getOperandNo()))
767 return false;
768 }
769 }
770 return true;
771}
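// Illustrative example for backward propagation with intermediate source
// users (assumes the backward walk already tracked the COPY below):
//   $r0 = OP ...
//   ... = USE $r0          // Recorded as a source user of the COPY.
//   $r1 = COPY killed $r0
// Rewriting OP to define $r1 directly is only valid if the intermediate USE of
// $r0 can also be rewritten to use $r1, which is what canUpdateSrcUsers()
// checks before propagateDefs() performs the rewrite.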
772
773/// Look for available copies whose destination register is used by \p MI and
774/// replace the use in \p MI with the copy's source register.
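/// For example (illustrative, x86-style registers):
///   renamable $eax = COPY renamable $ecx
///   ...                                  // No clobber of $ecx.
///   $edx = ADD32rr $edx, renamable $eax  // Use of $eax becomes a use of $ecx.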
775void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
776 if (!Tracker.hasAnyCopies())
777 return;
778
779 // Look for non-tied explicit vreg uses that have an active COPY
780 // instruction that defines the physical register allocated to them.
781 // Replace the vreg with the source of the active COPY.
782 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
783 ++OpIdx) {
784 MachineOperand &MOUse = MI.getOperand(OpIdx);
785 // Don't forward into undef use operands, since doing so can cause problems
786 // with the machine verifier: it doesn't treat undef reads as reads, so we
787 // can end up with a live range that ends on an undef read, and the verifier
788 // then reports that the live range doesn't end on a read of the live-range
789 // register.
790 if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
791 MOUse.isImplicit())
792 continue;
793
794 if (!MOUse.getReg())
795 continue;
796
797 // Check that the register is marked 'renamable' so we know it is safe to
798 // rename it without violating any constraints that aren't expressed in the
799 // IR (e.g. ABI or opcode requirements).
800 if (!MOUse.isRenamable())
801 continue;
802
803 MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
804 *TRI, *TII, UseCopyInstr);
805 if (!Copy)
806 continue;
807
808 std::optional<DestSourcePair> CopyOperands =
809 isCopyInstr(*Copy, *TII, UseCopyInstr);
810 Register CopyDstReg = CopyOperands->Destination->getReg();
811 const MachineOperand &CopySrc = *CopyOperands->Source;
812 Register CopySrcReg = CopySrc.getReg();
813
814 Register ForwardedReg = CopySrcReg;
815 // MI might use a sub-register of the Copy destination, in which case the
816 // forwarded register is the matching sub-register of the Copy source.
817 if (MOUse.getReg() != CopyDstReg) {
818 unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
819 assert(SubRegIdx &&
820 "MI source is not a sub-register of Copy destination");
821 ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
822 if (!ForwardedReg) {
823 LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
824 << TRI->getSubRegIndexName(SubRegIdx) << '\n');
825 continue;
826 }
827 }
828
829 // Don't forward COPYs of reserved regs unless they are constant.
830 if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
831 continue;
832
833 if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
834 continue;
835
836 if (hasImplicitOverlap(MI, MOUse))
837 continue;
838
839 // Check that the instruction is not a copy that partially overwrites the
840 // original copy source that we are about to use. The tracker mechanism
841 // cannot cope with that.
842 if (isCopyInstr(MI, *TII, UseCopyInstr) &&
843 MI.modifiesRegister(CopySrcReg, TRI) &&
844 !MI.definesRegister(CopySrcReg, /*TRI=*/nullptr)) {
845 LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
846 continue;
847 }
848
849 if (!DebugCounter::shouldExecute(FwdCounter)) {
850 LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
851 << MI);
852 continue;
853 }
854
855 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
856 << "\n with " << printReg(ForwardedReg, TRI)
857 << "\n in " << MI << " from " << *Copy);
858
859 MOUse.setReg(ForwardedReg);
860
861 if (!CopySrc.isRenamable())
862 MOUse.setIsRenamable(false);
863 MOUse.setIsUndef(CopySrc.isUndef());
864
865 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
866
867 // Clear kill markers that may have been invalidated.
868 for (MachineInstr &KMI :
869 make_range(Copy->getIterator(), std::next(MI.getIterator())))
870 KMI.clearRegisterKills(CopySrcReg, TRI);
871
872 ++NumCopyForwards;
873 Changed = true;
874 }
875}
876
877void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
878 LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
879 << "\n");
880
881 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
882 // Analyze copies (which don't overlap themselves).
883 std::optional<DestSourcePair> CopyOperands =
884 isCopyInstr(MI, *TII, UseCopyInstr);
885 if (CopyOperands) {
886 Register RegSrc = CopyOperands->Source->getReg();
887 Register RegDef = CopyOperands->Destination->getReg();
888 if (!TRI->regsOverlap(RegDef, RegSrc)) {
889 assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
890 "MachineCopyPropagation should be run after register allocation!");
891
892 MCRegister Def = RegDef.asMCReg();
893 MCRegister Src = RegSrc.asMCReg();
894
895 // The two copies cancel out and the source of the first copy
896 // hasn't been overridden, eliminate the second one. e.g.
897 // %ecx = COPY %eax
898 // ... nothing clobbered eax.
899 // %eax = COPY %ecx
900 // =>
901 // %ecx = COPY %eax
902 //
903 // or
904 //
905 // %ecx = COPY %eax
906 // ... nothing clobbered eax.
907 // %ecx = COPY %eax
908 // =>
909 // %ecx = COPY %eax
910 if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
911 continue;
912 }
913 }
914
915 // Clobber any earlyclobber regs first.
916 for (const MachineOperand &MO : MI.operands())
917 if (MO.isReg() && MO.isEarlyClobber()) {
918 MCRegister Reg = MO.getReg().asMCReg();
919 // If we have a tied earlyclobber, that means it is also read by this
920 // instruction, so we need to make sure we don't remove it as dead
921 // later.
922 if (MO.isTied())
923 ReadRegister(Reg, MI, RegularUse);
924 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
925 }
926
927 forwardUses(MI);
928
929 // Attempt to canonicalize/optimize the instruction now that its arguments have
930 // been mutated. This may convert MI from a non-copy to a copy instruction.
931 if (TII->simplifyInstruction(MI)) {
932 Changed = true;
933 LLVM_DEBUG(dbgs() << "MCP: After simplifyInstruction: " << MI);
934 }
935
936 CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
937 if (CopyOperands) {
938 Register RegSrc = CopyOperands->Source->getReg();
939 Register RegDef = CopyOperands->Destination->getReg();
940 // It's possible that the previous transformations have resulted in a
941 // no-op register move (i.e. one where source and destination registers
942 // are the same and are not referring to a reserved register). If so,
943 // delete it.
944 if (RegSrc == RegDef && !MRI->isReserved(RegSrc)) {
945 MI.eraseFromParent();
946 NumDeletes++;
947 Changed = true;
948 continue;
949 }
950
951 if (!TRI->regsOverlap(RegDef, RegSrc)) {
952 // Copy is now a candidate for deletion.
953 MCRegister Def = RegDef.asMCReg();
954 if (!MRI->isReserved(Def))
955 MaybeDeadCopies.insert(&MI);
956 }
957 }
958
959 SmallVector<MCRegister, 4> Defs;
960 const MachineOperand *RegMask = nullptr;
961 for (const MachineOperand &MO : MI.operands()) {
962 if (MO.isRegMask())
963 RegMask = &MO;
964 if (!MO.isReg())
965 continue;
966 Register Reg = MO.getReg();
967 if (!Reg)
968 continue;
969
970 assert(!Reg.isVirtual() &&
971 "MachineCopyPropagation should be run after register allocation!");
972
973 if (MO.isDef() && !MO.isEarlyClobber()) {
974 // Skip invalidating constant registers.
975 if (!MRI->isConstantPhysReg(Reg)) {
976 Defs.push_back(Reg.asMCReg());
977 continue;
978 }
979 } else if (MO.readsReg())
980 ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
981 }
982
983 // The instruction has a register mask operand which means that it clobbers
984 // a large set of registers. Treat clobbered registers the same way as
985 // defined registers.
986 if (RegMask) {
987 BitVector &PreservedRegUnits =
988 Tracker.getPreservedRegUnits(*RegMask, *TRI);
989
990 // Erase any MaybeDeadCopies whose destination register is clobbered.
991 for (SmallSetVector<MachineInstr *, 8>::iterator DI =
992 MaybeDeadCopies.begin();
993 DI != MaybeDeadCopies.end();) {
994 MachineInstr *MaybeDead = *DI;
995 std::optional<DestSourcePair> CopyOperands =
996 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
997 MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
998 assert(!MRI->isReserved(Reg));
999
1000 if (!RegMask->clobbersPhysReg(Reg)) {
1001 ++DI;
1002 continue;
1003 }
1004
1005 // Invalidate all entries in the copy map which are not preserved by
1006 // this register mask.
1007 bool MIRefedinCopyInfo = false;
1008 for (unsigned RegUnit : TRI->regunits(Reg)) {
1009 if (!PreservedRegUnits.test(RegUnit))
1010 Tracker.clobberRegUnit(RegUnit, *TRI, *TII, UseCopyInstr);
1011 else {
1012 if (MaybeDead == Tracker.findCopyForUnit(RegUnit, *TRI)) {
1013 MIRefedinCopyInfo = true;
1014 }
1015 }
1016 }
1017
1018 // erase() will return the next valid iterator pointing to the next
1019 // element after the erased one.
1020 DI = MaybeDeadCopies.erase(DI);
1021
1022 // Preserved by RegMask, DO NOT remove copy
1023 if (MIRefedinCopyInfo)
1024 continue;
1025
1026 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: "
1027 << *MaybeDead);
1028
1029 MaybeDead->eraseFromParent();
1030 Changed = true;
1031 ++NumDeletes;
1032 }
1033 }
1034
1035 // Any previous copy definition or reading the Defs is no longer available.
1036 for (MCRegister Reg : Defs)
1037 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1038
1039 if (CopyOperands) {
1040 Register RegSrc = CopyOperands->Source->getReg();
1041 Register RegDef = CopyOperands->Destination->getReg();
1042 if (!TRI->regsOverlap(RegDef, RegSrc)) {
1043 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1044 }
1045 }
1046 }
1047
1048 bool TracksLiveness = MRI->tracksLiveness();
1049
1050 // If liveness is tracked, we can use the live-in lists to know which
1051 // copies aren't dead.
1052 if (TracksLiveness)
1053 readSuccessorLiveIns(MBB);
1054
1055 // If MBB doesn't have successors, delete copies whose defs are not used.
1056 // If MBB does have successors, we can only delete copies if we are able to
1057 // use liveness information from successors to confirm they are really dead.
1058 if (MBB.succ_empty() || TracksLiveness) {
1059 for (MachineInstr *MaybeDead : MaybeDeadCopies) {
1060 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
1061 MaybeDead->dump());
1062
1063 std::optional<DestSourcePair> CopyOperands =
1064 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
1065 assert(CopyOperands);
1066
1067 Register SrcReg = CopyOperands->Source->getReg();
1068 Register DestReg = CopyOperands->Destination->getReg();
1069 assert(!MRI->isReserved(DestReg));
1070
1071 // Update matching debug values, if any.
1072 const auto &DbgUsers = CopyDbgUsers[MaybeDead];
1073 SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
1074 DbgUsers.end());
1075 MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
1076 MaybeDeadDbgUsers);
1077
1078 MaybeDead->eraseFromParent();
1079 Changed = true;
1080 ++NumDeletes;
1081 }
1082 }
1083
1084 MaybeDeadCopies.clear();
1085 CopyDbgUsers.clear();
1086 Tracker.clear();
1087}
1088
1089static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
1090 const MachineRegisterInfo &MRI) {
1091 Register Def = CopyOperands.Destination->getReg();
1092 Register Src = CopyOperands.Source->getReg();
1093
1094 if (!Def || !Src)
1095 return false;
1096
1097 if (MRI.isReserved(Def) || MRI.isReserved(Src))
1098 return false;
1099
1100 return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
1101}
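// Illustrative example: a copy such as
//   renamable $r1 = COPY renamable killed $r0
// is backward propagatable, so propagateDefs() can rewrite an earlier
// "$r0 = OP ..." to define $r1 directly, making the COPY itself dead (this is
// the third transformation described in the file header).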
1102
1103void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
1104 if (!Tracker.hasAnyCopies())
1105 return;
1106
1107 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
1108 ++OpIdx) {
1109 MachineOperand &MODef = MI.getOperand(OpIdx);
1110
1111 if (!MODef.isReg() || MODef.isUse())
1112 continue;
1113
1114 // Ignore non-trivial cases.
1115 if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
1116 continue;
1117
1118 if (!MODef.getReg())
1119 continue;
1120
1121 // We only handle renamable registers, i.e. those that came from a vreg.
1122 if (!MODef.isRenamable())
1123 continue;
1124
1125 MachineInstr *Copy = Tracker.findAvailBackwardCopy(
1126 MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
1127 if (!Copy)
1128 continue;
1129
1130 std::optional<DestSourcePair> CopyOperands =
1131 isCopyInstr(*Copy, *TII, UseCopyInstr);
1132 Register Def = CopyOperands->Destination->getReg();
1133 Register Src = CopyOperands->Source->getReg();
1134
1135 if (MODef.getReg() != Src)
1136 continue;
1137
1138 if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
1139 continue;
1140
1141 if (hasImplicitOverlap(MI, MODef))
1142 continue;
1143
1144 if (hasOverlappingMultipleDef(MI, MODef, Def))
1145 continue;
1146
1147 if (!canUpdateSrcUsers(*Copy, *CopyOperands->Source))
1148 continue;
1149
1150 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
1151 << "\n with " << printReg(Def, TRI) << "\n in "
1152 << MI << " from " << *Copy);
1153
1154 MODef.setReg(Def);
1155 MODef.setIsRenamable(CopyOperands->Destination->isRenamable());
1156
1157 for (auto *SrcUser : Tracker.getSrcUsers(Src, *TRI)) {
1158 for (MachineOperand &MO : SrcUser->uses()) {
1159 if (!MO.isReg() || !MO.isUse() || MO.getReg() != Src)
1160 continue;
1161 MO.setReg(Def);
1162 MO.setIsRenamable(CopyOperands->Destination->isRenamable());
1163 }
1164 }
1165
1166 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
1167 MaybeDeadCopies.insert(Copy);
1168 Changed = true;
1169 ++NumCopyBackwardPropagated;
1170 }
1171}
1172
1173void MachineCopyPropagation::BackwardCopyPropagateBlock(
1174 MachineBasicBlock &MBB) {
1175 LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
1176 << "\n");
1177
1178 for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
1179 // Ignore non-trivial COPYs.
1180 std::optional<DestSourcePair> CopyOperands =
1181 isCopyInstr(MI, *TII, UseCopyInstr);
1182 if (CopyOperands && MI.getNumImplicitOperands() == 0) {
1183 Register DefReg = CopyOperands->Destination->getReg();
1184 Register SrcReg = CopyOperands->Source->getReg();
1185
1186 if (!TRI->regsOverlap(DefReg, SrcReg)) {
1187 // Unlike forward cp, we don't invoke propagateDefs here,
1188 // just let forward cp do COPY-to-COPY propagation.
1189 if (isBackwardPropagatableCopy(*CopyOperands, *MRI)) {
1190 Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
1191 UseCopyInstr);
1192 Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
1193 UseCopyInstr);
1194 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1195 continue;
1196 }
1197 }
1198 }
1199
1200 // Invalidate any earlyclobber regs first.
1201 for (const MachineOperand &MO : MI.operands())
1202 if (MO.isReg() && MO.isEarlyClobber()) {
1203 MCRegister Reg = MO.getReg().asMCReg();
1204 if (!Reg)
1205 continue;
1206 Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
1207 }
1208
1209 propagateDefs(MI);
1210 for (const MachineOperand &MO : MI.operands()) {
1211 if (!MO.isReg())
1212 continue;
1213
1214 if (!MO.getReg())
1215 continue;
1216
1217 if (MO.isDef())
1218 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1219 UseCopyInstr);
1220
1221 if (MO.readsReg()) {
1222 if (MO.isDebug()) {
1223 // Check if the register in the debug instruction is utilized
1224 // in a copy instruction, so we can update the debug info if the
1225 // register is changed.
1226 for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
1227 if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
1228 CopyDbgUsers[Copy].insert(&MI);
1229 }
1230 }
1231 } else if (!Tracker.trackSrcUsers(MO.getReg().asMCReg(), MI, *TRI, *TII,
1232 UseCopyInstr)) {
1233 // If we can't track the source users, invalidate the register.
1234 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1235 UseCopyInstr);
1236 }
1237 }
1238 }
1239 }
1240
1241 for (auto *Copy : MaybeDeadCopies) {
1242 std::optional<DestSourcePair> CopyOperands =
1243 isCopyInstr(*Copy, *TII, UseCopyInstr);
1244 Register Src = CopyOperands->Source->getReg();
1245 Register Def = CopyOperands->Destination->getReg();
1246 const auto &DbgUsers = CopyDbgUsers[Copy];
1247 SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
1248 DbgUsers.end());
1249
1250 MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
1251 Copy->eraseFromParent();
1252 ++NumDeletes;
1253 }
1254
1255 MaybeDeadCopies.clear();
1256 CopyDbgUsers.clear();
1257 Tracker.clear();
1258}
1259
1260static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
1261 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
1262 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
1263 MachineInstr *Leader) {
1264 auto &SC = SpillChain[Leader];
1265 auto &RC = ReloadChain[Leader];
1266 for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1267 (*I)->dump();
1268 for (MachineInstr *MI : RC)
1269 MI->dump();
1270}
1271
1272// Remove spill-reload like copy chains. For example
1273// r0 = COPY r1
1274// r1 = COPY r2
1275// r2 = COPY r3
1276// r3 = COPY r4
1277// <def-use r4>
1278// r4 = COPY r3
1279// r3 = COPY r2
1280// r2 = COPY r1
1281// r1 = COPY r0
1282// will be folded into
1283// r0 = COPY r1
1284// r1 = COPY r4
1285// <def-use r4>
1286// r4 = COPY r1
1287// r1 = COPY r0
1288// TODO: Currently we don't track usage of r0 outside the chain, so we
1289// conservatively keep its value as it was before the rewrite.
1290//
1291// The algorithm tries to maintain
1292// property#1: No Def of a spill COPY in the chain is used or defined until the
1293// paired reload COPY in the chain uses the Def.
1294//
1295// property#2: No Source of a COPY in the chain is used or defined until the
1296// next COPY in the chain defines the Source, except for the innermost
1297// spill-reload pair.
1298//
1299// The algorithm walks every COPY inside the MBB, assumes the COPY is a reload
1300// COPY, and then searches backward for the paired spill COPY, i.e. the COPY
1301// that defines the Src of the reload COPY. If such a pair is found, it either
1302// belongs to an existing chain or starts a new chain, depending on whether the
1303// last available COPY uses the Def of the reload COPY.
1304// Implementation notes: we use CopyTracker::findLastSeenDefInCopy(Reg, ...) to
1305// find the last COPY that defines Reg, and CopyTracker::findLastSeenUseInCopy(
1306// Reg, ...) to find the last COPY that uses Reg. When we encounter a non-COPY
1307// instruction, we check the registers in its operands; if such a Reg is
1308// defined by a COPY, we untrack it via
1309// CopyTracker::clobberRegister(Reg, ...).
1310void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
1311 // ChainLeader maps MI inside a spill-reload chain to its innermost reload COPY.
1312 // Thus we can track if a MI belongs to an existing spill-reload chain.
1313 DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
1314 // SpillChain maps innermost reload COPY of a spill-reload chain to a sequence
1315 // of COPYs that forms spills of a spill-reload chain.
1316 // ReloadChain maps innermost reload COPY of a spill-reload chain to a
1317 // sequence of COPYs that forms reloads of a spill-reload chain.
1318 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
1319 // If a COPY's Source has a use or def before the next COPY defines the
1320 // Source, we put the COPY in this set to keep property#2.
1321 DenseSet<const MachineInstr *> CopySourceInvalid;
1322
1323 auto TryFoldSpillageCopies =
1324 [&, this](const SmallVectorImpl<MachineInstr *> &SC,
1325 const SmallVectorImpl<MachineInstr *> &RC) {
1326 assert(SC.size() == RC.size() && "Spill-reload should be paired");
1327
1328 // We need at least 3 pairs of copies for the transformation to apply,
1329 // because the first outermost pair cannot be removed since we don't
1330 // recolor outside of the chain and we need at least one temporary
1331 // spill slot to shorten the chain. If we only have a chain of two
1332 // pairs, we already have the shortest sequence this code can handle:
1333 // the outermost pair for the temporary spill slot, and the pair that
1334 // uses that temporary spill slot for the other end of the chain.
1335 // TODO: We might be able to simplify to one spill-reload pair if we
1336 // collect more information about the outermost COPY.
1337 if (SC.size() <= 2)
1338 return;
1339
1340 // If property#2 is violated, we don't fold the chain.
1341 for (const MachineInstr *Spill : drop_begin(SC))
1342 if (CopySourceInvalid.count(Spill))
1343 return;
1344
1345 for (const MachineInstr *Reload : drop_end(RC))
1346 if (CopySourceInvalid.count(Reload))
1347 return;
1348
1349 auto CheckCopyConstraint = [this](Register Def, Register Src) {
1350 for (const TargetRegisterClass *RC : TRI->regclasses()) {
1351 if (RC->contains(Def) && RC->contains(Src))
1352 return true;
1353 }
1354 return false;
1355 };
1356
1357 auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
1358 const MachineOperand *New) {
1359 for (MachineOperand &MO : MI->operands()) {
1360 if (&MO == Old)
1361 MO.setReg(New->getReg());
1362 }
1363 };
1364
1365 std::optional<DestSourcePair> InnerMostSpillCopy =
1366 isCopyInstr(*SC[0], *TII, UseCopyInstr);
1367 std::optional<DestSourcePair> OuterMostSpillCopy =
1368 isCopyInstr(*SC.back(), *TII, UseCopyInstr);
1369 std::optional<DestSourcePair> InnerMostReloadCopy =
1370 isCopyInstr(*RC[0], *TII, UseCopyInstr);
1371 std::optional<DestSourcePair> OuterMostReloadCopy =
1372 isCopyInstr(*RC.back(), *TII, UseCopyInstr);
1373 if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
1374 InnerMostSpillCopy->Source->getReg()) ||
1375 !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
1376 OuterMostReloadCopy->Destination->getReg()))
1377 return;
1378
1379 SpillageChainsLength += SC.size() + RC.size();
1380 NumSpillageChains += 1;
1381 UpdateReg(SC[0], InnerMostSpillCopy->Destination,
1382 OuterMostSpillCopy->Source);
1383 UpdateReg(RC[0], InnerMostReloadCopy->Source,
1384 OuterMostReloadCopy->Destination);
1385
1386 for (size_t I = 1; I < SC.size() - 1; ++I) {
1387 SC[I]->eraseFromParent();
1388 RC[I]->eraseFromParent();
1389 NumDeletes += 2;
1390 }
1391 };
1392
1393 auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
1394 if (MaybeCopy.getNumImplicitOperands() > 0)
1395 return false;
1396 std::optional<DestSourcePair> CopyOperands =
1397 isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
1398 if (!CopyOperands)
1399 return false;
1400 Register Src = CopyOperands->Source->getReg();
1401 Register Def = CopyOperands->Destination->getReg();
1402 return Src && Def && !TRI->regsOverlap(Src, Def) &&
1403 CopyOperands->Source->isRenamable() &&
1404 CopyOperands->Destination->isRenamable();
1405 };
1406
1407 auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
1408 const MachineInstr &Reload) {
1409 if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
1410 return false;
1411 std::optional<DestSourcePair> SpillCopy =
1412 isCopyInstr(Spill, *TII, UseCopyInstr);
1413 std::optional<DestSourcePair> ReloadCopy =
1414 isCopyInstr(Reload, *TII, UseCopyInstr);
1415 if (!SpillCopy || !ReloadCopy)
1416 return false;
1417 return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
1418 SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
1419 };
1420
1421 auto IsChainedCopy = [&, this](const MachineInstr &Prev,
1422 const MachineInstr &Current) {
1423 if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
1424 return false;
1425 std::optional<DestSourcePair> PrevCopy =
1426 isCopyInstr(Prev, *TII, UseCopyInstr);
1427 std::optional<DestSourcePair> CurrentCopy =
1428 isCopyInstr(Current, *TII, UseCopyInstr);
1429 if (!PrevCopy || !CurrentCopy)
1430 return false;
1431 return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
1432 };
1433
1434 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
1435 std::optional<DestSourcePair> CopyOperands =
1436 isCopyInstr(MI, *TII, UseCopyInstr);
1437
1438 // Update track information via non-copy instruction.
1439 SmallSet<Register, 8> RegsToClobber;
1440 if (!CopyOperands) {
1441 for (const MachineOperand &MO : MI.operands()) {
1442 if (!MO.isReg())
1443 continue;
1444 Register Reg = MO.getReg();
1445 if (!Reg)
1446 continue;
1447 MachineInstr *LastUseCopy =
1448 Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
1449 if (LastUseCopy) {
1450 LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
1451 LLVM_DEBUG(LastUseCopy->dump());
1452 LLVM_DEBUG(dbgs() << "might be invalidated by\n");
1453 LLVM_DEBUG(MI.dump());
1454 CopySourceInvalid.insert(LastUseCopy);
1455 }
1456 // Note that Tracker.clobberRegister(Reg, ...) removes tracking of
1457 // Reg, i.e., the COPY that defines Reg is removed from the mapping,
1458 // and COPYs that use Reg are marked unavailable.
1459 // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
1460 // defined by a previous COPY, since we don't want to make COPYs that
1461 // use Reg unavailable.
1462 if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
1463 UseCopyInstr))
1464 // Thus we can keep the property#1.
1465 RegsToClobber.insert(Reg);
1466 }
1467 for (Register Reg : RegsToClobber) {
1468 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1469 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
1470 << "\n");
1471 }
1472 continue;
1473 }
1474
1475 Register Src = CopyOperands->Source->getReg();
1476 Register Def = CopyOperands->Destination->getReg();
1477 // Check if we can find a pair spill-reload copy.
1478 LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
1479 LLVM_DEBUG(MI.dump());
1480 MachineInstr *MaybeSpill =
1481 Tracker.findLastSeenDefInCopy(MI, Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1482 bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
1483 if (!MaybeSpillIsChained && MaybeSpill &&
1484 IsSpillReloadPair(*MaybeSpill, MI)) {
1485 // Check if we already have an existing chain. Now we have a
1486 // spill-reload pair.
1487 // L2: r2 = COPY r3
1488 // L5: r3 = COPY r2
1489 // Look for a valid COPY before L5 which uses r3.
1490 // There are several cases.
1491 // Case #1:
1492 // No COPY is found, which can happen if r3 is defined or used between
1493 // (L2, L5); we create a new chain for L2 and L5.
1494 // Case #2:
1495 // L2: r2 = COPY r3
1496 // L5: r3 = COPY r2
1497 // Such COPY is found and is L2, we create a new chain for L2 and L5.
1498 // Case #3:
1499 // L2: r2 = COPY r3
1500 // L3: r1 = COPY r3
1501 // L5: r3 = COPY r2
1502 // we create a new chain for L2 and L5.
1503 // Case #4:
1504 // L2: r2 = COPY r3
1505 // L3: r1 = COPY r3
1506 // L4: r3 = COPY r1
1507 // L5: r3 = COPY r2
1508 // Such a COPY won't be found since L4 defines r3. We create a new chain
1509 // for L2 and L5.
1510 // Case #5:
1511 // L2: r2 = COPY r3
1512 // L3: r3 = COPY r1
1513 // L4: r1 = COPY r3
1514 // L5: r3 = COPY r2
1515 // COPY is found and is L4 which belongs to an existing chain, we add
1516 // L2 and L5 to this chain.
1517 LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1518 LLVM_DEBUG(MaybeSpill->dump());
1519 MachineInstr *MaybePrevReload =
1520 Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
1521 auto Leader = ChainLeader.find(MaybePrevReload);
1522 MachineInstr *L = nullptr;
1523 if (Leader == ChainLeader.end() ||
1524 (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
1525 L = &MI;
1526 assert(!SpillChain.count(L) &&
1527 "SpillChain should not have contained newly found chain");
1528 } else {
1529 assert(MaybePrevReload &&
1530 "Found a valid leader through nullptr should not happend");
1531 L = Leader->second;
1532 assert(SpillChain[L].size() > 0 &&
1533 "Existing chain's length should be larger than zero");
1534 }
1535 assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
1536 "Newly found paired spill-reload should not belong to any chain "
1537 "at this point");
1538 ChainLeader.insert({MaybeSpill, L});
1539 ChainLeader.insert({&MI, L});
1540 SpillChain[L].push_back(MaybeSpill);
1541 ReloadChain[L].push_back(&MI);
1542 LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
1543 LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
1544 } else if (MaybeSpill && !MaybeSpillIsChained) {
1545 // MaybeSpill is unable to pair with MI; that is to say, adding MI makes
1546 // the chain invalid.
1547 // The COPY that defines Src is no longer considered a candidate for a
1548 // valid chain, since we expect the Def of a spill copy not to be used by
1549 // any COPY instruction until the paired reload copy. For example:
1550 // L1: r1 = COPY r2
1551 // L2: r3 = COPY r1
1552 // If we later have
1553 // L1: r1 = COPY r2
1554 // L2: r3 = COPY r1
1555 // L3: r2 = COPY r1
1556 // L1 and L3 can't be a valid spill-reload pair.
1557 // Thus we keep the property#1.
1558 LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1559 LLVM_DEBUG(MaybeSpill->dump());
1560 LLVM_DEBUG(MI.dump());
1561 Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1562 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
1563 << "\n");
1564 }
1565 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1566 }
1567
1568 for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
1569 auto &SC = I->second;
1570 assert(ReloadChain.count(I->first) &&
1571 "Reload chain of the same leader should exist");
1572 auto &RC = ReloadChain[I->first];
1573 TryFoldSpillageCopies(SC, RC);
1574 }
1575
1576 MaybeDeadCopies.clear();
1577 CopyDbgUsers.clear();
1578 Tracker.clear();
1579}
1580
1581bool MachineCopyPropagationLegacy::runOnMachineFunction(MachineFunction &MF) {
1582 if (skipFunction(MF.getFunction()))
1583 return false;
1584
1585 return MachineCopyPropagation(UseCopyInstr).run(MF);
1586}
1587
1588PreservedAnalyses
1589MachineCopyPropagationPass::run(MachineFunction &MF,
1590 MachineFunctionAnalysisManager &MFAM) {
1591 MFPropsModifier _(*this, MF);
1592 if (!MachineCopyPropagation(UseCopyInstr).run(MF))
1593 return PreservedAnalyses::all();
1594 auto PA = getMachineFunctionPassPreservedAnalyses();
1595 PA.preserveSet<CFGAnalyses>();
1596 return PA;
1597}
1598
1599bool MachineCopyPropagation::run(MachineFunction &MF) {
1600 bool isSpillageCopyElimEnabled = false;
1601 switch (EnableSpillageCopyElimination) {
1602 case cl::BOU_UNSET:
1603 isSpillageCopyElimEnabled =
1604 MF.getSubtarget().enableSpillageCopyElimination();
1605 break;
1606 case cl::BOU_TRUE:
1607 isSpillageCopyElimEnabled = true;
1608 break;
1609 case cl::BOU_FALSE:
1610 isSpillageCopyElimEnabled = false;
1611 break;
1612 }
1613
1614 Changed = false;
1615
1616 TRI = MF.getSubtarget().getRegisterInfo();
1617 TII = MF.getSubtarget().getInstrInfo();
1618 MRI = &MF.getRegInfo();
1619
1620 for (MachineBasicBlock &MBB : MF) {
1621 if (isSpillageCopyElimEnabled)
1622 EliminateSpillageCopies(MBB);
1623 BackwardCopyPropagateBlock(MBB);
1624 ForwardCopyPropagateBlock(MBB);
1625 }
1626
1627 return Changed;
1628}
1629
1630MachineFunctionPass *
1631llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
1632 return new MachineCopyPropagationLegacy(UseCopyInstr);
1633}
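// Usage note (illustrative): this pass normally runs as part of the codegen
// pipeline after register allocation. To exercise it in isolation on MIR
// input, something like "llc -run-pass=machine-cp input.mir" can be used,
// where "machine-cp" is the pass name registered by INITIALIZE_PASS above.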
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
virtual bool runOnMachineFunction(MachineFunction &MF)=0
runOnMachineFunction - This method must be overloaded to perform the desired machine code transformat...
virtual MachineFunctionProperties getRequiredProperties() const
Properties which a MachineFunction may have at a given point in time.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
Function & getFunction()
Return the LLVM function that this machine code represents.
Representation of each machine instruction.
Definition: MachineInstr.h:72
LLVM_ABI void eraseFromParent()
Unlink 'this' from the containing basic block and delete it.
LLVM_ABI void dump() const
const MachineOperand & getOperand(unsigned i) const
Definition: MachineInstr.h:595
LLVM_ABI const TargetRegisterClass * getRegClassConstraint(unsigned OpIdx, const TargetInstrInfo *TII, const TargetRegisterInfo *TRI) const
Compute the static register class constraint for operand OpIdx.
MachineOperand class - Representation of each machine instruction operand.
bool isImplicit() const
LLVM_ABI void setIsRenamable(bool Val=true)
bool isReg() const
isReg - Tests if this is a MO_Register operand.
LLVM_ABI void setReg(Register Reg)
Change the register this operand corresponds to.
LLVM_ABI bool isRenamable() const
isRenamable - Returns true if this register may be renamed, i.e.
void setIsUndef(bool Val=true)
Register getReg() const
getReg - Returns the register number.
static bool clobbersPhysReg(const uint32_t *RegMask, MCRegister PhysReg)
clobbersPhysReg - Returns true if this RegMask clobbers PhysReg.
const uint32_t * getRegMask() const
getRegMask - Returns a bit mask of registers preserved by this RegMask operand.
MachineRegisterInfo - Keep track of information for virtual and physical registers,...
void dump() const
Definition: Pass.cpp:146
A set of analyses that are preserved following a run of a transformation pass.
Definition: Analysis.h:112
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Definition: Analysis.h:118
Wrapper class representing virtual and physical registers.
Definition: Register.h:19
MCRegister asMCReg() const
Utility to check-convert this value to a MCRegister.
Definition: Register.h:102
constexpr bool isPhysical() const
Return true if the specified register number is in the physical register namespace.
Definition: Register.h:78
iterator begin()
Get an iterator to the beginning of the SetVector.
Definition: SetVector.h:109
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
Definition: SmallPtrSet.h:541
A SetVector that performs no allocations if smaller than a certain size.
Definition: SetVector.h:356
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
Definition: SmallSet.h:134
void insert_range(Range &&R)
Definition: SmallSet.h:194
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
Definition: SmallSet.h:182
size_t size() const
Definition: SmallVector.h:79
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
Definition: SmallVector.h:574
void push_back(const T &Elt)
Definition: SmallVector.h:414
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Definition: SmallVector.h:1197
TargetInstrInfo - Interface to description of machine instruction set.
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDes...
virtual bool enableSpillageCopyElimination() const
Enable spillage copy elimination in MachineCopyPropagation pass.
virtual const TargetInstrInfo * getInstrInfo() const
virtual const TargetRegisterInfo * getRegisterInfo() const =0
Return the target's register information.
A Use represents the edge between a Value definition and its users.
Definition: Use.h:35
std::pair< iterator, bool > insert(const ValueT &V)
Definition: DenseSet.h:194
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
Definition: DenseSet.h:174
reverse_self_iterator getReverseIterator()
Definition: ilist_node.h:137
self_iterator getIterator()
Definition: ilist_node.h:134
This provides a very simple, boring adaptor for a begin and end iterator into a range type.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
Definition: BitmaskEnum.h:126
DebugType
Definition: COFF.h:717
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
Definition: CallingConv.h:24
Reg
All possible values of the reg field in the ModR/M byte.
initializer< Ty > init(const Ty &Val)
Definition: CommandLine.h:444
PointerTypeMap run(const Module &M)
Compute the PointerTypeMap for the module M.
NodeAddr< DefNode * > Def
Definition: RDFGraph.h:384
This is an optimization pass for GlobalISel generic memory operations.
Definition: AddressRanges.h:18
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
Definition: STLExtras.h:338
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
Definition: STLExtras.h:1702
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
Definition: STLExtras.h:663
LLVM_ABI PreservedAnalyses getMachineFunctionPassPreservedAnalyses()
Returns the minimum set of Analyses that all machine function passes must preserve.
auto reverse(ContainerTy &&C)
Definition: STLExtras.h:428
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition: Debug.cpp:207
auto drop_end(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the last N elements excluded.
Definition: STLExtras.h:345
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
Definition: STLExtras.h:1916
LLVM_ABI MachineFunctionPass * createMachineCopyPropagationPass(bool UseCopyInstr)
LLVM_ABI Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.
LLVM_ABI char & MachineCopyPropagationID
MachineCopyPropagation - This pass performs copy propagation on machine instructions.
const MachineOperand * Source
const MachineOperand * Destination