1//===- LiveInterval.cpp - Live Interval Representation --------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file implements the LiveRange and LiveInterval classes. Given some
10// numbering of each of the machine instructions, an interval [i, j) is said to be a
11// live range for register v if there is no instruction with number j' >= j
12// such that v is live at j' and there is no instruction with number i' < i such
13// that v is live at i'. In this implementation ranges can have holes,
14// i.e. a range might look like [1,20), [50,65), [1000,1001). Each
15// individual segment is represented as an instance of LiveRange::Segment,
16// and the whole range is represented as an instance of LiveRange.
17//
18//===----------------------------------------------------------------------===//
19
20#include "llvm/CodeGen/LiveInterval.h"
21#include "LiveRangeUtils.h"
22#include "RegisterCoalescer.h"
23#include "llvm/ADT/ArrayRef.h"
24#include "llvm/ADT/STLExtras.h"
35#include "llvm/Config/llvm-config.h"
36#include "llvm/MC/LaneBitmask.h"
38#include "llvm/Support/Debug.h"
40#include <algorithm>
41#include <cassert>
42#include <cstddef>
43#include <iterator>
44#include <utility>
45
46using namespace llvm;
47
48namespace {
49
50//===----------------------------------------------------------------------===//
51// Implementation of various methods necessary for calculation of live ranges.
52// The implementation of the methods abstracts from the concrete type of the
53// segment collection.
54//
55// Implementation of the class follows the Template design pattern. The base
56// class contains generic algorithms that call collection-specific methods,
57// which are provided in concrete subclasses. In order to avoid virtual calls
58// these methods are provided by means of C++ template instantiation.
59// The base class calls the methods of the subclass through method impl(),
60// which casts 'this' pointer to the type of the subclass.
61//
62//===----------------------------------------------------------------------===//
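//
// Illustrative sketch (not part of the original file): the CRTP dispatch
// described above boils down to
//
//   template <typename ImplT> struct Base {
//     void run() { static_cast<ImplT *>(this)->step(); } // resolved statically
//   };
//   struct Impl : Base<Impl> {
//     void step() { /* collection-specific work */ }
//   };
//
// impl() below plays the role of the static_cast, so segments() and the
// find/insert helpers bind to the vector- or set-based subclass at compile
// time, without virtual dispatch.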
63
64template <typename ImplT, typename IteratorT, typename CollectionT>
65class CalcLiveRangeUtilBase {
66protected:
67 LiveRange *LR;
68
69protected:
70 CalcLiveRangeUtilBase(LiveRange *LR) : LR(LR) {}
71
72public:
73 using Segment = LiveRange::Segment;
74 using iterator = IteratorT;
75
76 /// A counterpart of LiveRange::createDeadDef: Make sure the range has a
77 /// value defined at @p Def.
78 /// If @p ForVNI is null, and there is no value defined at @p Def, a new
79 /// value will be allocated using @p VNInfoAllocator.
80 /// If @p ForVNI is null, the return value is the value defined at @p Def,
81 /// either a pre-existing one, or the one newly created.
82 /// If @p ForVNI is not null, then @p Def should be the location where
83 /// @p ForVNI is defined. If the range does not have a value defined at
84 /// @p Def, the value @p ForVNI will be used instead of allocating a new
85 /// one. If the range already has a value defined at @p Def, it must be
86 /// same as @p ForVNI. In either case, @p ForVNI will be the return value.
87 VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator *VNInfoAllocator,
88 VNInfo *ForVNI) {
89 assert(!Def.isDead() && "Cannot define a value at the dead slot");
90 assert((!ForVNI || ForVNI->def == Def) &&
91 "If ForVNI is specified, it must match Def");
92 iterator I = impl().find(Def);
93 if (I == segments().end()) {
94 VNInfo *VNI = ForVNI ? ForVNI : LR->getNextValue(Def, *VNInfoAllocator);
95 impl().insertAtEnd(Segment(Def, Def.getDeadSlot(), VNI));
96 return VNI;
97 }
98
99 Segment *S = segmentAt(I);
100 if (SlotIndex::isSameInstr(Def, S->start)) {
101 assert((!ForVNI || ForVNI == S->valno) && "Value number mismatch");
102 assert(S->valno->def == S->start && "Inconsistent existing value def");
103
104 // It is possible to have both normal and early-clobber defs of the same
105 // register on an instruction. It doesn't make a lot of sense, but it is
106 // possible to specify in inline assembly.
107 //
108 // Just convert everything to early-clobber.
109 Def = std::min(Def, S->start);
110 if (Def != S->start)
111 S->start = S->valno->def = Def;
112 return S->valno;
113 }
114 assert(SlotIndex::isEarlierInstr(Def, S->start) && "Already live at def");
115 VNInfo *VNI = ForVNI ? ForVNI : LR->getNextValue(Def, *VNInfoAllocator);
116 segments().insert(I, Segment(Def, Def.getDeadSlot(), VNI));
117 return VNI;
118 }
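
// Example (illustrative slot indexes, not taken from real output): calling
// createDeadDef at register slot 16r on a range with no value there inserts
// the dead segment [16r,16d:0), covering only the def slot. Calling it again
// for the same instruction returns the existing value number instead of
// allocating a new one, moving the start earlier if the new Def is the
// early-clobber slot of that instruction.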
119
120 VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Use) {
121 if (segments().empty())
122 return nullptr;
123 iterator I =
124 impl().findInsertPos(Segment(Use.getPrevSlot(), Use, nullptr));
125 if (I == segments().begin())
126 return nullptr;
127 --I;
128 if (I->end <= StartIdx)
129 return nullptr;
130 if (I->end < Use)
131 extendSegmentEndTo(I, Use);
132 return I->valno;
133 }
134
135 std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
136 SlotIndex StartIdx, SlotIndex Use) {
137 if (segments().empty())
138 return std::make_pair(nullptr, false);
139 SlotIndex BeforeUse = Use.getPrevSlot();
140 iterator I = impl().findInsertPos(Segment(BeforeUse, Use, nullptr));
141 if (I == segments().begin())
142 return std::make_pair(nullptr, LR->isUndefIn(Undefs, StartIdx, BeforeUse));
143 --I;
144 if (I->end <= StartIdx)
145 return std::make_pair(nullptr, LR->isUndefIn(Undefs, StartIdx, BeforeUse));
146 if (I->end < Use) {
147 if (LR->isUndefIn(Undefs, I->end, BeforeUse))
148 return std::make_pair(nullptr, true);
149 extendSegmentEndTo(I, Use);
150 }
151 return std::make_pair(I->valno, false);
152 }
153
154 /// This method is used when we want to extend the segment specified
155 /// by I to end at the specified endpoint. To do this, we should
156 /// merge and eliminate all segments that this will overlap
157 /// with. The iterator is not invalidated.
158 void extendSegmentEndTo(iterator I, SlotIndex NewEnd) {
159 assert(I != segments().end() && "Not a valid segment!");
160 Segment *S = segmentAt(I);
161 VNInfo *ValNo = I->valno;
162
163 // Search for the first segment that we can't merge with.
164 iterator MergeTo = std::next(I);
165 for (; MergeTo != segments().end() && NewEnd >= MergeTo->end; ++MergeTo)
166 assert(MergeTo->valno == ValNo && "Cannot merge with differing values!");
167
168 // If NewEnd was in the middle of a segment, make sure to get its endpoint.
169 S->end = std::max(NewEnd, std::prev(MergeTo)->end);
170
171 // If the newly formed segment now touches the segment after it and if they
172 // have the same value number, merge the two segments into one segment.
173 if (MergeTo != segments().end() && MergeTo->start <= I->end &&
174 MergeTo->valno == ValNo) {
175 S->end = MergeTo->end;
176 ++MergeTo;
177 }
178
179 // Erase any dead segments.
180 segments().erase(std::next(I), MergeTo);
181 }
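
// Example (illustrative indexes): given segments [4r,8r:0)[8r,12r:0)[20r,24r:0),
// calling extendSegmentEndTo on the first segment with NewEnd = 14r merges the
// first two segments into [4r,14r:0) and leaves [20r,24r:0) untouched.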
182
183 /// This method is used when we want to extend the segment specified
184 /// by I to start at the specified endpoint. To do this, we should
185 /// merge and eliminate all segments that this will overlap with.
186 iterator extendSegmentStartTo(iterator I, SlotIndex NewStart) {
187 assert(I != segments().end() && "Not a valid segment!");
188 Segment *S = segmentAt(I);
189 VNInfo *ValNo = I->valno;
190
191 // Search for the first segment that we can't merge with.
192 iterator MergeTo = I;
193 do {
194 if (MergeTo == segments().begin()) {
195 S->start = NewStart;
196 segments().erase(MergeTo, I);
197 return I;
198 }
199 assert(MergeTo->valno == ValNo && "Cannot merge with differing values!");
200 --MergeTo;
201 } while (NewStart <= MergeTo->start);
202
203 // If we start in the middle of another segment, just delete a range and
204 // extend that segment.
205 if (MergeTo->end >= NewStart && MergeTo->valno == ValNo) {
206 segmentAt(MergeTo)->end = S->end;
207 } else {
208 // Otherwise, extend the segment right after.
209 ++MergeTo;
210 Segment *MergeToSeg = segmentAt(MergeTo);
211 MergeToSeg->start = NewStart;
212 MergeToSeg->end = S->end;
213 }
214
215 segments().erase(std::next(MergeTo), std::next(I));
216 return MergeTo;
217 }
218
219 iterator addSegment(Segment S) {
220 SlotIndex Start = S.start, End = S.end;
221 iterator I = impl().findInsertPos(S);
222
223 // If the inserted segment starts in the middle or right at the end of
224 // another segment, just extend that segment to contain the segment of S.
225 if (I != segments().begin()) {
226 iterator B = std::prev(I);
227 if (S.valno == B->valno) {
228 if (B->start <= Start && B->end >= Start) {
229 extendSegmentEndTo(B, End);
230 return B;
231 }
232 } else {
233 // Check to make sure that we are not overlapping two live segments with
234 // different valno's.
235 assert(B->end <= Start &&
236 "Cannot overlap two segments with differing ValID's"
237 " (did you def the same reg twice in a MachineInstr?)");
238 }
239 }
240
241 // Otherwise, if this segment ends in the middle of, or right next
242 // to, another segment, merge it into that segment.
243 if (I != segments().end()) {
244 if (S.valno == I->valno) {
245 if (I->start <= End) {
246 I = extendSegmentStartTo(I, Start);
247
248 // If S is a complete superset of a segment, we may need to grow its
249 // endpoint as well.
250 if (End > I->end)
251 extendSegmentEndTo(I, End);
252 return I;
253 }
254 } else {
255 // Check to make sure that we are not overlapping two live segments with
256 // different valno's.
257 assert(I->start >= End &&
258 "Cannot overlap two segments with differing ValID's");
259 }
260 }
261
262 // Otherwise, this is just a new segment that doesn't interact with
263 // anything.
264 // Insert it.
265 return segments().insert(I, S);
266 }
267
268private:
269 ImplT &impl() { return *static_cast<ImplT *>(this); }
270
271 CollectionT &segments() { return impl().segmentsColl(); }
272
273 Segment *segmentAt(iterator I) { return const_cast<Segment *>(&(*I)); }
274};
275
276//===----------------------------------------------------------------------===//
277// Instantiation of the methods for calculation of live ranges
278// based on a segment vector.
279//===----------------------------------------------------------------------===//
280
281class CalcLiveRangeUtilVector;
282using CalcLiveRangeUtilVectorBase =
283 CalcLiveRangeUtilBase<CalcLiveRangeUtilVector, LiveRange::iterator,
284 LiveRange::Segments>;
285
286class CalcLiveRangeUtilVector : public CalcLiveRangeUtilVectorBase {
287public:
288 CalcLiveRangeUtilVector(LiveRange *LR) : CalcLiveRangeUtilVectorBase(LR) {}
289
290private:
291 friend CalcLiveRangeUtilVectorBase;
292
293 LiveRange::Segments &segmentsColl() { return LR->segments; }
294
295 void insertAtEnd(const Segment &S) { LR->segments.push_back(S); }
296
297 iterator find(SlotIndex Pos) { return LR->find(Pos); }
298
299 iterator findInsertPos(Segment S) { return llvm::upper_bound(*LR, S.start); }
300};
301
302//===----------------------------------------------------------------------===//
303// Instantiation of the methods for calculation of live ranges
304// based on a segment set.
305//===----------------------------------------------------------------------===//
306
307class CalcLiveRangeUtilSet;
308using CalcLiveRangeUtilSetBase =
309 CalcLiveRangeUtilBase<CalcLiveRangeUtilSet, LiveRange::SegmentSet::iterator,
310 LiveRange::SegmentSet>;
311
312class CalcLiveRangeUtilSet : public CalcLiveRangeUtilSetBase {
313public:
314 CalcLiveRangeUtilSet(LiveRange *LR) : CalcLiveRangeUtilSetBase(LR) {}
315
316private:
317 friend CalcLiveRangeUtilSetBase;
318
319 LiveRange::SegmentSet &segmentsColl() { return *LR->segmentSet; }
320
321 void insertAtEnd(const Segment &S) {
322 LR->segmentSet->insert(LR->segmentSet->end(), S);
323 }
324
325 iterator find(SlotIndex Pos) {
326 iterator I =
327 LR->segmentSet->upper_bound(Segment(Pos, Pos.getNextSlot(), nullptr));
328 if (I == LR->segmentSet->begin())
329 return I;
330 iterator PrevI = std::prev(I);
331 if (Pos < (*PrevI).end)
332 return PrevI;
333 return I;
334 }
335
336 iterator findInsertPos(Segment S) {
337 iterator I = LR->segmentSet->upper_bound(S);
338 if (I != LR->segmentSet->end() && !(S.start < *I))
339 ++I;
340 return I;
341 }
342};
343
344} // end anonymous namespace
345
346//===----------------------------------------------------------------------===//
347// LiveRange methods
348//===----------------------------------------------------------------------===//
349
350LiveRange::iterator LiveRange::find(SlotIndex Pos) {
351 return llvm::partition_point(*this,
352 [&](const Segment &X) { return X.end <= Pos; });
353}
354
355VNInfo *LiveRange::createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc) {
356 // Use the segment set, if it is available.
357 if (segmentSet != nullptr)
358 return CalcLiveRangeUtilSet(this).createDeadDef(Def, &VNIAlloc, nullptr);
359 // Otherwise use the segment vector.
360 return CalcLiveRangeUtilVector(this).createDeadDef(Def, &VNIAlloc, nullptr);
361}
362
363VNInfo *LiveRange::createDeadDef(VNInfo *VNI) {
364 // Use the segment set, if it is available.
365 if (segmentSet != nullptr)
366 return CalcLiveRangeUtilSet(this).createDeadDef(VNI->def, nullptr, VNI);
367 // Otherwise use the segment vector.
368 return CalcLiveRangeUtilVector(this).createDeadDef(VNI->def, nullptr, VNI);
369}
370
371// overlaps - Return true if the intersection of the two live ranges is
372// not empty.
373//
374// An example for overlaps():
375//
376// 0: A = ...
377// 4: B = ...
378// 8: C = A + B ;; last use of A
379//
380// The live ranges should look like:
381//
382// A = [3, 11)
383// B = [7, x)
384// C = [11, y)
385//
386// A->overlaps(C) should return false since we want to be able to join
387// A and C.
388//
389bool LiveRange::overlapsFrom(const LiveRange &other,
390 const_iterator StartPos) const {
391 assert(!empty() && "empty range");
392 const_iterator i = begin();
393 const_iterator ie = end();
394 const_iterator j = StartPos;
395 const_iterator je = other.end();
396
397 assert((StartPos->start <= i->start || StartPos == other.begin()) &&
398 StartPos != other.end() && "Bogus start position hint!");
399
400 if (i->start < j->start) {
401 i = std::upper_bound(i, ie, j->start);
402 if (i != begin()) --i;
403 } else if (j->start < i->start) {
404 ++StartPos;
405 if (StartPos != other.end() && StartPos->start <= i->start) {
406 assert(StartPos < other.end() && i < end());
407 j = std::upper_bound(j, je, i->start);
408 if (j != other.begin()) --j;
409 }
410 } else {
411 return true;
412 }
413
414 if (j == je) return false;
415
416 while (i != ie) {
417 if (i->start > j->start) {
418 std::swap(i, j);
419 std::swap(ie, je);
420 }
421
422 if (i->end > j->start)
423 return true;
424 ++i;
425 }
426
427 return false;
428}
429
430bool LiveRange::overlaps(const LiveRange &Other, const CoalescerPair &CP,
431 const SlotIndexes &Indexes) const {
432 assert(!empty() && "empty range");
433 if (Other.empty())
434 return false;
435
436 // Use binary searches to find initial positions.
437 const_iterator I = find(Other.beginIndex());
438 const_iterator IE = end();
439 if (I == IE)
440 return false;
441 const_iterator J = Other.find(I->start);
442 const_iterator JE = Other.end();
443 if (J == JE)
444 return false;
445
446 while (true) {
447 // J has just been advanced to satisfy:
448 assert(J->end > I->start);
449 // Check for an overlap.
450 if (J->start < I->end) {
451 // I and J are overlapping. Find the later start.
452 SlotIndex Def = std::max(I->start, J->start);
453 // Allow the overlap if Def is a coalescable copy.
454 if (Def.isBlock() ||
455 !CP.isCoalescable(Indexes.getInstructionFromIndex(Def)))
456 return true;
457 }
458 // Advance the iterator that ends first to check for more overlaps.
459 if (J->end > I->end) {
460 std::swap(I, J);
461 std::swap(IE, JE);
462 }
463 // Advance J until J->end > I->start.
464 do
465 if (++J == JE)
466 return false;
467 while (J->end <= I->start);
468 }
469}
470
471/// overlaps - Return true if the live range overlaps an interval specified
472/// by [Start, End).
473bool LiveRange::overlaps(SlotIndex Start, SlotIndex End) const {
474 assert(Start < End && "Invalid range");
475 const_iterator I = lower_bound(*this, End);
476 return I != begin() && (--I)->end > Start;
477}
478
479bool LiveRange::covers(const LiveRange &Other) const {
480 if (empty())
481 return Other.empty();
482
483 const_iterator I = begin();
484 for (const Segment &O : Other.segments) {
485 I = advanceTo(I, O.start);
486 if (I == end() || I->start > O.start)
487 return false;
488
489 // Check adjacent live segments and see if we can get behind O.end.
490 while (I->end < O.end) {
491 const_iterator Last = I;
492 // Get next segment and abort if it was not adjacent.
493 ++I;
494 if (I == end() || Last->end != I->start)
495 return false;
496 }
497 }
498 return true;
499}
500
501/// ValNo is dead, remove it. If it is the largest value number, just nuke it
502/// (and any other deleted values neighboring it), otherwise mark it as ~1U so
503/// it can be nuked later.
504void LiveRange::markValNoForDeletion(VNInfo *ValNo) {
505 if (ValNo->id == getNumValNums()-1) {
506 do {
507 valnos.pop_back();
508 } while (!valnos.empty() && valnos.back()->isUnused());
509 } else {
510 ValNo->markUnused();
511 }
512}
513
514/// RenumberValues - Renumber all values in order of appearance and delete the
515/// remaining unused values.
516void LiveRange::RenumberValues() {
517 SmallPtrSet<VNInfo*, 8> Seen;
518 valnos.clear();
519 for (const Segment &S : segments) {
520 VNInfo *VNI = S.valno;
521 if (!Seen.insert(VNI).second)
522 continue;
523 assert(!VNI->isUnused() && "Unused valno used by live segment");
524 VNI->id = (unsigned)valnos.size();
525 valnos.push_back(VNI);
526 }
527}
528
529void LiveRange::addSegmentToSet(Segment S) {
530 CalcLiveRangeUtilSet(this).addSegment(S);
531}
532
533LiveRange::iterator LiveRange::addSegment(Segment S) {
534 // Use the segment set, if it is available.
535 if (segmentSet != nullptr) {
536 addSegmentToSet(S);
537 return end();
538 }
539 // Otherwise use the segment vector.
540 return CalcLiveRangeUtilVector(this).addSegment(S);
541}
542
543void LiveRange::append(const Segment S) {
544 // Check that the segment belongs to the back of the list.
545 assert(segments.empty() || segments.back().end <= S.start);
546 segments.push_back(S);
547}
548
549std::pair<VNInfo*,bool> LiveRange::extendInBlock(ArrayRef<SlotIndex> Undefs,
550 SlotIndex StartIdx, SlotIndex Kill) {
551 // Use the segment set, if it is available.
552 if (segmentSet != nullptr)
553 return CalcLiveRangeUtilSet(this).extendInBlock(Undefs, StartIdx, Kill);
554 // Otherwise use the segment vector.
555 return CalcLiveRangeUtilVector(this).extendInBlock(Undefs, StartIdx, Kill);
556}
557
558VNInfo *LiveRange::extendInBlock(SlotIndex StartIdx, SlotIndex Kill) {
559 // Use the segment set, if it is available.
560 if (segmentSet != nullptr)
561 return CalcLiveRangeUtilSet(this).extendInBlock(StartIdx, Kill);
562 // Otherwise use the segment vector.
563 return CalcLiveRangeUtilVector(this).extendInBlock(StartIdx, Kill);
564}
565
566void LiveRange::removeSegment(SlotIndex Start, SlotIndex End,
567 bool RemoveDeadValNo) {
568 // Find the Segment containing this span.
569 iterator I = find(Start);
570
571 // No Segment found, so nothing to do.
572 if (I == end())
573 return;
574
575 assert(I->containsInterval(Start, End)
576 && "Segment is not entirely in range!");
577
578 // If the span we are removing is at the start of the Segment, adjust it.
579 VNInfo *ValNo = I->valno;
580 if (I->start == Start) {
581 if (I->end == End) {
582 segments.erase(I); // Removed the whole Segment.
583
584 if (RemoveDeadValNo)
585 removeValNoIfDead(ValNo);
586 } else
587 I->start = End;
588 return;
589 }
590
591 // Otherwise if the span we are removing is at the end of the Segment,
592 // adjust the other way.
593 if (I->end == End) {
594 I->end = Start;
595 return;
596 }
597
598 // Otherwise, we are splitting the Segment into two pieces.
599 SlotIndex OldEnd = I->end;
600 I->end = Start; // Trim the old segment.
601
602 // Insert the new one.
603 segments.insert(std::next(I), Segment(End, OldEnd, ValNo));
604}
605
606LiveRange::iterator LiveRange::removeSegment(iterator I, bool RemoveDeadValNo) {
607 VNInfo *ValNo = I->valno;
608 I = segments.erase(I);
609 if (RemoveDeadValNo)
610 removeValNoIfDead(ValNo);
611 return I;
612}
613
614void LiveRange::removeValNoIfDead(VNInfo *ValNo) {
615 if (none_of(*this, [=](const Segment &S) { return S.valno == ValNo; }))
616 markValNoForDeletion(ValNo);
617}
618
619/// removeValNo - Remove all the segments defined by the specified value#.
620/// Also remove the value# from value# list.
621void LiveRange::removeValNo(VNInfo *ValNo) {
622 if (empty()) return;
623 llvm::erase_if(segments,
624 [ValNo](const Segment &S) { return S.valno == ValNo; });
625 // Now that ValNo is dead, remove it.
626 markValNoForDeletion(ValNo);
627}
628
629void LiveRange::join(LiveRange &Other,
630 const int *LHSValNoAssignments,
631 const int *RHSValNoAssignments,
632 SmallVectorImpl<VNInfo *> &NewVNInfo) {
633 assert(verify());
634 assert(Other.verify());
635
636 // Determine if any of our values are mapped. This is uncommon, so we want
637 // to avoid the range scan if not.
638 bool MustMapCurValNos = false;
639 unsigned NumVals = getNumValNums();
640 unsigned NumNewVals = NewVNInfo.size();
641 for (unsigned i = 0; i != NumVals; ++i) {
642 unsigned LHSValID = LHSValNoAssignments[i];
643 if (i != LHSValID ||
644 (NewVNInfo[LHSValID] && NewVNInfo[LHSValID] != getValNumInfo(i))) {
645 MustMapCurValNos = true;
646 break;
647 }
648 }
649
650 // If we have to apply a mapping to our base range assignment, rewrite it now.
651 if (MustMapCurValNos && !empty()) {
652 // Map the first live range.
653
654 iterator OutIt = begin();
655 OutIt->valno = NewVNInfo[LHSValNoAssignments[OutIt->valno->id]];
656 for (iterator I = std::next(OutIt), E = end(); I != E; ++I) {
657 VNInfo* nextValNo = NewVNInfo[LHSValNoAssignments[I->valno->id]];
658 assert(nextValNo && "Huh?");
659
660 // If this live range has the same value # as its immediate predecessor,
661 // and if they are neighbors, remove one Segment. This happens when we
662 // have [0,4:0)[4,7:1) and map 0/1 onto the same value #.
663 if (OutIt->valno == nextValNo && OutIt->end == I->start) {
664 OutIt->end = I->end;
665 } else {
666 // Didn't merge. Move OutIt to the next segment,
667 ++OutIt;
668 OutIt->valno = nextValNo;
669 if (OutIt != I) {
670 OutIt->start = I->start;
671 OutIt->end = I->end;
672 }
673 }
674 }
675 // If we merge some segments, chop off the end.
676 ++OutIt;
677 segments.erase(OutIt, end());
678 }
679
680 // Rewrite Other values before changing the VNInfo ids.
681 // This can leave Other in an invalid state because we're not coalescing
682 // touching segments that now have identical values. That's OK since Other is
683 // not supposed to be valid after calling join();
684 for (Segment &S : Other.segments)
685 S.valno = NewVNInfo[RHSValNoAssignments[S.valno->id]];
686
687 // Update val# info. Renumber them and make sure they all belong to this
688 // LiveRange now. Also remove dead val#'s.
689 unsigned NumValNos = 0;
690 for (unsigned i = 0; i < NumNewVals; ++i) {
691 VNInfo *VNI = NewVNInfo[i];
692 if (VNI) {
693 if (NumValNos >= NumVals)
694 valnos.push_back(VNI);
695 else
696 valnos[NumValNos] = VNI;
697 VNI->id = NumValNos++; // Renumber val#.
698 }
699 }
700 if (NumNewVals < NumVals)
701 valnos.resize(NumNewVals); // shrinkify
702
703 // Okay, now insert the RHS live segments into the LHS.
704 LiveRangeUpdater Updater(this);
705 for (Segment &S : Other.segments)
706 Updater.add(S);
707}
708
709/// Merge all of the segments in RHS into this live range as the specified
710/// value number. The segments in RHS are allowed to overlap with segments in
711/// the current range, but only if the overlapping segments have the
712/// specified value number.
713void LiveRange::MergeSegmentsInAsValue(const LiveRange &RHS,
714 VNInfo *LHSValNo) {
715 LiveRangeUpdater Updater(this);
716 for (const Segment &S : RHS.segments)
717 Updater.add(S.start, S.end, LHSValNo);
718}
719
720/// MergeValueInAsValue - Merge all of the live segments of a specific val#
721/// in RHS into this live range as the specified value number.
722/// The segments in RHS are allowed to overlap with segments in the
723/// current range; it will replace the value numbers of the overlapped
724/// segments with the specified value number.
725void LiveRange::MergeValueInAsValue(const LiveRange &RHS,
726 const VNInfo *RHSValNo,
727 VNInfo *LHSValNo) {
728 LiveRangeUpdater Updater(this);
729 for (const Segment &S : RHS.segments)
730 if (S.valno == RHSValNo)
731 Updater.add(S.start, S.end, LHSValNo);
732}
733
734/// MergeValueNumberInto - This method is called when two value numbers
735/// are found to be equivalent. This eliminates V1, replacing all
736/// segments that use the V1 value number with the V2 value number. This can
737/// cause merging of V1/V2 value numbers and compaction of the value space.
738VNInfo *LiveRange::MergeValueNumberInto(VNInfo *V1, VNInfo *V2) {
739 assert(V1 != V2 && "Identical value#'s are always equivalent!");
740
741 // This code actually merges the (numerically) larger value number into the
742 // smaller value number, which is likely to allow us to compactify the value
743 // space. The only thing we have to be careful of is to preserve the
744 // instruction that defines the result value.
745
746 // Make sure V2 is smaller than V1.
747 if (V1->id < V2->id) {
748 V1->copyFrom(*V2);
749 std::swap(V1, V2);
750 }
751
752 // Merge V1 segments into V2.
753 for (iterator I = begin(); I != end(); ) {
754 iterator S = I++;
755 if (S->valno != V1) continue; // Not a V1 Segment.
756
757 // Okay, we found a V1 live range. If it had a previous, touching, V2 live
758 // range, extend it.
759 if (S != begin()) {
760 iterator Prev = S-1;
761 if (Prev->valno == V2 && Prev->end == S->start) {
762 Prev->end = S->end;
763
764 // Erase this live-range.
765 segments.erase(S);
766 I = Prev+1;
767 S = Prev;
768 }
769 }
770
771 // Okay, now we have a V1 or V2 live range that is maximally merged forward.
772 // Ensure that it is a V2 live-range.
773 S->valno = V2;
774
775 // If we can merge it into later V2 segments, do so now. We ignore any
776 // following V1 segments, as they will be merged in subsequent iterations
777 // of the loop.
778 if (I != end()) {
779 if (I->start == S->end && I->valno == V2) {
780 S->end = I->end;
781 segments.erase(I);
782 I = S+1;
783 }
784 }
785 }
786
787 // Now that V1 is dead, remove it.
788 markValNoForDeletion(V1);
789
790 return V2;
791}
792
793void LiveRange::flushSegmentSet() {
794 assert(segmentSet != nullptr && "segment set must have been created");
795 assert(
796 segments.empty() &&
797 "segment set can be used only initially before switching to the array");
798 segments.append(segmentSet->begin(), segmentSet->end());
799 segmentSet = nullptr;
800 assert(verify());
801}
802
803bool LiveRange::isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const {
804 ArrayRef<SlotIndex>::iterator SlotI = Slots.begin();
805 ArrayRef<SlotIndex>::iterator SlotE = Slots.end();
806
807 // If there are no regmask slots, we have nothing to search.
808 if (SlotI == SlotE)
809 return false;
810
811 // Start our search at the first segment that ends after the first slot.
812 const_iterator SegmentI = find(*SlotI);
813 const_iterator SegmentE = end();
814
815 // If there are no segments that end after the first slot, we're done.
816 if (SegmentI == SegmentE)
817 return false;
818
819 // Look for each slot in the live range.
820 for ( ; SlotI != SlotE; ++SlotI) {
821 // Go to the next segment that ends after the current slot.
822 // The slot may be within a hole in the range.
823 SegmentI = advanceTo(SegmentI, *SlotI);
824 if (SegmentI == SegmentE)
825 return false;
826
827 // If this segment contains the slot, we're done.
828 if (SegmentI->contains(*SlotI))
829 return true;
830 // Otherwise, look for the next slot.
831 }
832
833 // We didn't find a segment containing any of the slots.
834 return false;
835}
836
837void LiveInterval::freeSubRange(SubRange *S) {
838 S->~SubRange();
839 // Memory was allocated with BumpPtr allocator and is not freed here.
840}
841
842void LiveInterval::removeEmptySubRanges() {
843 SubRange **NextPtr = &SubRanges;
844 SubRange *I = *NextPtr;
845 while (I != nullptr) {
846 if (!I->empty()) {
847 NextPtr = &I->Next;
848 I = *NextPtr;
849 continue;
850 }
851 // Skip empty subranges until we find the first nonempty one.
852 do {
853 SubRange *Next = I->Next;
854 freeSubRange(I);
855 I = Next;
856 } while (I != nullptr && I->empty());
857 *NextPtr = I;
858 }
859}
860
861void LiveInterval::clearSubRanges() {
862 for (SubRange *I = SubRanges, *Next; I != nullptr; I = Next) {
863 Next = I->Next;
864 freeSubRange(I);
865 }
866 SubRanges = nullptr;
867}
868
869/// For each VNI in \p SR, check whether or not that value defines part
870/// of the mask described by \p LaneMask and, if not, remove that value
871/// from \p SR.
872static void stripValuesNotDefiningMask(Register Reg, LiveInterval::SubRange &SR,
873 LaneBitmask LaneMask,
874 const SlotIndexes &Indexes,
875 const TargetRegisterInfo &TRI,
876 unsigned ComposeSubRegIdx) {
877 // Phys reg should not be tracked at subreg level.
878 // Same for noreg (Reg == 0).
879 if (!Reg || !Reg.isVirtual())
880 return;
881 // Remove the values that don't define those lanes.
882 SmallVector<VNInfo *, 8> ToBeRemoved;
883 for (VNInfo *VNI : SR.valnos) {
884 if (VNI->isUnused())
885 continue;
886 // PHI definitions don't have MI attached, so there is nothing
887 // we can use to strip the VNI.
888 if (VNI->isPHIDef())
889 continue;
890 const MachineInstr *MI = Indexes.getInstructionFromIndex(VNI->def);
891 assert(MI && "Cannot find the definition of a value");
892 bool hasDef = false;
893 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
894 if (!MOI->isReg() || !MOI->isDef())
895 continue;
896 if (MOI->getReg() != Reg)
897 continue;
898 LaneBitmask OrigMask = TRI.getSubRegIndexLaneMask(MOI->getSubReg());
899 LaneBitmask ExpectedDefMask =
900 ComposeSubRegIdx
901 ? TRI.composeSubRegIndexLaneMask(ComposeSubRegIdx, OrigMask)
902 : OrigMask;
903 if ((ExpectedDefMask & LaneMask).none())
904 continue;
905 hasDef = true;
906 break;
907 }
908
909 if (!hasDef)
910 ToBeRemoved.push_back(VNI);
911 }
912 for (VNInfo *VNI : ToBeRemoved)
913 SR.removeValNo(VNI);
914
915 // If the subrange is empty at this point, the MIR is invalid. Do not assert
916 // and let the verifier catch this case.
917}
918
919void LiveInterval::refineSubRanges(
920 BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
921 std::function<void(LiveInterval::SubRange &)> Apply,
922 const SlotIndexes &Indexes, const TargetRegisterInfo &TRI,
923 unsigned ComposeSubRegIdx) {
924 LaneBitmask ToApply = LaneMask;
925 for (SubRange &SR : subranges()) {
926 LaneBitmask SRMask = SR.LaneMask;
927 LaneBitmask Matching = SRMask & LaneMask;
928 if (Matching.none())
929 continue;
930
931 SubRange *MatchingRange;
932 if (SRMask == Matching) {
933 // The subrange fits (it does not cover bits outside \p LaneMask).
934 MatchingRange = &SR;
935 } else {
936 // We have to split the subrange into a matching and non-matching part.
937 // Reduce lanemask of existing lane to non-matching part.
938 SR.LaneMask = SRMask & ~Matching;
939 // Create a new subrange for the matching part
940 MatchingRange = createSubRangeFrom(Allocator, Matching, SR);
941 // Now that the subrange is split in half, make sure each half
942 // only keeps the VNIs that touch its part of the lane mask.
943 stripValuesNotDefiningMask(reg(), *MatchingRange, Matching, Indexes, TRI,
944 ComposeSubRegIdx);
945 stripValuesNotDefiningMask(reg(), SR, SR.LaneMask, Indexes, TRI,
946 ComposeSubRegIdx);
947 }
948 Apply(*MatchingRange);
949 ToApply &= ~Matching;
950 }
951 // Create a new subrange if there are uncovered bits left.
952 if (ToApply.any()) {
953 SubRange *NewRange = createSubRange(Allocator, ToApply);
954 Apply(*NewRange);
955 }
956}
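
// Example (illustrative lane masks): if the interval currently has a single
// subrange with lane mask 0b0011 and refineSubRanges is called with
// LaneMask = 0b0001, the existing subrange is narrowed to 0b0010, a new
// subrange with mask 0b0001 is created as a copy of it, and Apply is invoked
// on the new matching subrange only.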
957
958unsigned LiveInterval::getSize() const {
959 unsigned Sum = 0;
960 for (const Segment &S : segments)
961 Sum += S.start.distance(S.end);
962 return Sum;
963}
964
965void LiveInterval::computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
966 LaneBitmask LaneMask,
967 const MachineRegisterInfo &MRI,
968 const SlotIndexes &Indexes) const {
969 assert(reg().isVirtual());
970 LaneBitmask VRegMask = MRI.getMaxLaneMaskForVReg(reg());
971 assert((VRegMask & LaneMask).any());
972 const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
973 for (const MachineOperand &MO : MRI.def_operands(reg())) {
974 if (!MO.isUndef())
975 continue;
976 unsigned SubReg = MO.getSubReg();
977 assert(SubReg != 0 && "Undef should only be set on subreg defs");
978 LaneBitmask DefMask = TRI.getSubRegIndexLaneMask(SubReg);
979 LaneBitmask UndefMask = VRegMask & ~DefMask;
980 if ((UndefMask & LaneMask).any()) {
981 const MachineInstr &MI = *MO.getParent();
982 bool EarlyClobber = MO.isEarlyClobber();
983 SlotIndex Pos = Indexes.getInstructionIndex(MI).getRegSlot(EarlyClobber);
984 Undefs.push_back(Pos);
985 }
986 }
987}
988
989raw_ostream& llvm::operator<<(raw_ostream& OS, const LiveRange::Segment &S) {
990 return OS << '[' << S.start << ',' << S.end << ':' << S.valno->id << ')';
991}
992
993#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
994LLVM_DUMP_METHOD void LiveRange::Segment::dump() const {
995 dbgs() << *this << '\n';
996}
997#endif
998
999void VNInfo::print(raw_ostream &OS) const {
1000 OS << id << '@';
1001 if (isUnused()) {
1002 OS << 'x';
1003 } else {
1004 OS << def;
1005 if (isPHIDef())
1006 OS << "-phi";
1007 }
1008}
1009
1010void LiveRange::print(raw_ostream &OS) const {
1011 if (empty())
1012 OS << "EMPTY";
1013 else {
1014 for (const Segment &S : segments) {
1015 OS << S;
1016 assert(S.valno == getValNumInfo(S.valno->id) && "Bad VNInfo");
1017 }
1018 }
1019
1020 // Print value number info.
1021 if (getNumValNums()) {
1022 OS << ' ';
1023 unsigned vnum = 0;
1024 for (const_vni_iterator i = vni_begin(), e = vni_end(); i != e;
1025 ++i, ++vnum) {
1026 const VNInfo *vni = *i;
1027 if (vnum)
1028 OS << ' ';
1029 OS << *vni;
1030 assert(vnum == vni->id && "Bad VNInfo");
1031 }
1032 }
1033}
1034
1035void LiveInterval::SubRange::print(raw_ostream &OS) const {
1036 OS << " L" << PrintLaneMask(LaneMask) << ' '
1037 << static_cast<const LiveRange &>(*this);
1038}
1039
1040void LiveInterval::print(raw_ostream &OS) const {
1041 OS << printReg(reg()) << ' ';
1042 super::print(OS);
1043 // Print subranges
1044 for (const SubRange &SR : subranges())
1045 OS << SR;
1046 OS << " weight:" << Weight;
1047}
1048
1049#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
1050LLVM_DUMP_METHOD void VNInfo::dump() const { dbgs() << *this << '\n'; }
1051
1052LLVM_DUMP_METHOD void LiveRange::dump() const { dbgs() << *this << '\n'; }
1053
1054LLVM_DUMP_METHOD void LiveInterval::SubRange::dump() const {
1055 dbgs() << *this << '\n';
1056}
1057
1058LLVM_DUMP_METHOD void LiveInterval::dump() const {
1059 dbgs() << *this << '\n';
1060}
1061#endif
1062
1063#ifndef NDEBUG
1064bool LiveRange::verify() const {
1065 for (const_iterator I = begin(), E = end(); I != E; ++I) {
1066 if (!I->start.isValid())
1067 return false;
1068 if (!I->end.isValid())
1069 return false;
1070 if (I->start >= I->end)
1071 return false;
1072 if (I->valno == nullptr)
1073 return false;
1074 if (I->valno->id >= valnos.size())
1075 return false;
1076 if (I->valno != valnos[I->valno->id])
1077 return false;
1078 if (std::next(I) != E) {
1079 if (I->end > std::next(I)->start)
1080 return false;
1081 if (I->end == std::next(I)->start) {
1082 if (I->valno == std::next(I)->valno)
1083 return false;
1084 }
1085 }
1086 }
1087
1088 return true;
1089}
1090
1091bool LiveInterval::verify(const MachineRegisterInfo *MRI) const {
1092 if (!super::verify())
1093 return false;
1094
1095 // Make sure SubRanges are fine and LaneMasks are disjoint.
1096 LaneBitmask Mask;
1097 LaneBitmask MaxMask = MRI != nullptr ? MRI->getMaxLaneMaskForVReg(reg())
1098 : LaneBitmask::getAll();
1099 for (const SubRange &SR : subranges()) {
1100 // The subrange lane mask should be disjoint from all previous subrange masks.
1101 if ((Mask & SR.LaneMask).any())
1102 return false;
1103
1104 Mask |= SR.LaneMask;
1105
1106 // The subrange mask must be contained in the maximum lane mask for the vreg.
1107 if ((Mask & ~MaxMask).any())
1108 return false;
1109
1110 // empty subranges must be removed.
1111 if (SR.empty())
1112 return false;
1113
1114 if (!SR.verify())
1115 return false;
1116
1117 // Main liverange should cover subrange.
1118 if (!covers(SR))
1119 return false;
1120 }
1121
1122 return true;
1123}
1124#endif
1125
1126//===----------------------------------------------------------------------===//
1127// LiveRangeUpdater class
1128//===----------------------------------------------------------------------===//
1129//
1130// The LiveRangeUpdater class always maintains these invariants:
1131//
1132// - When LastStart is invalid, Spills is empty and the iterators are invalid.
1133// This is the initial state, and the state created by flush().
1134// In this state, isDirty() returns false.
1135//
1136// Otherwise, segments are kept in three separate areas:
1137//
1138// 1. [begin; WriteI) at the front of LR.
1139// 2. [ReadI; end) at the back of LR.
1140// 3. Spills.
1141//
1142// - LR.begin() <= WriteI <= ReadI <= LR.end().
1143// - Segments in all three areas are fully ordered and coalesced.
1144// - Segments in area 1 precede and can't coalesce with segments in area 2.
1145// - Segments in Spills precede and can't coalesce with segments in area 2.
1146// - No coalescing is possible between segments in Spills and segments in area
1147// 1, and there are no overlapping segments.
1148//
1149// The segments in Spills are not ordered with respect to the segments in area
1150// 1. They need to be merged.
1151//
1152// When they exist, Spills.back().start <= LastStart,
1153// and WriteI[-1].start <= LastStart.
1154
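//
// Typical use (sketch, assuming a hypothetical NewSegs container of segments
// added in roughly increasing start order):
//
//   LiveRangeUpdater Updater(&LR);
//   for (const LiveRange::Segment &S : NewSegs)
//     Updater.add(S);          // coalesces on the fly when possible
//   Updater.flush();           // the destructor also flushes
//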
1155#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
1156void LiveRangeUpdater::print(raw_ostream &OS) const {
1157 if (!isDirty()) {
1158 if (LR)
1159 OS << "Clean updater: " << *LR << '\n';
1160 else
1161 OS << "Null updater.\n";
1162 return;
1163 }
1164 assert(LR && "Can't have null LR in dirty updater.");
1165 OS << " updater with gap = " << (ReadI - WriteI)
1166 << ", last start = " << LastStart
1167 << ":\n Area 1:";
1168 for (const auto &S : make_range(LR->begin(), WriteI))
1169 OS << ' ' << S;
1170 OS << "\n Spills:";
1171 for (const LiveRange::Segment &Spill : Spills)
1172 OS << ' ' << Spill;
1173 OS << "\n Area 2:";
1174 for (const auto &S : make_range(ReadI, LR->end()))
1175 OS << ' ' << S;
1176 OS << '\n';
1177}
1178
1179LLVM_DUMP_METHOD void LiveRangeUpdater::dump() const {
1180 print(errs());
1181}
1182#endif
1183
1184// Determine if A and B should be coalesced.
1185static inline bool coalescable(const LiveRange::Segment &A,
1186 const LiveRange::Segment &B) {
1187 assert(A.start <= B.start && "Unordered live segments.");
1188 if (A.end == B.start)
1189 return A.valno == B.valno;
1190 if (A.end < B.start)
1191 return false;
1192 assert(A.valno == B.valno && "Cannot overlap different values");
1193 return true;
1194}
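
// For example (illustrative indexes), [4r,8r:0) and [8r,12r:0) are coalescable
// (adjacent, same value number), while [4r,8r:0) and [10r,12r:1) are not.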
1195
1196void LiveRangeUpdater::add(LiveRange::Segment Seg) {
1197 assert(LR && "Cannot add to a null destination");
1198
1199 // Fall back to the regular add method if the live range
1200 // is using the segment set instead of the segment vector.
1201 if (LR->segmentSet != nullptr) {
1202 LR->addSegmentToSet(Seg);
1203 return;
1204 }
1205
1206 // Flush the state if Start moves backwards.
1207 if (!LastStart.isValid() || LastStart > Seg.start) {
1208 if (isDirty())
1209 flush();
1210 // This brings us to an uninitialized state. Reinitialize.
1211 assert(Spills.empty() && "Leftover spilled segments");
1212 WriteI = ReadI = LR->begin();
1213 }
1214
1215 // Remember start for next time.
1216 LastStart = Seg.start;
1217
1218 // Advance ReadI until it ends after Seg.start.
1219 LiveRange::iterator E = LR->end();
1220 if (ReadI != E && ReadI->end <= Seg.start) {
1221 // First try to close the gap between WriteI and ReadI with spills.
1222 if (ReadI != WriteI)
1223 mergeSpills();
1224 // Then advance ReadI.
1225 if (ReadI == WriteI)
1226 ReadI = WriteI = LR->find(Seg.start);
1227 else
1228 while (ReadI != E && ReadI->end <= Seg.start)
1229 *WriteI++ = *ReadI++;
1230 }
1231
1232 assert(ReadI == E || ReadI->end > Seg.start);
1233
1234 // Check if the ReadI segment begins early.
1235 if (ReadI != E && ReadI->start <= Seg.start) {
1236 assert(ReadI->valno == Seg.valno && "Cannot overlap different values");
1237 // Bail if Seg is completely contained in ReadI.
1238 if (ReadI->end >= Seg.end)
1239 return;
1240 // Coalesce into Seg.
1241 Seg.start = ReadI->start;
1242 ++ReadI;
1243 }
1244
1245 // Coalesce as much as possible from ReadI into Seg.
1246 while (ReadI != E && coalescable(Seg, *ReadI)) {
1247 Seg.end = std::max(Seg.end, ReadI->end);
1248 ++ReadI;
1249 }
1250
1251 // Try coalescing Spills.back() into Seg.
1252 if (!Spills.empty() && coalescable(Spills.back(), Seg)) {
1253 Seg.start = Spills.back().start;
1254 Seg.end = std::max(Spills.back().end, Seg.end);
1255 Spills.pop_back();
1256 }
1257
1258 // Try coalescing Seg into WriteI[-1].
1259 if (WriteI != LR->begin() && coalescable(WriteI[-1], Seg)) {
1260 WriteI[-1].end = std::max(WriteI[-1].end, Seg.end);
1261 return;
1262 }
1263
1264 // Seg doesn't coalesce with anything, and needs to be inserted somewhere.
1265 if (WriteI != ReadI) {
1266 *WriteI++ = Seg;
1267 return;
1268 }
1269
1270 // Finally, append to LR or Spills.
1271 if (WriteI == E) {
1272 LR->segments.push_back(Seg);
1273 WriteI = ReadI = LR->end();
1274 } else
1275 Spills.push_back(Seg);
1276}
1277
1278// Merge as many spilled segments as possible into the gap between WriteI
1279// and ReadI. Advance WriteI to reflect the inserted instructions.
1280void LiveRangeUpdater::mergeSpills() {
1281 // Perform a backwards merge of Spills and [SpillI;WriteI).
1282 size_t GapSize = ReadI - WriteI;
1283 size_t NumMoved = std::min(Spills.size(), GapSize);
1284 LiveRange::iterator Src = WriteI;
1285 LiveRange::iterator Dst = Src + NumMoved;
1286 LiveRange::iterator SpillSrc = Spills.end();
1287 LiveRange::iterator B = LR->begin();
1288
1289 // This is the new WriteI position after merging spills.
1290 WriteI = Dst;
1291
1292 // Now merge Src and Spills backwards.
1293 while (Src != Dst) {
1294 if (Src != B && Src[-1].start > SpillSrc[-1].start)
1295 *--Dst = *--Src;
1296 else
1297 *--Dst = *--SpillSrc;
1298 }
1299 assert(NumMoved == size_t(Spills.end() - SpillSrc));
1300 Spills.erase(SpillSrc, Spills.end());
1301}
1302
1303void LiveRangeUpdater::flush() {
1304 if (!isDirty())
1305 return;
1306 // Clear the dirty state.
1307 LastStart = SlotIndex();
1308
1309 assert(LR && "Cannot add to a null destination");
1310
1311 // Nothing to merge?
1312 if (Spills.empty()) {
1313 LR->segments.erase(WriteI, ReadI);
1314 assert(LR->verify());
1315 return;
1316 }
1317
1318 // Resize the WriteI - ReadI gap to match Spills.
1319 size_t GapSize = ReadI - WriteI;
1320 if (GapSize < Spills.size()) {
1321 // The gap is too small. Make some room.
1322 size_t WritePos = WriteI - LR->begin();
1323 LR->segments.insert(ReadI, Spills.size() - GapSize, LiveRange::Segment());
1324 // This also invalidated ReadI, but it is recomputed below.
1325 WriteI = LR->begin() + WritePos;
1326 } else {
1327 // Shrink the gap if necessary.
1328 LR->segments.erase(WriteI + Spills.size(), ReadI);
1329 }
1330 ReadI = WriteI + Spills.size();
1331 mergeSpills();
1332 assert(LR->verify());
1333}
1334
1335unsigned ConnectedVNInfoEqClasses::Classify(const LiveRange &LR) {
1336 // Create initial equivalence classes.
1337 EqClass.clear();
1338 EqClass.grow(LR.getNumValNums());
1339
1340 const VNInfo *used = nullptr, *unused = nullptr;
1341
1342 // Determine connections.
1343 for (const VNInfo *VNI : LR.valnos) {
1344 // Group all unused values into one class.
1345 if (VNI->isUnused()) {
1346 if (unused)
1347 EqClass.join(unused->id, VNI->id);
1348 unused = VNI;
1349 continue;
1350 }
1351 used = VNI;
1352 if (VNI->isPHIDef()) {
1353 const MachineBasicBlock *MBB = LIS.getMBBFromIndex(VNI->def);
1354 assert(MBB && "Phi-def has no defining MBB");
1355 // Connect to values live out of predecessors.
1356 for (MachineBasicBlock *Pred : MBB->predecessors())
1357 if (const VNInfo *PVNI = LR.getVNInfoBefore(LIS.getMBBEndIdx(Pred)))
1358 EqClass.join(VNI->id, PVNI->id);
1359 } else {
1360 // Normal value defined by an instruction. Check for two-addr redef.
1361 // FIXME: This could be coincidental. Should we really check for a tied
1362 // operand constraint?
1363 // Note that VNI->def may be a use slot for an early clobber def.
1364 if (const VNInfo *UVNI = LR.getVNInfoBefore(VNI->def))
1365 EqClass.join(VNI->id, UVNI->id);
1366 }
1367 }
1368
1369 // Lump all the unused values in with the last used value.
1370 if (used && unused)
1371 EqClass.join(used->id, unused->id);
1372
1373 EqClass.compress();
1374 return EqClass.getNumClasses();
1375}
1376
1377void ConnectedVNInfoEqClasses::Distribute(LiveInterval &LI, LiveInterval *LIV[],
1378 MachineRegisterInfo &MRI) {
1379 // Rewrite instructions.
1380 for (MachineOperand &MO :
1381 llvm::make_early_inc_range(MRI.reg_operands(LI.reg()))) {
1382 MachineInstr *MI = MO.getParent();
1383 const VNInfo *VNI;
1384 if (MI->isDebugValue()) {
1385 // DBG_VALUE instructions don't have slot indexes, so get the index of
1386 // the instruction before them. The value is defined there too.
1387 SlotIndex Idx = LIS.getSlotIndexes()->getIndexBefore(*MI);
1388 VNI = LI.Query(Idx).valueOut();
1389 } else {
1390 SlotIndex Idx = LIS.getInstructionIndex(*MI);
1391 LiveQueryResult LRQ = LI.Query(Idx);
1392 VNI = MO.readsReg() ? LRQ.valueIn() : LRQ.valueDefined();
1393 }
1394 // In the case of an <undef> use that isn't tied to any def, VNI will be
1395 // NULL. If the use is tied to a def, VNI will be the defined value.
1396 if (!VNI)
1397 continue;
1398 if (unsigned EqClass = getEqClass(VNI))
1399 MO.setReg(LIV[EqClass - 1]->reg());
1400 }
1401
1402 // Distribute subregister liveranges.
1403 if (LI.hasSubRanges()) {
1404 unsigned NumComponents = EqClass.getNumClasses();
1405 SmallVector<unsigned, 8> VNIMapping;
1406 SmallVector<LiveInterval::SubRange*, 8> SubRanges;
1407 BumpPtrAllocator &Allocator = LIS.getVNInfoAllocator();
1408 for (LiveInterval::SubRange &SR : LI.subranges()) {
1409 // Create new subranges in the split intervals and construct a mapping
1410 // for the VNInfos in the subrange.
1411 unsigned NumValNos = SR.valnos.size();
1412 VNIMapping.clear();
1413 VNIMapping.reserve(NumValNos);
1414 SubRanges.clear();
1415 SubRanges.resize(NumComponents-1, nullptr);
1416 for (unsigned I = 0; I < NumValNos; ++I) {
1417 const VNInfo &VNI = *SR.valnos[I];
1418 unsigned ComponentNum;
1419 if (VNI.isUnused()) {
1420 ComponentNum = 0;
1421 } else {
1422 const VNInfo *MainRangeVNI = LI.getVNInfoAt(VNI.def);
1423 assert(MainRangeVNI != nullptr
1424 && "SubRange def must have corresponding main range def");
1425 ComponentNum = getEqClass(MainRangeVNI);
1426 if (ComponentNum > 0 && SubRanges[ComponentNum-1] == nullptr) {
1427 SubRanges[ComponentNum-1]
1428 = LIV[ComponentNum-1]->createSubRange(Allocator, SR.LaneMask);
1429 }
1430 }
1431 VNIMapping.push_back(ComponentNum);
1432 }
1433 DistributeRange(SR, SubRanges.data(), VNIMapping);
1434 }
1434 }
1435 LI.removeEmptySubRanges();
1436 }
1437
1438 // Distribute main liverange.
1439 DistributeRange(LI, LIV, EqClass);
1440}