//===- llvm/CodeGen/LiveInterval.h - Interval representation ----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveRange and LiveInterval classes. Given some
// numbering of each of the machine instructions, an interval [i, j) is said to
// be a live range for register v if there is no instruction with number j' >= j
// such that v is live at j' and there is no instruction with number i' < i such
// that v is live at i'. In this implementation ranges can have holes,
// i.e. a range might look like [1,20), [50,65), [1000,1001). Each
// individual segment is represented as an instance of LiveRange::Segment,
// and the whole range is represented as an instance of LiveRange.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_LIVEINTERVAL_H
#define LLVM_CODEGEN_LIVEINTERVAL_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/IntEqClasses.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/Register.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <functional>
#include <memory>
#include <set>
#include <tuple>
#include <utility>

namespace llvm {

  class CoalescerPair;
  class LiveIntervals;
  class MachineRegisterInfo;
  class raw_ostream;

  /// VNInfo - Value Number Information.
  /// This class holds information about a machine level value, including
  /// definition and use points.
  ///
  class VNInfo {
  public:
    using Allocator = BumpPtrAllocator;

    /// The ID number of this value.
    unsigned id;

    /// The index of the defining instruction.
    SlotIndex def;

    /// VNInfo constructor.
    VNInfo(unsigned i, SlotIndex d) : id(i), def(d) {}

    /// VNInfo constructor, copies values from orig, except for the value number.
    VNInfo(unsigned i, const VNInfo &orig) : id(i), def(orig.def) {}

    /// Copy from the parameter into this VNInfo.
    void copyFrom(VNInfo &src) {
      def = src.def;
    }

    /// Returns true if this value is defined by a PHI instruction (or was,
    /// PHI instructions may have been eliminated).
    /// PHI-defs begin at a block boundary, all other defs begin at register or
    /// EC slots.
    bool isPHIDef() const { return def.isBlock(); }

    /// Returns true if this value is unused.
    bool isUnused() const { return !def.isValid(); }

    /// Mark this value as unused.
    void markUnused() { def = SlotIndex(); }
  };
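
  // Illustrative sketch (assuming a VNInfo pointer `VNI` taken from some live
  // range): distinguishing unused, PHI-defined, and ordinary values:
  //
  //   if (VNI->isUnused()) {
  //     // No segment uses this value; def is an invalid SlotIndex.
  //   } else if (VNI->isPHIDef()) {
  //     // Defined at a block boundary by a (possibly eliminated) PHI.
  //   } else {
  //     SlotIndex DefIdx = VNI->def; // index of the defining instruction
  //   }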

  /// Result of a LiveRange query. This class hides the implementation details
  /// of live ranges, and it should be used as the primary interface for
  /// examining live ranges around instructions.
  class LiveQueryResult {
    VNInfo *const EarlyVal;
    VNInfo *const LateVal;
    const SlotIndex EndPoint;
    const bool Kill;

  public:
    LiveQueryResult(VNInfo *EarlyVal, VNInfo *LateVal, SlotIndex EndPoint,
                    bool Kill)
      : EarlyVal(EarlyVal), LateVal(LateVal), EndPoint(EndPoint), Kill(Kill)
    {}

    /// Return the value that is live-in to the instruction. This is the value
    /// that will be read by the instruction's use operands. Return NULL if no
    /// value is live-in.
    VNInfo *valueIn() const {
      return EarlyVal;
    }

    /// Return true if the live-in value is killed by this instruction. This
    /// means that either the live range ends at the instruction, or it changes
    /// value.
    bool isKill() const {
      return Kill;
    }

    /// Return true if this instruction has a dead def.
    bool isDeadDef() const {
      return EndPoint.isDead();
    }

    /// Return the value leaving the instruction, if any. This can be a
    /// live-through value, or a live def. A dead def returns NULL.
    VNInfo *valueOut() const {
      return isDeadDef() ? nullptr : LateVal;
    }

    /// Returns the value alive at the end of the instruction, if any. This can
    /// be a live-through value, a live def or a dead def.
    VNInfo *valueOutOrDead() const {
      return LateVal;
    }

    /// Return the value defined by this instruction, if any. This includes
    /// dead defs, it is the value created by the instruction's def operands.
    VNInfo *valueDefined() const {
      return EarlyVal == LateVal ? nullptr : LateVal;
    }

    /// Return the end point of the last live range segment to interact with
    /// the instruction, if any.
    ///
    /// The end point is an invalid SlotIndex only if the live range doesn't
    /// intersect the instruction at all.
    ///
    /// The end point may be at or past the end of the instruction's basic
    /// block. That means the value was live out of the block.
    SlotIndex endPoint() const {
      return EndPoint;
    }
  };
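
  // Illustrative sketch (assuming a LiveRange `LR` and a SlotIndex `Idx` for
  // some instruction, e.g. obtained from SlotIndexes): examining liveness
  // around that instruction through a LiveQueryResult:
  //
  //   LiveQueryResult LRQ = LR.Query(Idx);
  //   if (VNInfo *InVNI = LRQ.valueIn()) {
  //     // A value is live-in and read by the instruction's use operands;
  //     // LRQ.isKill() reports whether this instruction ends its segment.
  //   }
  //   if (VNInfo *DefVNI = LRQ.valueDefined()) {
  //     // The instruction defines a new value; LRQ.isDeadDef() reports
  //     // whether that def is immediately dead.
  //   }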

  /// This class represents the liveness of a register, stack slot, etc.
  /// It manages an ordered list of Segment objects.
  /// The Segments are organized in a static single assignment form: At places
  /// where a new value is defined or different values reach a CFG join, a new
  /// segment with a new value number is used.
  class LiveRange {
  public:
    /// This represents a simple continuous liveness interval for a value.
    /// The start point is inclusive, the end point exclusive. These intervals
    /// are rendered as [start,end).
    struct Segment {
      SlotIndex start;         // Start point of the interval (inclusive)
      SlotIndex end;           // End point of the interval (exclusive)
      VNInfo *valno = nullptr; // identifier for the value contained in this
                               // segment.

      Segment() = default;

      Segment(SlotIndex S, SlotIndex E, VNInfo *V)
        : start(S), end(E), valno(V) {
        assert(S < E && "Cannot create empty or backwards segment");
      }

      /// Return true if the index is covered by this segment.
      bool contains(SlotIndex I) const {
        return start <= I && I < end;
      }

      /// Return true if the given interval, [S, E), is covered by this segment.
      bool containsInterval(SlotIndex S, SlotIndex E) const {
        assert((S < E) && "Backwards interval?");
        return (start <= S && S < end) && (start < E && E <= end);
      }

      bool operator<(const Segment &Other) const {
        return std::tie(start, end) < std::tie(Other.start, Other.end);
      }
      bool operator==(const Segment &Other) const {
        return start == Other.start && end == Other.end;
      }

      bool operator!=(const Segment &Other) const {
        return !(*this == Other);
      }

      void dump() const;
    };

    using Segments = SmallVector<Segment, 2>;
    using VNInfoList = SmallVector<VNInfo *, 2>;

    Segments segments;  // the liveness segments
    VNInfoList valnos;  // value#'s

    // The segment set is used temporarily to accelerate initial computation
    // of live ranges of physical registers in computeRegUnitRange.
    // After that the set is flushed to the segment vector and deleted.
    using SegmentSet = std::set<Segment>;
    std::unique_ptr<SegmentSet> segmentSet;

    using iterator = Segments::iterator;
    using const_iterator = Segments::const_iterator;

    iterator begin() { return segments.begin(); }
    iterator end() { return segments.end(); }

    const_iterator begin() const { return segments.begin(); }
    const_iterator end() const { return segments.end(); }

    using vni_iterator = VNInfoList::iterator;
    using const_vni_iterator = VNInfoList::const_iterator;

    vni_iterator vni_begin() { return valnos.begin(); }
    vni_iterator vni_end() { return valnos.end(); }

    const_vni_iterator vni_begin() const { return valnos.begin(); }
    const_vni_iterator vni_end() const { return valnos.end(); }

    iterator_range<vni_iterator> vnis() {
      return make_range(vni_begin(), vni_end());
    }

    iterator_range<const_vni_iterator> vnis() const {
      return make_range(vni_begin(), vni_end());
    }

    /// Constructs a new LiveRange object.
    LiveRange(bool UseSegmentSet = false)
        : segmentSet(UseSegmentSet ? std::make_unique<SegmentSet>()
                                   : nullptr) {}

    /// Constructs a new LiveRange object by copying segments and valnos from
    /// another LiveRange.
    LiveRange(const LiveRange &Other, BumpPtrAllocator &Allocator) {
      assert(Other.segmentSet == nullptr &&
             "Copying of LiveRanges with active SegmentSets is not supported");
      assign(Other, Allocator);
    }

    /// Copies value numbers and live segments from \p Other into this range.
    void assign(const LiveRange &Other, BumpPtrAllocator &Allocator) {
      if (this == &Other)
        return;

      assert(Other.segmentSet == nullptr &&
             "Copying of LiveRanges with active SegmentSets is not supported");
      // Duplicate valnos.
      for (const VNInfo *VNI : Other.valnos)
        createValueCopy(VNI, Allocator);
      // Now we can copy segments and remap their valnos.
      for (const Segment &S : Other.segments)
        segments.push_back(Segment(S.start, S.end, valnos[S.valno->id]));
    }

    /// advanceTo - Advance the specified iterator to point to the Segment
    /// containing the specified position, or end() if the position is past the
    /// end of the range. If no Segment contains this position, but the
    /// position is in a hole, this method returns an iterator pointing to the
    /// Segment immediately after the hole.
    iterator advanceTo(iterator I, SlotIndex Pos) {
      assert(I != end());
      if (Pos >= endIndex())
        return end();
      while (I->end <= Pos) ++I;
      return I;
    }

    const_iterator advanceTo(const_iterator I, SlotIndex Pos) const {
      assert(I != end());
      if (Pos >= endIndex())
        return end();
      while (I->end <= Pos) ++I;
      return I;
    }

    /// find - Return an iterator pointing to the first segment that ends after
    /// Pos, or end(). This is the same as advanceTo(begin(), Pos), but faster
    /// when searching large ranges.
    ///
    /// If Pos is contained in a Segment, that segment is returned.
    /// If Pos is in a hole, the following Segment is returned.
    /// If Pos is beyond endIndex, end() is returned.
    iterator find(SlotIndex Pos);

    const_iterator find(SlotIndex Pos) const {
      return const_cast<LiveRange*>(this)->find(Pos);
    }
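
    // Illustrative sketch (assuming a populated LiveRange `LR` and two query
    // points `Pos1 < Pos2`): find() does one search from scratch, while
    // advanceTo() continues forward from an earlier result instead of
    // searching again:
    //
    //   LiveRange::const_iterator I = LR.find(Pos1);
    //   bool LiveAtPos1 = I != LR.end() && I->start <= Pos1;
    //   if (I != LR.end()) {
    //     I = LR.advanceTo(I, Pos2);
    //     bool LiveAtPos2 = I != LR.end() && I->start <= Pos2;
    //   }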

    void clear() {
      valnos.clear();
      segments.clear();
    }

    size_t size() const {
      return segments.size();
    }

    bool hasAtLeastOneValue() const { return !valnos.empty(); }

    bool containsOneValue() const { return valnos.size() == 1; }

    unsigned getNumValNums() const { return (unsigned)valnos.size(); }

    /// getValNumInfo - Returns pointer to the specified val#.
    ///
    inline VNInfo *getValNumInfo(unsigned ValNo) {
      return valnos[ValNo];
    }
    inline const VNInfo *getValNumInfo(unsigned ValNo) const {
      return valnos[ValNo];
    }

    /// containsValue - Returns true if VNI belongs to this range.
    bool containsValue(const VNInfo *VNI) const {
      return VNI && VNI->id < getNumValNums() && VNI == getValNumInfo(VNI->id);
    }

    /// getNextValue - Create a new value number and return it.
    /// @p Def is the index of instruction that defines the value number.
    VNInfo *getNextValue(SlotIndex Def, VNInfo::Allocator &VNInfoAllocator) {
      VNInfo *VNI =
        new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), Def);
      valnos.push_back(VNI);
      return VNI;
    }

    /// createDeadDef - Make sure the range has a value defined at Def.
    /// If one already exists, return it. Otherwise allocate a new value and
    /// add liveness for a dead def.
    VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc);

    /// Create a def of value @p VNI. Return @p VNI. If there already exists
    /// a definition at VNI->def, the value defined there must be @p VNI.
    VNInfo *createDeadDef(VNInfo *VNI);

    /// Create a copy of the given value. The new value will be identical except
    /// for the value number.
    VNInfo *createValueCopy(const VNInfo *orig,
                            VNInfo::Allocator &VNInfoAllocator) {
      VNInfo *VNI =
        new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), *orig);
      valnos.push_back(VNI);
      return VNI;
    }

    /// RenumberValues - Renumber all values in order of appearance and remove
    /// unused values.
    void RenumberValues();

    /// MergeValueNumberInto - This method is called when two value numbers
    /// are found to be equivalent. This eliminates V1, replacing all
    /// segments with the V1 value number with the V2 value number. This can
    /// cause merging of V1/V2 value numbers and compaction of the value space.
    VNInfo* MergeValueNumberInto(VNInfo *V1, VNInfo *V2);

    /// Merge all of the live segments of a specific val# in RHS into this live
    /// range as the specified value number. The segments in RHS are allowed
    /// to overlap with segments in the current range; it will replace the
    /// value numbers of the overlapped live segments with the specified value
    /// number.
    void MergeSegmentsInAsValue(const LiveRange &RHS, VNInfo *LHSValNo);

    /// MergeValueInAsValue - Merge all of the segments of a specific val#
    /// in RHS into this live range as the specified value number.
    /// The segments in RHS are allowed to overlap with segments in the
    /// current range, but only if the overlapping segments have the
    /// specified value number.
    void MergeValueInAsValue(const LiveRange &RHS,
                             const VNInfo *RHSValNo, VNInfo *LHSValNo);

    bool empty() const { return segments.empty(); }

    /// beginIndex - Return the lowest numbered slot covered.
    SlotIndex beginIndex() const {
      assert(!empty() && "Call to beginIndex() on empty range.");
      return segments.front().start;
    }

    /// endIndex - Return the maximum point of the whole range, exclusive.
    SlotIndex endIndex() const {
      assert(!empty() && "Call to endIndex() on empty range.");
      return segments.back().end;
    }

    bool expiredAt(SlotIndex index) const {
      return index >= endIndex();
    }

    bool liveAt(SlotIndex index) const {
      const_iterator r = find(index);
      return r != end() && r->start <= index;
    }

    /// Return the segment that contains the specified index, or null if there
    /// is none.
    const Segment *getSegmentContaining(SlotIndex Idx) const {
      const_iterator I = FindSegmentContaining(Idx);
      return I == end() ? nullptr : &*I;
    }

    /// Return the live segment that contains the specified index, or null if
    /// there is none.
    Segment *getSegmentContaining(SlotIndex Idx) {
      iterator I = FindSegmentContaining(Idx);
      return I == end() ? nullptr : &*I;
    }

    /// getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
    VNInfo *getVNInfoAt(SlotIndex Idx) const {
      const_iterator I = FindSegmentContaining(Idx);
      return I == end() ? nullptr : I->valno;
    }

    /// getVNInfoBefore - Return the VNInfo that is live up to but not
    /// necessarily including Idx, or NULL. Use this to find the reaching def
    /// used by an instruction at this SlotIndex position.
    VNInfo *getVNInfoBefore(SlotIndex Idx) const {
      const_iterator I = FindSegmentContaining(Idx.getPrevSlot());
      return I == end() ? nullptr : I->valno;
    }

    /// Return an iterator to the segment that contains the specified index, or
    /// end() if there is none.
    iterator FindSegmentContaining(SlotIndex Idx) {
      iterator I = find(Idx);
      return I != end() && I->start <= Idx ? I : end();
    }

    const_iterator FindSegmentContaining(SlotIndex Idx) const {
      const_iterator I = find(Idx);
      return I != end() && I->start <= Idx ? I : end();
    }

    /// overlaps - Return true if the intersection of the two live ranges is
    /// not empty.
    bool overlaps(const LiveRange &other) const {
      if (other.empty())
        return false;
      return overlapsFrom(other, other.begin());
    }

    /// overlaps - Return true if the two ranges have overlapping segments
    /// that are not coalescable according to CP.
    ///
    /// Overlapping segments where one range is defined by a coalescable
    /// copy are allowed.
    bool overlaps(const LiveRange &Other, const CoalescerPair &CP,
                  const SlotIndexes&) const;

    /// overlaps - Return true if the live range overlaps an interval specified
    /// by [Start, End).
    bool overlaps(SlotIndex Start, SlotIndex End) const;

    /// overlapsFrom - Return true if the intersection of the two live ranges
    /// is not empty. The specified iterator is a hint that we can begin
    /// scanning the Other range starting at I.
    bool overlapsFrom(const LiveRange &Other, const_iterator StartPos) const;

    /// Returns true if all segments of the @p Other live range are completely
    /// covered by this live range.
    /// Adjacent live ranges do not affect the covering: the live range
    /// [1,5) [5,10) covers [3,7).
    bool covers(const LiveRange &Other) const;

    /// Add the specified Segment to this range, merging segments as
    /// appropriate. This returns an iterator to the inserted segment (which
    /// may have grown since it was inserted).
    iterator addSegment(Segment S);
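
    // Illustrative sketch (assuming a VNInfo::Allocator `Alloc` and two
    // SlotIndexes `DefIdx < UseIdx`, the defining instruction and a later use):
    // building a one-segment live range by hand:
    //
    //   LiveRange LR;
    //   VNInfo *VNI = LR.getNextValue(DefIdx, Alloc);
    //   LR.addSegment(LiveRange::Segment(DefIdx, UseIdx, VNI));
    //   assert(LR.liveAt(DefIdx) && !LR.liveAt(UseIdx)); // end is exclusive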

    /// Attempt to extend a value defined after @p StartIdx to include @p Use.
    /// Both @p StartIdx and @p Use should be in the same basic block. In case
    /// of subranges, an extension could be prevented by an explicit "undef"
    /// caused by a <def,read-undef> on a non-overlapping lane. The list of
    /// locations of such "undefs" should be provided in @p Undefs.
    /// The return value is a pair: the first element is VNInfo of the value
    /// that was extended (possibly nullptr), the second is a boolean value
    /// indicating whether an "undef" was encountered.
    /// If this range is live before @p Use in the basic block that starts at
    /// @p StartIdx, and there is no intervening "undef", extend it to be live
    /// up to @p Use, and return the pair {value, false}. If there is no
    /// segment before @p Use and there is no "undef" between @p StartIdx and
    /// @p Use, return {nullptr, false}. If there is an "undef" before @p Use,
    /// return {nullptr, true}.
    std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
                                          SlotIndex StartIdx, SlotIndex Kill);

    /// Simplified version of the above "extendInBlock", which assumes that
    /// no register lanes are undefined by <def,read-undef> operands.
    /// If this range is live before @p Use in the basic block that starts
    /// at @p StartIdx, extend it to be live up to @p Use, and return the
    /// value. If there is no segment before @p Use, return nullptr.
    VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Kill);

    /// join - Join two live ranges (this, and other) together. This applies
    /// mappings to the value numbers in the LHS/RHS ranges as specified. If
    /// the ranges are not joinable, this aborts.
    void join(LiveRange &Other,
              const int *ValNoAssignments,
              const int *RHSValNoAssignments,
              SmallVectorImpl<VNInfo *> &NewVNInfo);

    /// True iff this live range is a single segment that lies between the
    /// specified boundaries, exclusively. Vregs live across a backedge are not
    /// considered local. The boundaries are expected to lie within an extended
    /// basic block, so vregs that are not live out should contain no holes.
    bool isLocal(SlotIndex Start, SlotIndex End) const {
      return beginIndex() > Start.getBaseIndex() &&
             endIndex() < End.getBoundaryIndex();
    }

    /// Remove the specified interval from this live range.
    /// Does nothing if interval is not part of this live range.
    /// Note that the interval must be within a single Segment in its entirety.
    void removeSegment(SlotIndex Start, SlotIndex End,
                       bool RemoveDeadValNo = false);

    void removeSegment(Segment S, bool RemoveDeadValNo = false) {
      removeSegment(S.start, S.end, RemoveDeadValNo);
    }

    /// Remove segment pointed to by iterator @p I from this range.
    iterator removeSegment(iterator I, bool RemoveDeadValNo = false);

    /// Mark \p ValNo for deletion if no segments in this range use it.
    void removeValNoIfDead(VNInfo *ValNo);

    /// Query Liveness at Idx.
    /// The sub-instruction slot of Idx doesn't matter, only the instruction
    /// it refers to is considered.
    LiveQueryResult Query(SlotIndex Idx) const {
      // Find the segment that enters the instruction.
      const_iterator I = find(Idx.getBaseIndex());
      const_iterator E = end();
      if (I == E)
        return LiveQueryResult(nullptr, nullptr, SlotIndex(), false);

      // Is this an instruction live-in segment?
      // If Idx is the start index of a basic block, include live-in segments
      // that start at Idx.getBaseIndex().
      VNInfo *EarlyVal = nullptr;
      VNInfo *LateVal = nullptr;
      SlotIndex EndPoint;
      bool Kill = false;
      if (I->start <= Idx.getBaseIndex()) {
        EarlyVal = I->valno;
        EndPoint = I->end;
        // Move to the potentially live-out segment.
        if (SlotIndex::isSameInstr(Idx, I->end)) {
          Kill = true;
          if (++I == E)
            return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
        }
        // Special case: A PHIDef value can have its def in the middle of a
        // segment if the value happens to be live out of the layout
        // predecessor.
        // Such a value is not live-in.
        if (EarlyVal->def == Idx.getBaseIndex())
          EarlyVal = nullptr;
      }
      // I now points to the segment that may be live-through, or defined by
      // this instr. Ignore segments starting after the current instr.
      if (!SlotIndex::isEarlierInstr(Idx, I->start)) {
        LateVal = I->valno;
        EndPoint = I->end;
      }
      return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
    }

    /// removeValNo - Remove all the segments defined by the specified value#.
    /// Also remove the value# from value# list.
    void removeValNo(VNInfo *ValNo);

    /// Returns true if the live range is zero length, i.e. no live segments
    /// span instructions. It doesn't pay to spill such a range.
    bool isZeroLength(SlotIndexes *Indexes) const {
      for (const Segment &S : segments)
        if (Indexes->getNextNonNullIndex(S.start).getBaseIndex() <
            S.end.getBaseIndex())
          return false;
      return true;
    }

    // Returns true if any segment in the live range contains any of the
    // provided slot indexes. Slots which occur in holes between
    // segments will not cause the function to return true.
    bool isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const;

    bool operator<(const LiveRange& other) const {
      const SlotIndex &thisIndex = beginIndex();
      const SlotIndex &otherIndex = other.beginIndex();
      return thisIndex < otherIndex;
    }

    /// Returns true if there is an explicit "undef" between @p Begin
    /// and @p End.
    bool isUndefIn(ArrayRef<SlotIndex> Undefs, SlotIndex Begin,
                   SlotIndex End) const {
      return llvm::any_of(Undefs, [Begin, End](SlotIndex Idx) -> bool {
        return Begin <= Idx && Idx < End;
      });
    }

    /// Flush segment set into the regular segment vector.
    /// The method is to be called after the live range
    /// has been created, if use of the segment set was
    /// activated in the constructor of the live range.
    void flushSegmentSet();

    /// Stores indexes from the input index sequence R at which this LiveRange
    /// is live to the output O iterator.
    /// R is a range of _ascending sorted_ _random_ access iterators
    /// to the input indexes. Indexes stored at O are ascending sorted so it
    /// can be used directly in the subsequent search (for example for
    /// subranges). Returns true if found at least one index.
    template <typename Range, typename OutputIt>
    bool findIndexesLiveAt(Range &&R, OutputIt O) const {
      assert(llvm::is_sorted(R));
      auto Idx = R.begin(), EndIdx = R.end();
      auto Seg = segments.begin(), EndSeg = segments.end();
      bool Found = false;
      while (Idx != EndIdx && Seg != EndSeg) {
        // If the current segment ends at or before *Idx, binary search for the
        // first segment that ends above *Idx.
        if (Seg->end <= *Idx) {
          Seg =
              std::upper_bound(++Seg, EndSeg, *Idx, [=](auto V, const auto &S) {
                return V < S.end;
              });
          if (Seg == EndSeg)
            break;
        }
        auto NotLessStart = std::lower_bound(Idx, EndIdx, Seg->start);
        if (NotLessStart == EndIdx)
          break;
        auto NotLessEnd = std::lower_bound(NotLessStart, EndIdx, Seg->end);
        if (NotLessEnd != NotLessStart) {
          Found = true;
          O = std::copy(NotLessStart, NotLessEnd, O);
        }
        Idx = NotLessEnd;
        ++Seg;
      }
      return Found;
    }
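
    // Illustrative sketch (assuming a populated LiveRange `LR` and a sorted
    // SmallVector<SlotIndex, 8> `Slots` of query points): collect the subset
    // of `Slots` that fall inside some segment, preserving their order:
    //
    //   SmallVector<SlotIndex, 8> LiveSlots;
    //   if (LR.findIndexesLiveAt(Slots, std::back_inserter(LiveSlots))) {
    //     // LiveSlots now holds exactly the indexes from Slots at which LR
    //     // is live.
    //   }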

    void print(raw_ostream &OS) const;
    void dump() const;

    /// Walk the range and assert if any invariants fail to hold.
    ///
    /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
    void verify() const {}
#else
    void verify() const;
#endif

  protected:
    /// Append a segment to the list of segments.
    void append(const LiveRange::Segment S);

  private:
    friend class LiveRangeUpdater;
    void addSegmentToSet(Segment S);
    void markValNoForDeletion(VNInfo *V);
  };

  inline raw_ostream &operator<<(raw_ostream &OS, const LiveRange &LR) {
    LR.print(OS);
    return OS;
  }

  /// LiveInterval - This class represents the liveness of a register,
  /// or stack slot.
  class LiveInterval : public LiveRange {
  public:
    using super = LiveRange;

    /// A live range for subregisters. The LaneMask specifies which parts of the
    /// super register are covered by the interval.
    /// (@sa TargetRegisterInfo::getSubRegIndexLaneMask()).
    class SubRange : public LiveRange {
    public:
      SubRange *Next = nullptr;
      LaneBitmask LaneMask;

      /// Constructs a new SubRange object.
      SubRange(LaneBitmask LaneMask) : LaneMask(LaneMask) {}

      /// Constructs a new SubRange object by copying liveness from @p Other.
      SubRange(LaneBitmask LaneMask, const LiveRange &Other,
               BumpPtrAllocator &Allocator)
        : LiveRange(Other, Allocator), LaneMask(LaneMask) {}

      void print(raw_ostream &OS) const;
      void dump() const;
    };

  private:
    SubRange *SubRanges = nullptr; ///< Singly linked list of subregister live
                                   /// ranges.
    const Register Reg; // the register or stack slot of this interval.
    float Weight = 0.0; // weight of this interval

  public:
    Register reg() const { return Reg; }
    float weight() const { return Weight; }
    void incrementWeight(float Inc) { Weight += Inc; }
    void setWeight(float Value) { Weight = Value; }

    LiveInterval(unsigned Reg, float Weight) : Reg(Reg), Weight(Weight) {}

    ~LiveInterval() {
      clearSubRanges();
    }

    template<typename T>
    class SingleLinkedListIterator {
      T *P;

    public:
      using difference_type = ptrdiff_t;
      using value_type = T;
      using pointer = T *;
      using reference = T &;
      using iterator_category = std::forward_iterator_tag;

      SingleLinkedListIterator(T *P) : P(P) {}

      SingleLinkedListIterator<T> &operator++() {
        P = P->Next;
        return *this;
      }
      SingleLinkedListIterator<T> operator++(int) {
        SingleLinkedListIterator res = *this;
        ++*this;
        return res;
      }
      bool operator!=(const SingleLinkedListIterator<T> &Other) const {
        return P != Other.operator->();
      }
      bool operator==(const SingleLinkedListIterator<T> &Other) const {
        return P == Other.operator->();
      }
      T &operator*() const {
        return *P;
      }
      T *operator->() const {
        return P;
      }
    };

    using subrange_iterator = SingleLinkedListIterator<SubRange>;
    using const_subrange_iterator = SingleLinkedListIterator<const SubRange>;

    subrange_iterator subrange_begin() {
      return subrange_iterator(SubRanges);
    }
    subrange_iterator subrange_end() {
      return subrange_iterator(nullptr);
    }

    const_subrange_iterator subrange_begin() const {
      return const_subrange_iterator(SubRanges);
    }
    const_subrange_iterator subrange_end() const {
      return const_subrange_iterator(nullptr);
    }

    iterator_range<subrange_iterator> subranges() {
      return make_range(subrange_begin(), subrange_end());
    }

    iterator_range<const_subrange_iterator> subranges() const {
      return make_range(subrange_begin(), subrange_end());
    }
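
    // Illustrative sketch (assuming a LiveInterval `LI` with subregister
    // liveness and a SlotIndex `Idx`): compute which lanes of the register
    // are live at Idx:
    //
    //   LaneBitmask LiveLanes = LaneBitmask::getNone();
    //   if (LI.hasSubRanges()) {
    //     for (const LiveInterval::SubRange &SR : LI.subranges())
    //       if (SR.liveAt(Idx))
    //         LiveLanes |= SR.LaneMask;
    //   }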

    /// Creates a new empty subregister live range. The range is added at the
    /// beginning of the subrange list; subrange iterators stay valid.
    SubRange *createSubRange(BumpPtrAllocator &Allocator,
                             LaneBitmask LaneMask) {
      SubRange *Range = new (Allocator) SubRange(LaneMask);
      appendSubRange(Range);
      return Range;
    }

    /// Like createSubRange() but the new range is filled with a copy of the
    /// liveness information in @p CopyFrom.
    SubRange *createSubRangeFrom(BumpPtrAllocator &Allocator,
                                 LaneBitmask LaneMask,
                                 const LiveRange &CopyFrom) {
      SubRange *Range = new (Allocator) SubRange(LaneMask, CopyFrom, Allocator);
      appendSubRange(Range);
      return Range;
    }

    /// Returns true if subregister liveness information is available.
    bool hasSubRanges() const {
      return SubRanges != nullptr;
    }

    /// Removes all subregister liveness information.
    void clearSubRanges();

    /// Removes all subranges without any segments (subranges without segments
    /// are not considered valid and should only exist temporarily).
    void removeEmptySubRanges();

    /// getSize - Returns the sum of sizes of all the LiveRanges.
    ///
    unsigned getSize() const;

    /// isSpillable - Can this interval be spilled?
    bool isSpillable() const { return Weight != huge_valf; }

    /// markNotSpillable - Mark interval as not spillable
    void markNotSpillable() { Weight = huge_valf; }

    /// For a given lane mask @p LaneMask, compute indexes at which the
    /// lane is marked undefined by subregister <def,read-undef> definitions.
    void computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
                               LaneBitmask LaneMask,
                               const MachineRegisterInfo &MRI,
                               const SlotIndexes &Indexes) const;

    /// Refines the subranges to support \p LaneMask. This may only be called
    /// for LI.hasSubRanges() == true. Subregister ranges are split or created
    /// until \p LaneMask can be matched exactly. \p Mod is executed on the
    /// matching subranges.
    ///
    /// Example:
    /// Given an interval with subranges with lanemasks L0F00, L00F0 and
    /// L000F, refining for mask L0018. Will split the L00F0 lane into
    /// L00E0 and L0010 and the L000F lane into L0007 and L0008. The Mod
    /// function will be applied to the L0010 and L0008 subranges.
    ///
    /// \p Indexes and \p TRI are required to clean up the VNIs that
    /// don't define the related lane masks after they get shrunk. E.g.,
    /// when L000F gets split into L0007 and L0008 maybe only a subset
    /// of the VNIs that defined L000F defines L0007.
    ///
    /// The cleanup of the VNIs needs to look at the actual instructions
    /// to decide what is or is not live at a definition point. If the
    /// update of the subranges occurs while the IR does not reflect these
    /// changes, \p ComposeSubRegIdx can be used to specify how the
    /// definitions are going to be rewritten.
    /// E.g., let's say we want to merge:
    /// V1.sub1:<2 x s32> = COPY V2.sub3:<4 x s32>
    /// We do that by choosing a class where sub1:<2 x s32> and sub3:<4 x s32>
    /// overlap, i.e., by choosing a class where we can find "offset + 1 == 3".
    /// Put differently we align V2's sub3 with V1's sub1:
    /// V2: sub0 sub1 sub2 sub3
    /// V1: <offset> sub0 sub1
    ///
    /// This offset will look like a composed subregidx in the class:
    /// V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
    /// => V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
    ///
    /// Now if we didn't rewrite the uses and def of V1, all the checks for V1
    /// need to account for this offset.
    /// This happens during coalescing where we update the live-ranges while
    /// still having the old IR around because updating the IR on-the-fly
    /// would actually clobber some information about what the live ranges
    /// being updated look like.
    void refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
                         std::function<void(LiveInterval::SubRange &)> Apply,
                         const SlotIndexes &Indexes,
                         const TargetRegisterInfo &TRI,
                         unsigned ComposeSubRegIdx = 0);

    bool operator<(const LiveInterval& other) const {
      const SlotIndex &thisIndex = beginIndex();
      const SlotIndex &otherIndex = other.beginIndex();
      return std::tie(thisIndex, Reg) < std::tie(otherIndex, other.Reg);
    }

    void print(raw_ostream &OS) const;
    void dump() const;

    /// Walks the interval and asserts if any invariants fail to hold.
    ///
    /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
    void verify(const MachineRegisterInfo *MRI = nullptr) const {}
#else
    void verify(const MachineRegisterInfo *MRI = nullptr) const;
#endif

  private:
    /// Appends @p Range to SubRanges list.
    void appendSubRange(SubRange *Range) {
      Range->Next = SubRanges;
      SubRanges = Range;
    }

    /// Free memory held by SubRange.
    void freeSubRange(SubRange *S);
  };

  inline raw_ostream &operator<<(raw_ostream &OS,
                                 const LiveInterval::SubRange &SR) {
    SR.print(OS);
    return OS;
  }

  inline raw_ostream &operator<<(raw_ostream &OS, const LiveInterval &LI) {
    LI.print(OS);
    return OS;
  }

  raw_ostream &operator<<(raw_ostream &OS, const LiveRange::Segment &S);

  inline bool operator<(SlotIndex V, const LiveRange::Segment &S) {
    return V < S.start;
  }

  inline bool operator<(const LiveRange::Segment &S, SlotIndex V) {
    return S.start < V;
  }
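
  // Illustrative sketch (assuming a populated LiveRange `LR` and a SlotIndex
  // `Pos`): the asymmetric operator< overloads above allow standard binary
  // searches over the segment vector keyed on segment start points:
  //
  //   auto GE = llvm::lower_bound(LR, Pos); // first segment with start >= Pos
  //   auto GT = llvm::upper_bound(LR, Pos); // first segment with start > Pos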

  /// Helper class for performant LiveRange bulk updates.
  ///
  /// Calling LiveRange::addSegment() repeatedly can be expensive on large
  /// live ranges because segments after the insertion point may need to be
  /// shifted. The LiveRangeUpdater class can defer the shifting when adding
  /// many segments in order.
  ///
  /// The LiveRange will be in an invalid state until flush() is called.
  class LiveRangeUpdater {
    LiveRange *LR;
    SlotIndex LastStart;
    LiveRange::iterator WriteI;
    LiveRange::iterator ReadI;
    SmallVector<LiveRange::Segment, 16> Spills;
    void mergeSpills();

  public:
    /// Create a LiveRangeUpdater for adding segments to LR.
    /// LR will temporarily be in an invalid state until flush() is called.
    LiveRangeUpdater(LiveRange *lr = nullptr) : LR(lr) {}

    ~LiveRangeUpdater() { flush(); }

    /// Add a segment to LR and coalesce when possible, just like
    /// LR.addSegment(). Segments should be added in increasing start order for
    /// best performance.
    void add(LiveRange::Segment);

    void add(SlotIndex Start, SlotIndex End, VNInfo *VNI) {
      add(LiveRange::Segment(Start, End, VNI));
    }

    /// Return true if the LR is currently in an invalid state, and flush()
    /// needs to be called.
    bool isDirty() const { return LastStart.isValid(); }

    /// Flush the updater state to LR so it is valid and contains all added
    /// segments.
    void flush();

    /// Select a different destination live range.
    void setDest(LiveRange *lr) {
      if (LR != lr && isDirty())
        flush();
      LR = lr;
    }

    /// Get the current destination live range.
    LiveRange *getDest() const { return LR; }

    void dump() const;
    void print(raw_ostream&) const;
  };
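
  // Illustrative sketch (assuming a LiveRange `LR`, a VNInfo `VNI`, and a
  // hypothetical container `NewSegments` of segments sorted by increasing
  // start index): add them in bulk and let the updater shift and coalesce
  // once, instead of calling LR.addSegment() per segment:
  //
  //   {
  //     LiveRangeUpdater Updater(&LR);
  //     for (const LiveRange::Segment &S : NewSegments)
  //       Updater.add(S.start, S.end, VNI);
  //   } // flush() runs in the destructor; LR is valid again here.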

  inline raw_ostream &operator<<(raw_ostream &OS, const LiveRangeUpdater &X) {
    X.print(OS);
    return OS;
  }

  /// ConnectedVNInfoEqClasses - Helper class that can divide VNInfos in a
  /// LiveInterval into equivalence classes of connected components. A
  /// LiveInterval that has multiple connected components can be broken into
  /// multiple LiveIntervals.
  ///
  /// Given a LiveInterval that may have multiple connected components, run:
  ///
  ///   unsigned numComps = ConEQ.Classify(LI);
  ///   if (numComps > 1) {
  ///     // allocate numComps-1 new LiveIntervals into LIV[1..]
  ///     ConEQ.Distribute(LI, LIV, MRI);
  ///   }

  class ConnectedVNInfoEqClasses {
    LiveIntervals &LIS;
    IntEqClasses EqClass;

  public:
    explicit ConnectedVNInfoEqClasses(LiveIntervals &lis) : LIS(lis) {}

    /// Classify the values in \p LR into connected components.
    /// Returns the number of connected components.
    unsigned Classify(const LiveRange &LR);

    /// getEqClass - Classify creates equivalence classes numbered 0..N. Return
    /// the equivalence class assigned the VNI.
    unsigned getEqClass(const VNInfo *VNI) const { return EqClass[VNI->id]; }

    /// Distribute values in \p LI into separate LiveIntervals, one
    /// for each connected component. LIV must have an empty LiveInterval for
    /// each additional connected component. The first connected component is
    /// left in \p LI.
    void Distribute(LiveInterval &LI, LiveInterval *LIV[],
                    MachineRegisterInfo &MRI);
  };

} // end namespace llvm

#endif // LLVM_CODEGEN_LIVEINTERVAL_H