LLVM 21.0.0git
DependencyGraph.cpp
Go to the documentation of this file.
1//===- DependencyGraph.cpp ------------------------------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
10#include "llvm/ADT/ArrayRef.h"
14
15namespace llvm::sandboxir {
16
17User::op_iterator PredIterator::skipBadIt(User::op_iterator OpIt,
19 const DependencyGraph &DAG) {
20 auto Skip = [&DAG](auto OpIt) {
21 auto *I = dyn_cast<Instruction>((*OpIt).get());
22 return I == nullptr || DAG.getNode(I) == nullptr;
23 };
24 while (OpIt != OpItE && Skip(OpIt))
25 ++OpIt;
26 return OpIt;
27}
28
30 // If it's a DGNode then we dereference the operand iterator.
31 if (!isa<MemDGNode>(N)) {
32 assert(OpIt != OpItE && "Can't dereference end iterator!");
33 return DAG->getNode(cast<Instruction>((Value *)*OpIt));
34 }
35 // It's a MemDGNode, so we check if we return either the use-def operand,
36 // or a mem predecessor.
37 if (OpIt != OpItE)
38 return DAG->getNode(cast<Instruction>((Value *)*OpIt));
39 // It's a MemDGNode with OpIt == end, so we need to use MemIt.
40 assert(MemIt != cast<MemDGNode>(N)->MemPreds.end() &&
41 "Cant' dereference end iterator!");
42 return *MemIt;
43}
44
46 // If it's a DGNode then we increment the use-def iterator.
47 if (!isa<MemDGNode>(N)) {
48 assert(OpIt != OpItE && "Already at end!");
49 ++OpIt;
50 // Skip operands that are not instructions or are outside the DAG.
51 OpIt = PredIterator::skipBadIt(OpIt, OpItE, *DAG);
52 return *this;
53 }
54 // It's a MemDGNode, so if we are not at the end of the use-def iterator we
55 // need to first increment that.
56 if (OpIt != OpItE) {
57 ++OpIt;
58 // Skip operands that are not instructions or are outside the DAG.
59 OpIt = PredIterator::skipBadIt(OpIt, OpItE, *DAG);
60 return *this;
61 }
62 // It's a MemDGNode with OpIt == end, so we need to increment MemIt.
63 assert(MemIt != cast<MemDGNode>(N)->MemPreds.end() && "Already at end!");
64 ++MemIt;
65 return *this;
66}
67
69 assert(DAG == Other.DAG && "Iterators of different DAGs!");
70 assert(N == Other.N && "Iterators of different nodes!");
71 return OpIt == Other.OpIt && MemIt == Other.MemIt;
72}
73
75 if (SB == nullptr)
76 return;
77 SB->eraseFromBundle(this);
78}
79
80#ifndef NDEBUG
81void DGNode::print(raw_ostream &OS, bool PrintDeps) const {
82 OS << *I << " USuccs:" << UnscheduledSuccs << " Sched:" << Scheduled << "\n";
83}
84void DGNode::dump() const { print(dbgs()); }
85void MemDGNode::print(raw_ostream &OS, bool PrintDeps) const {
86 DGNode::print(OS, false);
87 if (PrintDeps) {
88 // Print memory preds.
89 static constexpr const unsigned Indent = 4;
90 for (auto *Pred : MemPreds)
91 OS.indent(Indent) << "<-" << *Pred->getInstruction() << "\n";
92 }
93}
94#endif // NDEBUG
95
98 const DependencyGraph &DAG) {
99 Instruction *I = Intvl.top();
100 Instruction *BeforeI = Intvl.bottom();
101 // Walk down the chain looking for a mem-dep candidate instruction.
102 while (!DGNode::isMemDepNodeCandidate(I) && I != BeforeI)
103 I = I->getNextNode();
105 return nullptr;
106 return cast<MemDGNode>(DAG.getNode(I));
107}
108
109MemDGNode *
111 const DependencyGraph &DAG) {
112 Instruction *I = Intvl.bottom();
113 Instruction *AfterI = Intvl.top();
114 // Walk up the chain looking for a mem-dep candidate instruction.
115 while (!DGNode::isMemDepNodeCandidate(I) && I != AfterI)
116 I = I->getPrevNode();
118 return nullptr;
119 return cast<MemDGNode>(DAG.getNode(I));
120}
121
124 DependencyGraph &DAG) {
125 if (Instrs.empty())
126 return {};
127 auto *TopMemN = getTopMemDGNode(Instrs, DAG);
128 // If we couldn't find a mem node in range TopN - BotN then it's empty.
129 if (TopMemN == nullptr)
130 return {};
131 auto *BotMemN = getBotMemDGNode(Instrs, DAG);
132 assert(BotMemN != nullptr && "TopMemN should be null too!");
133 // Now that we have the mem-dep nodes, create and return the range.
134 return Interval<MemDGNode>(TopMemN, BotMemN);
135}
136
137DependencyGraph::DependencyType
138DependencyGraph::getRoughDepType(Instruction *FromI, Instruction *ToI) {
139 // TODO: Perhaps compile-time improvement by skipping if neither is mem?
140 if (FromI->mayWriteToMemory()) {
141 if (ToI->mayReadFromMemory())
142 return DependencyType::ReadAfterWrite;
143 if (ToI->mayWriteToMemory())
144 return DependencyType::WriteAfterWrite;
145 } else if (FromI->mayReadFromMemory()) {
146 if (ToI->mayWriteToMemory())
147 return DependencyType::WriteAfterRead;
148 }
149 if (isa<sandboxir::PHINode>(FromI) || isa<sandboxir::PHINode>(ToI))
150 return DependencyType::Control;
151 if (ToI->isTerminator())
152 return DependencyType::Control;
155 return DependencyType::Other;
156 return DependencyType::None;
157}
158
159static bool isOrdered(Instruction *I) {
160 auto IsOrdered = [](Instruction *I) {
161 if (auto *LI = dyn_cast<LoadInst>(I))
162 return !LI->isUnordered();
163 if (auto *SI = dyn_cast<StoreInst>(I))
164 return !SI->isUnordered();
166 return true;
167 return false;
168 };
169 bool Is = IsOrdered(I);
171 "An ordered instruction must be a MemDepCandidate!");
172 return Is;
173}
174
175bool DependencyGraph::alias(Instruction *SrcI, Instruction *DstI,
176 DependencyType DepType) {
177 std::optional<MemoryLocation> DstLocOpt =
179 if (!DstLocOpt)
180 return true;
181 // Check aliasing.
182 assert((SrcI->mayReadFromMemory() || SrcI->mayWriteToMemory()) &&
183 "Expected a mem instr");
184 // TODO: Check AABudget
185 ModRefInfo SrcModRef =
186 isOrdered(SrcI)
188 : Utils::aliasAnalysisGetModRefInfo(*BatchAA, SrcI, *DstLocOpt);
189 switch (DepType) {
190 case DependencyType::ReadAfterWrite:
191 case DependencyType::WriteAfterWrite:
192 return isModSet(SrcModRef);
193 case DependencyType::WriteAfterRead:
194 return isRefSet(SrcModRef);
195 default:
196 llvm_unreachable("Expected only RAW, WAW and WAR!");
197 }
198}
199
200bool DependencyGraph::hasDep(Instruction *SrcI, Instruction *DstI) {
201 DependencyType RoughDepType = getRoughDepType(SrcI, DstI);
202 switch (RoughDepType) {
203 case DependencyType::ReadAfterWrite:
204 case DependencyType::WriteAfterWrite:
205 case DependencyType::WriteAfterRead:
206 return alias(SrcI, DstI, RoughDepType);
207 case DependencyType::Control:
208 // Adding actual dep edges from PHIs/to terminator would just create too
209 // many edges, which would be bad for compile-time.
210 // So we ignore them in the DAG formation but handle them in the
211 // scheduler, while sorting the ready list.
212 return false;
213 case DependencyType::Other:
214 return true;
215 case DependencyType::None:
216 return false;
217 }
218 llvm_unreachable("Unknown DependencyType enum");
219}
220
221void DependencyGraph::scanAndAddDeps(MemDGNode &DstN,
222 const Interval<MemDGNode> &SrcScanRange) {
223 assert(isa<MemDGNode>(DstN) &&
224 "DstN is the mem dep destination, so it must be mem");
225 Instruction *DstI = DstN.getInstruction();
226 // Walk up the instruction chain from ScanRange bottom to top, looking for
227 // memory instrs that may alias.
228 for (MemDGNode &SrcN : reverse(SrcScanRange)) {
229 Instruction *SrcI = SrcN.getInstruction();
230 if (hasDep(SrcI, DstI))
231 DstN.addMemPred(&SrcN);
232 }
233}
234
// Initializes the UnscheduledSuccs counter of every def node whose use lies
// in NewInterval: first for def-use edges fully inside NewInterval, then for
// edges that cross between the pre-existing DAGInterval and NewInterval.
void DependencyGraph::setDefUseUnscheduledSuccs(
    const Interval<Instruction> &NewInterval) {
  // +---+
  // |   | Def
  // |   |  |
  // |   |  v
  // |   | Use
  // +---+
  // Set the intra-interval counters in NewInterval.
  for (Instruction &I : NewInterval) {
    for (Value *Op : I.operands()) {
      auto *OpI = dyn_cast<Instruction>(Op);
      if (OpI == nullptr)
        continue;
      // TODO: For now don't cross BBs.
      if (OpI->getParent() != I.getParent())
        continue;
      if (!NewInterval.contains(OpI))
        continue;
      auto *OpN = getNode(OpI);
      if (OpN == nullptr)
        continue;
      // `I` is an unscheduled use of OpI inside the new interval.
      ++OpN->UnscheduledSuccs;
    }
  }

  // Now handle the cross-interval edges.
  bool NewIsAbove = DAGInterval.empty() || NewInterval.comesBefore(DAGInterval);
  const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
  const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;
  // +---+
  // |Top|
  // |   | Def
  // +---+  |
  // |   |  v
  // |Bot| Use
  // |   |
  // +---+
  // Walk over all instructions in "BotInterval" and update the counter
  // of operands that are in "TopInterval".
  for (Instruction &BotI : BotInterval) {
    auto *BotN = getNode(&BotI);
    // Skip scheduled nodes.
    // NOTE(review): BotN is dereferenced without a null check — presumably
    // every instruction in BotInterval already has a node by the time this
    // runs (createNewNodes precedes it); confirm against callers.
    if (BotN->scheduled())
      continue;
    for (Value *Op : BotI.operands()) {
      auto *OpI = dyn_cast<Instruction>(Op);
      if (OpI == nullptr)
        continue;
      auto *OpN = getNode(OpI);
      if (OpN == nullptr)
        continue;
      if (!TopInterval.contains(OpI))
        continue;
      // Cross-interval def-use edge: OpI (in Top) is used by BotI (in Bot).
      ++OpN->UnscheduledSuccs;
    }
  }
}
293
294void DependencyGraph::createNewNodes(const Interval<Instruction> &NewInterval) {
295 // Create Nodes only for the new sections of the DAG.
296 DGNode *LastN = getOrCreateNode(NewInterval.top());
297 MemDGNode *LastMemN = dyn_cast<MemDGNode>(LastN);
298 for (Instruction &I : drop_begin(NewInterval)) {
299 auto *N = getOrCreateNode(&I);
300 // Build the Mem node chain.
301 if (auto *MemN = dyn_cast<MemDGNode>(N)) {
302 MemN->setPrevNode(LastMemN);
303 LastMemN = MemN;
304 }
305 }
306 // Link new MemDGNode chain with the old one, if any.
307 if (!DAGInterval.empty()) {
308 bool NewIsAbove = NewInterval.comesBefore(DAGInterval);
309 const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
310 const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;
311 MemDGNode *LinkTopN =
313 MemDGNode *LinkBotN =
315 assert((LinkTopN == nullptr || LinkBotN == nullptr ||
316 LinkTopN->comesBefore(LinkBotN)) &&
317 "Wrong order!");
318 if (LinkTopN != nullptr && LinkBotN != nullptr) {
319 LinkTopN->setNextNode(LinkBotN);
320 }
321#ifndef NDEBUG
322 // TODO: Remove this once we've done enough testing.
323 // Check that the chain is well formed.
324 auto UnionIntvl = DAGInterval.getUnionInterval(NewInterval);
325 MemDGNode *ChainTopN =
327 MemDGNode *ChainBotN =
329 if (ChainTopN != nullptr && ChainBotN != nullptr) {
330 for (auto *N = ChainTopN->getNextNode(), *LastN = ChainTopN; N != nullptr;
331 LastN = N, N = N->getNextNode()) {
332 assert(N == LastN->getNextNode() && "Bad chain!");
333 assert(N->getPrevNode() == LastN && "Bad chain!");
334 }
335 }
336#endif // NDEBUG
337 }
338
339 setDefUseUnscheduledSuccs(NewInterval);
340}
341
342MemDGNode *DependencyGraph::getMemDGNodeBefore(DGNode *N, bool IncludingN,
343 MemDGNode *SkipN) const {
344 auto *I = N->getInstruction();
345 for (auto *PrevI = IncludingN ? I : I->getPrevNode(); PrevI != nullptr;
346 PrevI = PrevI->getPrevNode()) {
347 auto *PrevN = getNodeOrNull(PrevI);
348 if (PrevN == nullptr)
349 return nullptr;
350 auto *PrevMemN = dyn_cast<MemDGNode>(PrevN);
351 if (PrevMemN != nullptr && PrevMemN != SkipN)
352 return PrevMemN;
353 }
354 return nullptr;
355}
356
357MemDGNode *DependencyGraph::getMemDGNodeAfter(DGNode *N, bool IncludingN,
358 MemDGNode *SkipN) const {
359 auto *I = N->getInstruction();
360 for (auto *NextI = IncludingN ? I : I->getNextNode(); NextI != nullptr;
361 NextI = NextI->getNextNode()) {
362 auto *NextN = getNodeOrNull(NextI);
363 if (NextN == nullptr)
364 return nullptr;
365 auto *NextMemN = dyn_cast<MemDGNode>(NextN);
366 if (NextMemN != nullptr && NextMemN != SkipN)
367 return NextMemN;
368 }
369 return nullptr;
370}
371
372void DependencyGraph::notifyCreateInstr(Instruction *I) {
374 // We don't maintain the DAG while reverting.
375 return;
376 // Nothing to do if the node is not in the focus range of the DAG.
377 if (!(DAGInterval.contains(I) || DAGInterval.touches(I)))
378 return;
379 // Include `I` into the interval.
380 DAGInterval = DAGInterval.getUnionInterval({I, I});
381 auto *N = getOrCreateNode(I);
382 auto *MemN = dyn_cast<MemDGNode>(N);
383
384 // Update the MemDGNode chain if this is a memory node.
385 if (MemN != nullptr) {
386 if (auto *PrevMemN = getMemDGNodeBefore(MemN, /*IncludingN=*/false)) {
387 PrevMemN->NextMemN = MemN;
388 MemN->PrevMemN = PrevMemN;
389 }
390 if (auto *NextMemN = getMemDGNodeAfter(MemN, /*IncludingN=*/false)) {
391 NextMemN->PrevMemN = MemN;
392 MemN->NextMemN = NextMemN;
393 }
394
395 // Add Mem dependencies.
396 // 1. Scan for deps above `I` for deps to `I`: AboveN->MemN.
397 if (DAGInterval.top()->comesBefore(I)) {
398 Interval<Instruction> AboveIntvl(DAGInterval.top(), I->getPrevNode());
399 auto SrcInterval = MemDGNodeIntervalBuilder::make(AboveIntvl, *this);
400 scanAndAddDeps(*MemN, SrcInterval);
401 }
402 // 2. Scan for deps below `I` for deps from `I`: MemN->BelowN.
403 if (I->comesBefore(DAGInterval.bottom())) {
404 Interval<Instruction> BelowIntvl(I->getNextNode(), DAGInterval.bottom());
405 for (MemDGNode &BelowN :
406 MemDGNodeIntervalBuilder::make(BelowIntvl, *this))
407 scanAndAddDeps(BelowN, Interval<MemDGNode>(MemN, MemN));
408 }
409 }
410}
411
412void DependencyGraph::notifyMoveInstr(Instruction *I, const BBIterator &To) {
414 // We don't maintain the DAG while reverting.
415 return;
416 // NOTE: This function runs before `I` moves to its new destination.
417 BasicBlock *BB = To.getNodeParent();
418 assert(!(To != BB->end() && &*To == I->getNextNode()) &&
419 !(To == BB->end() && std::next(I->getIterator()) == BB->end()) &&
420 "Should not have been called if destination is same as origin.");
421
422 // TODO: We can only handle fully internal movements within DAGInterval or at
423 // the borders, i.e., right before the top or right after the bottom.
424 assert(To.getNodeParent() == I->getParent() &&
425 "TODO: We don't support movement across BBs!");
426 assert(
427 (To == std::next(DAGInterval.bottom()->getIterator()) ||
428 (To != BB->end() && std::next(To) == DAGInterval.top()->getIterator()) ||
429 (To != BB->end() && DAGInterval.contains(&*To))) &&
430 "TODO: To should be either within the DAGInterval or right "
431 "before/after it.");
432
433 // Make a copy of the DAGInterval before we update it.
434 auto OrigDAGInterval = DAGInterval;
435
436 // Maintain the DAGInterval.
437 DAGInterval.notifyMoveInstr(I, To);
438
439 // TODO: Perhaps check if this is legal by checking the dependencies?
440
441 // Update the MemDGNode chain to reflect the instr movement if necessary.
443 if (N == nullptr)
444 return;
445 MemDGNode *MemN = dyn_cast<MemDGNode>(N);
446 if (MemN == nullptr)
447 return;
448
449 // First safely detach it from the existing chain.
450 MemN->detachFromChain();
451
452 // Now insert it back into the chain at the new location.
453 //
454 // We won't always have a DGNode to insert before it. If `To` is BB->end() or
455 // if it points to an instr after DAGInterval.bottom() then we will have to
456 // find a node to insert *after*.
457 //
458 // BB: BB:
459 // I1 I1 ^
460 // I2 I2 | DAGInteval [I1 to I3]
461 // I3 I3 V
462 // I4 I4 <- `To` == right after DAGInterval
463 // <- `To` == BB->end()
464 //
465 if (To == BB->end() ||
466 To == std::next(OrigDAGInterval.bottom()->getIterator())) {
467 // If we don't have a node to insert before, find a node to insert after and
468 // update the chain.
469 DGNode *InsertAfterN = getNode(&*std::prev(To));
470 MemN->setPrevNode(
471 getMemDGNodeBefore(InsertAfterN, /*IncludingN=*/true, /*SkipN=*/MemN));
472 } else {
473 // We have a node to insert before, so update the chain.
474 DGNode *BeforeToN = getNode(&*To);
475 MemN->setPrevNode(
476 getMemDGNodeBefore(BeforeToN, /*IncludingN=*/false, /*SkipN=*/MemN));
477 MemN->setNextNode(
478 getMemDGNodeAfter(BeforeToN, /*IncludingN=*/true, /*SkipN=*/MemN));
479 }
480}
481
482void DependencyGraph::notifyEraseInstr(Instruction *I) {
484 // We don't maintain the DAG while reverting.
485 return;
486 // Update the MemDGNode chain if this is a memory node.
487 if (auto *MemN = dyn_cast_or_null<MemDGNode>(getNodeOrNull(I))) {
488 auto *PrevMemN = getMemDGNodeBefore(MemN, /*IncludingN=*/false);
489 auto *NextMemN = getMemDGNodeAfter(MemN, /*IncludingN=*/false);
490 if (PrevMemN != nullptr)
491 PrevMemN->NextMemN = NextMemN;
492 if (NextMemN != nullptr)
493 NextMemN->PrevMemN = PrevMemN;
494 }
495
496 InstrToNodeMap.erase(I);
497
498 // TODO: Update the dependencies.
499}
500
502 if (Instrs.empty())
503 return {};
504
505 Interval<Instruction> InstrsInterval(Instrs);
506 Interval<Instruction> Union = DAGInterval.getUnionInterval(InstrsInterval);
507 auto NewInterval = Union.getSingleDiff(DAGInterval);
508 if (NewInterval.empty())
509 return {};
510
511 createNewNodes(NewInterval);
512
513 // Create the dependencies.
514 //
515 // 1. This is a new DAG, DAGInterval is empty. Fully scan the whole interval.
516 // +---+ - -
517 // | | SrcN | |
518 // | | | | SrcRange |
519 // |New| v | | DstRange
520 // | | DstN - |
521 // | | |
522 // +---+ -
523 // We are scanning for deps with destination in NewInterval and sources in
524 // NewInterval until DstN, for each DstN.
525 auto FullScan = [this](const Interval<Instruction> Intvl) {
526 auto DstRange = MemDGNodeIntervalBuilder::make(Intvl, *this);
527 if (!DstRange.empty()) {
528 for (MemDGNode &DstN : drop_begin(DstRange)) {
529 auto SrcRange = Interval<MemDGNode>(DstRange.top(), DstN.getPrevNode());
530 scanAndAddDeps(DstN, SrcRange);
531 }
532 }
533 };
534 auto MemDAGInterval = MemDGNodeIntervalBuilder::make(DAGInterval, *this);
535 if (MemDAGInterval.empty()) {
536 FullScan(NewInterval);
537 }
538 // 2. The new section is below the old section.
539 // +---+ -
540 // | | |
541 // |Old| SrcN |
542 // | | | |
543 // +---+ | | SrcRange
544 // +---+ | | -
545 // | | | | |
546 // |New| v | | DstRange
547 // | | DstN - |
548 // | | |
549 // +---+ -
550 // We are scanning for deps with destination in NewInterval because the deps
551 // in DAGInterval have already been computed. We consider sources in the whole
552 // range including both NewInterval and DAGInterval until DstN, for each DstN.
553 else if (DAGInterval.bottom()->comesBefore(NewInterval.top())) {
554 auto DstRange = MemDGNodeIntervalBuilder::make(NewInterval, *this);
555 auto SrcRangeFull = MemDAGInterval.getUnionInterval(DstRange);
556 for (MemDGNode &DstN : DstRange) {
557 auto SrcRange =
558 Interval<MemDGNode>(SrcRangeFull.top(), DstN.getPrevNode());
559 scanAndAddDeps(DstN, SrcRange);
560 }
561 }
562 // 3. The new section is above the old section.
563 else if (NewInterval.bottom()->comesBefore(DAGInterval.top())) {
564 // +---+ - -
565 // | | SrcN | |
566 // |New| | | SrcRange | DstRange
567 // | | v | |
568 // | | DstN - |
569 // | | |
570 // +---+ -
571 // +---+
572 // |Old|
573 // | |
574 // +---+
575 // When scanning for deps with destination in NewInterval we need to fully
576 // scan the interval. This is the same as the scanning for a new DAG.
577 FullScan(NewInterval);
578
579 // +---+ -
580 // | | |
581 // |New| SrcN | SrcRange
582 // | | | |
583 // | | | |
584 // | | | |
585 // +---+ | -
586 // +---+ | -
587 // |Old| v | DstRange
588 // | | DstN |
589 // +---+ -
590 // When scanning for deps with destination in DAGInterval we need to
591 // consider sources from the NewInterval only, because all intra-DAGInterval
592 // dependencies have already been created.
593 auto DstRangeOld = MemDAGInterval;
594 auto SrcRange = MemDGNodeIntervalBuilder::make(NewInterval, *this);
595 for (MemDGNode &DstN : DstRangeOld)
596 scanAndAddDeps(DstN, SrcRange);
597 } else {
598 llvm_unreachable("We don't expect extending in both directions!");
599 }
600
601 DAGInterval = Union;
602 return NewInterval;
603}
604
605#ifndef NDEBUG
607 // InstrToNodeMap is unordered so we need to create an ordered vector.
609 Nodes.reserve(InstrToNodeMap.size());
610 for (const auto &Pair : InstrToNodeMap)
611 Nodes.push_back(Pair.second.get());
612 // Sort them based on which one comes first in the BB.
613 sort(Nodes, [](DGNode *N1, DGNode *N2) {
614 return N1->getInstruction()->comesBefore(N2->getInstruction());
615 });
616 for (auto *N : Nodes)
617 N->print(OS, /*PrintDeps=*/true);
618}
619
621 print(dbgs());
622 dbgs() << "\n";
623}
624#endif // NDEBUG
625
626} // namespace llvm::sandboxir
#define I(x, y, z)
Definition: MD5.cpp:58
std::pair< uint64_t, uint64_t > Interval
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
raw_pwrite_stream & OS
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
Definition: ArrayRef.h:41
bool empty() const
empty - Check if the array is empty.
Definition: ArrayRef.h:163
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
void reserve(size_type N)
Definition: SmallVector.h:663
void push_back(const T &Elt)
Definition: SmallVector.h:413
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Definition: SmallVector.h:1196
This class implements an extremely fast bulk output stream that can only output to a stream.
Definition: raw_ostream.h:52
Tracker & getTracker()
Definition: Context.h:224
A DependencyGraph Node that points to an Instruction and contains memory dependency edges.
virtual void print(raw_ostream &OS, bool PrintDeps=true) const
static bool isMemDepCandidate(Instruction *I)
We consider I as a Memory Dependency Candidate instruction if it reads/write memory or if it has side...
unsigned UnscheduledSuccs
The number of unscheduled successors.
SchedBundle * SB
The scheduler bundle that this node belongs to.
bool Scheduled
This is true if this node has been scheduled.
static bool isMemDepNodeCandidate(Instruction *I)
\Returns true if I is a memory dependency candidate instruction.
static bool isFenceLike(Instruction *I)
\Returns true if I is fence like. It excludes non-mem intrinsics.
LLVM_DUMP_METHOD void dump() const
Instruction * getInstruction() const
static bool isStackSaveOrRestoreIntrinsic(Instruction *I)
LLVM_DUMP_METHOD void dump() const
DGNode * getNode(Instruction *I) const
DGNode * getNodeOrNull(Instruction *I) const
Like getNode() but returns nullptr if I is nullptr.
void print(raw_ostream &OS) const
DGNode * getOrCreateNode(Instruction *I)
Interval< Instruction > extend(ArrayRef< Instruction * > Instrs)
Build/extend the dependency graph such that it includes Instrs.
A sandboxir::User with operands, opcode and linked with previous/next instructions in an instruction ...
Definition: Instruction.h:42
bool empty() const
Definition: Interval.h:99
static MemDGNode * getBotMemDGNode(const Interval< Instruction > &Intvl, const DependencyGraph &DAG)
Scans the instruction chain in Intvl bottom-up, returning the bottom-most MemDGNode,...
static MemDGNode * getTopMemDGNode(const Interval< Instruction > &Intvl, const DependencyGraph &DAG)
Scans the instruction chain in Intvl top-down, returning the top-most MemDGNode, or nullptr.
static Interval< MemDGNode > make(const Interval< Instruction > &Instrs, DependencyGraph &DAG)
Given Instrs it finds their closest mem nodes in the interval and returns the corresponding mem range...
A DependencyGraph Node for instructions that may read/write memory, or have some ordering constraints...
virtual void print(raw_ostream &OS, bool PrintDeps=true) const override
Iterate over both def-use and mem dependencies.
bool operator==(const PredIterator &Other) const
@ Reverting
‍Tracking changes
TrackerState getState() const
\Returns the current state of the tracker.
Definition: Tracker.h:507
OperandUseIterator op_iterator
Definition: User.h:97
static ModRefInfo aliasAnalysisGetModRefInfo(BatchAAResults &BatchAA, const Instruction *I, const std::optional< MemoryLocation > &OptLoc)
Equivalent to BatchAA::getModRefInfo().
Definition: Utils.h:123
static std::optional< llvm::MemoryLocation > memoryLocationGetOrNone(const Instruction *I)
Equivalent to MemoryLocation::getOrNone(I).
Definition: Utils.h:85
A SandboxIR Value has users. This is the base class.
Definition: Value.h:63
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
@ BasicBlock
Various leaf nodes.
Definition: ISDOpcodes.h:71
static bool isOrdered(Instruction *I)
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
Definition: STLExtras.h:329
auto reverse(ContainerTy &&C)
Definition: STLExtras.h:420
bool isModSet(const ModRefInfo MRI)
Definition: ModRef.h:48
void sort(IteratorTy Start, IteratorTy End)
Definition: STLExtras.h:1664
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition: Debug.cpp:163
ModRefInfo
Flags indicating whether a memory access modifies or references memory.
Definition: ModRef.h:27
@ ModRef
The access may reference and may modify the value stored in memory.
@ Other
Any other memory.
DWARFExpression::Operation Op
bool isRefSet(const ModRefInfo MRI)
Definition: ModRef.h:51
#define N