auto Skip = [&DAG](auto OpIt) {
  // ...
  return I == nullptr || DAG.getNode(I) == nullptr;
};
while (OpIt != OpItE && Skip(OpIt))
assert(OpIt != OpItE && "Can't dereference end iterator!");

       "Can't dereference end iterator!");

assert(OpIt != OpItE && "Already at end!");

OpIt = PredIterator::skipBadIt(OpIt, OpItE, *DAG);

OpIt = PredIterator::skipBadIt(OpIt, OpItE, *DAG);
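The predecessor-iterator fragments above all lean on the same helper pattern: advance an operand iterator past operands that have no node in the DAG. Below is a minimal, self-contained sketch of that pattern; skipMissing and GetNodeOrNull are illustrative stand-ins, not the SandboxIR PredIterator API.

#include <map>
#include <vector>

struct Node {};

// Advance OpIt until it reaches OpItE or points at a value for which the
// lookup returns a node, mirroring the Skip lambda above.
template <typename It, typename LookupFn>
It skipMissing(It OpIt, It OpItE, LookupFn GetNodeOrNull) {
  auto Skip = [&](It CurIt) { return GetNodeOrNull(*CurIt) == nullptr; };
  while (OpIt != OpItE && Skip(OpIt))
    ++OpIt;
  return OpIt;
}

int main() {
  std::map<int, Node> DAG = {{1, Node{}}, {3, Node{}}}; // Values with nodes.
  std::vector<int> Operands = {0, 2, 3, 4};             // Operand list.
  auto GetNodeOrNull = [&](int V) -> Node * {
    auto It = DAG.find(V);
    return It == DAG.end() ? nullptr : &It->second;
  };
  // Skips 0 and 2 (no DAG node) and stops at 3.
  auto It = skipMissing(Operands.begin(), Operands.end(), GetNodeOrNull);
  return (It != Operands.end() && *It == 3) ? 0 : 1;
}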
assert(DAG == Other.DAG && "Iterators of different DAGs!");
assert(N == Other.N && "Iterators of different nodes!");
return OpIt == Other.OpIt && MemIt == Other.MemIt;
if (this->SB != nullptr)
  this->SB->eraseFromBundle(this);

SB->eraseFromBundle(this);
static constexpr const unsigned Indent = 4;
for (auto *Pred : MemPreds)
  OS.indent(Indent) << "<-" << *Pred->getInstruction() << "\n";

I = I->getNextNode();

I = I->getPrevNode();
if (TopMemN == nullptr)
  // ...

assert(BotMemN != nullptr && "TopMemN should be null too!");

DependencyGraph::DependencyType
DependencyGraph::getRoughDepType(Instruction *SrcI, Instruction *DstI) {

  return DependencyType::ReadAfterWrite;

  return DependencyType::WriteAfterWrite;

  return DependencyType::WriteAfterRead;

  return DependencyType::Control;

  return DependencyType::Control;

  return DependencyType::Other;
  return DependencyType::None;
return !LI->isUnordered();

return !SI->isUnordered();

bool Is = IsOrdered(I);

       "An ordered instruction must be a MemDepCandidate!");

                              DependencyType DepType) {

std::optional<MemoryLocation> DstLocOpt =

       "Expected a mem instr");

case DependencyType::ReadAfterWrite:
case DependencyType::WriteAfterWrite:

case DependencyType::WriteAfterRead:
DependencyType RoughDepType = getRoughDepType(SrcI, DstI);
switch (RoughDepType) {
case DependencyType::ReadAfterWrite:
case DependencyType::WriteAfterWrite:
case DependencyType::WriteAfterRead:
  return alias(SrcI, DstI, RoughDepType);
case DependencyType::Control:

case DependencyType::Other:

case DependencyType::None:
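The switch above shows a two-step dependency check: a cheap classification into a rough dependency type, followed by an alias query only for the memory-vs-memory kinds. The following self-contained sketch shows that shape with stand-in types; MemOp, its flag fields, and mayAlias() are assumptions for illustration, not the SandboxIR or BatchAA interfaces.

#include <cstdio>

enum class DependencyType {
  ReadAfterWrite,  // Load after store.
  WriteAfterWrite, // Store after store.
  WriteAfterRead,  // Store after load.
  Control,         // Fences and similar ordering constraints.
  Other,           // Side effects without a memory access.
  None,
};

struct MemOp {
  bool MayRead;
  bool MayWrite;
  bool FenceLike;
};

static DependencyType getRoughDepType(const MemOp &Src, const MemOp &Dst) {
  if (Src.MayWrite && Dst.MayRead)
    return DependencyType::ReadAfterWrite;
  if (Src.MayWrite && Dst.MayWrite)
    return DependencyType::WriteAfterWrite;
  if (Src.MayRead && Dst.MayWrite)
    return DependencyType::WriteAfterRead;
  if (Src.FenceLike || Dst.FenceLike)
    return DependencyType::Control;
  return DependencyType::None;
}

// Stand-in for an alias query; always conservative here.
static bool mayAlias(const MemOp &, const MemOp &) { return true; }

static bool hasDep(const MemOp &Src, const MemOp &Dst) {
  switch (getRoughDepType(Src, Dst)) {
  case DependencyType::ReadAfterWrite:
  case DependencyType::WriteAfterWrite:
  case DependencyType::WriteAfterRead:
    return mayAlias(Src, Dst); // Only these kinds need an alias query.
  case DependencyType::Control:
  case DependencyType::Other:
    return true; // Conservatively ordered.
  case DependencyType::None:
    return false;
  }
  return true;
}

int main() {
  MemOp Store = {false, true, false};
  MemOp Load = {true, false, false};
  std::printf("store->load dep: %d\n", hasDep(Store, Load)); // RAW, queried.
  std::printf("load->load dep: %d\n", hasDep(Load, Load));   // None.
  return 0;
}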
void DependencyGraph::scanAndAddDeps(MemDGNode &DstN,

       "DstN is the mem dep destination, so it must be mem");

if (hasDep(SrcI, DstI))
  DstN.addMemPred(&SrcN);

void DependencyGraph::setDefUseUnscheduledSuccs(

if (OpI->getParent() != I.getParent())

if (!NewInterval.contains(OpI))

++OpN->UnscheduledSuccs;
bool NewIsAbove = DAGInterval.empty() || NewInterval.comesBefore(DAGInterval);
const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;

if (BotN->scheduled())

for (Value *Op : BotI.operands()) {

if (!TopInterval.contains(OpI))

++OpN->UnscheduledSuccs;
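Both fragments above increment UnscheduledSuccs on the defining node of each operand the DAG tracks. A rough sketch of what that counter buys a bottom-up scheduler follows; Node and the helper functions are illustrative, not the DGNode class: a definition becomes ready once every tracked def-use successor has been scheduled.

#include <cassert>
#include <vector>

struct Node {
  unsigned UnscheduledSuccs = 0; // Def-use successors not yet scheduled.
  bool Scheduled = false;
  std::vector<Node *> Operands;  // Defining nodes this node uses.
};

// Adding a node to the DAG: each tracked operand gains one more
// unscheduled successor.
static void addNode(Node &N) {
  for (Node *Op : N.Operands)
    ++Op->UnscheduledSuccs;
}

// Scheduling a node: its operands lose one unscheduled successor each.
static void schedule(Node &N) {
  N.Scheduled = true;
  for (Node *Op : N.Operands) {
    assert(Op->UnscheduledSuccs > 0 && "Counter underflow!");
    --Op->UnscheduledSuccs;
  }
}

int main() {
  Node Def, UseA, UseB;
  UseA.Operands = {&Def};
  UseB.Operands = {&Def};
  addNode(UseA);
  addNode(UseB);
  assert(Def.UnscheduledSuccs == 2);
  schedule(UseA);
  schedule(UseB);
  assert(Def.UnscheduledSuccs == 0); // Def is now ready (bottom-up).
  return 0;
}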
MemN->setPrevNode(LastMemN);

if (!DAGInterval.empty()) {
  bool NewIsAbove = NewInterval.comesBefore(DAGInterval);
  const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
  const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;

assert((LinkTopN == nullptr || LinkBotN == nullptr ||
        LinkTopN->comesBefore(LinkBotN)) &&

if (LinkTopN != nullptr && LinkBotN != nullptr) {
  LinkTopN->setNextNode(LinkBotN);
auto UnionIntvl = DAGInterval.getUnionInterval(NewInterval);

if (ChainTopN != nullptr && ChainBotN != nullptr) {
  for (auto *N = ChainTopN->getNextNode(), *LastN = ChainTopN; N != nullptr;
       LastN = N, N = N->getNextNode()) {
    assert(N == LastN->getNextNode() && "Bad chain!");
    assert(N->getPrevNode() == LastN && "Bad chain!");

setDefUseUnscheduledSuccs(NewInterval);
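The "Bad chain!" assertions above verify a doubly linked chain of memory nodes threaded through the DAG, so memory-dependency scans only visit memory instructions. A minimal stand-alone sketch of stitching and verifying such a chain follows; MemNode, linkAfter, and verifyChain are stand-ins for the SandboxIR node API.

#include <cassert>

struct MemNode {
  MemNode *PrevMemN = nullptr;
  MemNode *NextMemN = nullptr;
};

// Link B right after A, keeping both directions consistent, in the spirit
// of the setPrevNode/setNextNode calls above.
static void linkAfter(MemNode &A, MemNode &B) {
  A.NextMemN = &B;
  B.PrevMemN = &A;
}

// Walk top-down and check that the two directions agree, mirroring the
// "Bad chain!" assertions in the fragment above.
static void verifyChain(MemNode *Top) {
  if (Top == nullptr)
    return;
  for (MemNode *LastN = Top, *N = Top->NextMemN; N != nullptr;
       LastN = N, N = N->NextMemN) {
    assert(N == LastN->NextMemN && "Bad chain!");
    assert(N->PrevMemN == LastN && "Bad chain!");
  }
}

int main() {
  MemNode A, B, C;
  linkAfter(A, B);
  linkAfter(B, C);
  verifyChain(&A);
  return 0;
}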
for (auto *PrevI = IncludingN ? I : I->getPrevNode(); PrevI != nullptr;
     PrevI = PrevI->getPrevNode()) {

if (PrevN == nullptr)

if (PrevMemN != nullptr && PrevMemN != SkipN)
for (auto *NextI = IncludingN ? I : I->getNextNode(); NextI != nullptr;
     NextI = NextI->getNextNode()) {

if (NextN == nullptr)

if (NextMemN != nullptr && NextMemN != SkipN)
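Both loops above answer the same question: starting from an instruction, what is the nearest instruction, scanning backwards or forwards, that owns a memory DAG node, optionally skipping one given node? A small self-contained sketch of the backward scan, with a plain doubly linked instruction list and a lookup map as stand-ins for the SandboxIR classes:

#include <map>

struct Instr {
  Instr *Prev = nullptr;
  Instr *Next = nullptr;
};

struct MemNode {};

// Walk backwards from I (optionally including I itself) and return the first
// memory node found, skipping SkipN; nullptr if none exists.
static MemNode *getMemNodeBefore(Instr *I, bool IncludingI,
                                 const std::map<Instr *, MemNode *> &MemNodes,
                                 MemNode *SkipN = nullptr) {
  for (Instr *PrevI = IncludingI ? I : I->Prev; PrevI != nullptr;
       PrevI = PrevI->Prev) {
    auto It = MemNodes.find(PrevI);
    if (It == MemNodes.end())
      continue; // Not a memory instruction.
    if (It->second != SkipN)
      return It->second;
  }
  return nullptr;
}

int main() {
  Instr I0, I1, I2; // I0 <-> I1 <-> I2
  I0.Next = &I1; I1.Prev = &I0;
  I1.Next = &I2; I2.Prev = &I1;
  MemNode M0;       // Only I0 is a memory instruction.
  std::map<Instr *, MemNode *> MemNodes = {{&I0, &M0}};
  return getMemNodeBefore(&I2, /*IncludingI=*/false, MemNodes) == &M0 ? 0 : 1;
}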
void DependencyGraph::notifyCreateInstr(Instruction *I) {

if (!(DAGInterval.contains(I) || DAGInterval.touches(I)))

DAGInterval = DAGInterval.getUnionInterval({I, I});
if (MemN != nullptr) {
  if (auto *PrevMemN = getMemDGNodeBefore(MemN, false)) {
    PrevMemN->NextMemN = MemN;
    MemN->PrevMemN = PrevMemN;

  if (auto *NextMemN = getMemDGNodeAfter(MemN, false)) {
    NextMemN->PrevMemN = MemN;
    MemN->NextMemN = NextMemN;

if (DAGInterval.top()->comesBefore(I)) {

  scanAndAddDeps(*MemN, SrcInterval);

if (I->comesBefore(DAGInterval.bottom())) {
void DependencyGraph::notifyMoveInstr(Instruction *I, const BBIterator &To) {

assert(!(To != BB->end() && &*To == I->getNextNode()) &&
       !(To == BB->end() && std::next(I->getIterator()) == BB->end()) &&
       "Should not have been called if destination is same as origin.");

assert(To.getNodeParent() == I->getParent() &&
       "TODO: We don't support movement across BBs!");

       (To == std::next(DAGInterval.bottom()->getIterator()) ||
        (To != BB->end() && std::next(To) == DAGInterval.top()->getIterator()) ||
        (To != BB->end() && DAGInterval.contains(&*To))) &&
       "TODO: To should be either within the DAGInterval or right "

auto OrigDAGInterval = DAGInterval;

DAGInterval.notifyMoveInstr(I, To);
MemN->detachFromChain();

if (To == BB->end() ||
    To == std::next(OrigDAGInterval.bottom()->getIterator())) {

  getMemDGNodeBefore(InsertAfterN, true, MemN));

  getMemDGNodeBefore(BeforeToN, false, MemN));

  getMemDGNodeAfter(BeforeToN, true, MemN));
auto *PrevMemN = getMemDGNodeBefore(MemN, false);
auto *NextMemN = getMemDGNodeAfter(MemN, false);
if (PrevMemN != nullptr)
  PrevMemN->NextMemN = NextMemN;
if (NextMemN != nullptr)
  NextMemN->PrevMemN = PrevMemN;

while (!MemN->memPreds().empty()) {
  auto *PredN = *MemN->memPreds().begin();
  MemN->removeMemPred(PredN);

while (!MemN->memSuccs().empty()) {
  auto *SuccN = *MemN->memSuccs().begin();
  SuccN->removeMemPred(MemN);

for (auto *PredN : N->preds(*this))
  PredN->decrUnscheduledSuccs();

InstrToNodeMap.erase(I);
void DependencyGraph::notifySetUse(const Use &U, Value *NewSrc) {

if (auto *CurrSrcN = getNode(CurrSrcI)) {
  CurrSrcN->decrUnscheduledSuccs();

if (auto *NewSrcN = getNode(NewSrcI)) {
  ++NewSrcN->UnscheduledSuccs;
auto NewInterval = Union.getSingleDiff(DAGInterval);
if (NewInterval.empty())

createNewNodes(NewInterval);

if (!DstRange.empty()) {

  scanAndAddDeps(DstN, SrcRange);

if (MemDAGInterval.empty()) {
  FullScan(NewInterval);

else if (DAGInterval.bottom()->comesBefore(NewInterval.top())) {

  auto SrcRangeFull = MemDAGInterval.getUnionInterval(DstRange);

  scanAndAddDeps(DstN, SrcRange);

else if (NewInterval.bottom()->comesBefore(DAGInterval.top())) {

  FullScan(NewInterval);

auto DstRangeOld = MemDAGInterval;

  scanAndAddDeps(DstN, SrcRange);
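The extension logic above is incremental: it unions the interval the DAG already covers with the requested instructions and only creates nodes and scans dependencies for the part that is new. A rough sketch with plain integer ranges standing in for the SandboxIR Interval class; unionWith and singleDiff are assumed helpers for illustration only.

#include <algorithm>
#include <cassert>
#include <cstdio>

// A closed range [Top, Bot] of instruction positions; empty when Top > Bot.
struct Range {
  int Top, Bot;
  Range(int T, int B) : Top(T), Bot(B) {}
  bool empty() const { return Top > Bot; }
  Range unionWith(const Range &O) const {
    if (empty())
      return O;
    if (O.empty())
      return *this;
    return Range(std::min(Top, O.Top), std::max(Bot, O.Bot));
  }
  // The single sub-range of *this not already covered by Old (new
  // instructions are expected either above or below the old interval).
  Range singleDiff(const Range &Old) const {
    if (Old.empty())
      return *this;
    if (Top < Old.Top)
      return Range(Top, Old.Top - 1);
    if (Bot > Old.Bot)
      return Range(Old.Bot + 1, Bot);
    return Range(0, -1); // Nothing new.
  }
};

int main() {
  Range DAGRange(10, 20);                 // Already built.
  Range Requested(10, 25);                // Extension request.
  Range Union = DAGRange.unionWith(Requested);
  Range New = Union.singleDiff(DAGRange); // Only [21, 25] needs work.
  assert(!New.empty() && New.Top == 21 && New.Bot == 25);
  std::printf("create nodes and scan deps for [%d, %d]\n", New.Top, New.Bot);
  return 0;
}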
Nodes.reserve(InstrToNodeMap.size());
for (const auto &Pair : InstrToNodeMap)

for (auto *N : Nodes)