24 #define DEBUG_TYPE "memoryssa"
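// getPreviousDefRecursive: starting from BB, find the nearest MemoryAccess that
// reaches it, caching per-block results in CachedPreviousDef and creating
// MemoryPhis at join points (removed again later if they turn out trivial).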
41 auto Cached = CachedPreviousDef.find(BB);
42 if (Cached != CachedPreviousDef.end())
43 return Cached->second;
52 MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef);
53 CachedPreviousDef.insert({BB, Result});
57 if (VisitedBlocks.count(BB)) {
62 CachedPreviousDef.insert({BB, Result});
66 if (VisitedBlocks.insert(BB).second) {
73 bool UniqueIncomingAccess = true;
77 auto *IncomingAccess = getPreviousDefFromEnd(Pred, CachedPreviousDef);
79 SingleAccess = IncomingAccess;
80 else if (IncomingAccess != SingleAccess)
81 UniqueIncomingAccess = false;
92 auto *Result = tryRemoveTrivialPhi(Phi, PhiOps);
94 if (Result == Phi && UniqueIncomingAccess && SingleAccess) {
97 assert(Phi->operands().empty() && "Expected empty Phi");
98 Phi->replaceAllUsesWith(SingleAccess);
101 Result = SingleAccess;
102 } else if (Result == Phi && !(UniqueIncomingAccess && SingleAccess)) {
104 Phi = MSSA->createMemoryPhi(BB);
109 if (Phi->getNumOperands() != 0) {
111 if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) {
119 Phi->addIncoming(&*PhiOps[i++], Pred);
120 InsertedPHIs.push_back(Phi);
126 VisitedBlocks.erase(BB);
127 CachedPreviousDef.insert({BB, Result});
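// getPreviousDef / getPreviousDefInBlock / getPreviousDefFromEnd: helpers that
// look for a dominating def in MA's own block first and fall back to the
// recursive CFG walk above otherwise.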
138 if (auto *LocalResult = getPreviousDefInBlock(MA))
140 DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef;
141 return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef);
148 auto *Defs = MSSA->getWritableBlockDefs(MA->getBlock());
156 if (Iter != Defs->rend())
160 auto End = MSSA->getWritableBlockAccesses(MA->getBlock())->rend();
175 auto *Defs = MSSA->getWritableBlockDefs(BB);
178 CachedPreviousDef.insert({BB, &*Defs->rbegin()});
179 return &*Defs->rbegin();
182 return getPreviousDefRecursive(BB, CachedPreviousDef);
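// tryRemoveTrivialPhi / recursePhi: a MemoryPhi whose operands all resolve to a
// single access (or to the phi itself) is trivial; it is replaced by that access
// and the check is repeated on phi users that may in turn have become trivial.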
188 TrackingVH<MemoryAccess> Res(Phi);
190 std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses));
193 tryRemoveTrivialPhi(UsePhi);
203 assert(Phi && "Can only remove concrete Phi.");
204 auto OperRange = Phi->operands();
205 return tryRemoveTrivialPhi(Phi, OperRange);
207 template <class RangeType>
211 if (NonOptPhis.count(Phi))
215 MemoryAccess *Same = nullptr;
227 return MSSA->getLiveOnEntryDef();
235 return recursePhi(Same);
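// insertUse: give a newly created MemoryUse its defining access and, when
// RenameUses is set, rename downstream accesses that may now be reached by phis
// inserted while computing that access.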
239 VisitedBlocks.clear();
240 InsertedPHIs.clear();
256 if (!RenameUses && !InsertedPHIs.empty()) {
257 auto *Defs = MSSA->getBlockDefs(MU->getBlock());
259 assert((!Defs || (++Defs->begin() == Defs->end())) &&
260 "Block may have only a Phi or no defs");
263 if (RenameUses && InsertedPHIs.size()) {
267 if (auto *Defs = MSSA->getWritableBlockDefs(StartBlock)) {
272 FirstDef = MD->getDefiningAccess();
274 MSSA->renamePass(MU->getBlock(), FirstDef, Visited);
278 for (auto &MP : InsertedPHIs)
280 MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
290 assert(i != -1 && "Should have found the basic block in the phi");
309 if (!MSSA->DT->isReachableFromEntry(MD->getBlock())) {
314 VisitedBlocks.clear();
315 InsertedPHIs.clear();
319 bool DefBeforeSameBlock = false;
323 DefBeforeSameBlock = true;
329 if (DefBeforeSameBlock) {
333 User *Usr = U.getUser();
348 unsigned NewPhiIndex = InsertedPHIs.size();
349 if (!DefBeforeSameBlock) {
371 for (const auto &VH : InsertedPHIs)
373 DefiningBlocks.insert(RealPHI->getBlock());
379 for (auto *BBIDF : IDFBlocks) {
380 auto *MPhi = MSSA->getMemoryAccess(BBIDF);
382 MPhi = MSSA->createMemoryPhi(BBIDF);
385 ExistingPhis.insert(MPhi);
393 NonOptPhis.insert(MPhi);
395 for (auto &MPhi : NewInsertedPHIs) {
396 auto *BBIDF = MPhi->getBlock();
399 MPhi->addIncoming(getPreviousDefFromEnd(Pred, CachedPreviousDef), Pred);
405 NewPhiIndex = InsertedPHIs.size();
406 for (auto &MPhi : NewInsertedPHIs) {
407 InsertedPHIs.push_back(&*MPhi);
415 unsigned NewPhiIndexEnd = InsertedPHIs.size();
416 fixupDefs(FixupList);
417 assert(NewPhiIndexEnd == InsertedPHIs.size() &&
418 "Should not insert new phis during fixupDefs()");
421 unsigned NewPhiSize = NewPhiIndexEnd - NewPhiIndex;
423 tryRemoveTrivialPhis(ArrayRef<WeakVH>(&InsertedPHIs[NewPhiIndex], NewPhiSize));
432 MemoryAccess *FirstDef = &*MSSA->getWritableBlockDefs(StartBlock)->begin();
438 MSSA->renamePass(MD->getBlock(), FirstDef, Visited);
441 for (auto &MP : InsertedPHIs) {
444 MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
448 for (const auto &MP : ExistingPhis) {
451 MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
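// fixupDefs: walk forward from the newly inserted def through successor blocks,
// updating the defining access of reached defs and uses, and stopping where
// another def or phi already provides the value.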
459 for (const auto &Var : Vars) {
469 NonOptPhis.erase(Phi);
472 if (++DefIter != Defs->end()) {
487 while (!Worklist.empty()) {
491 if (auto *Defs = MSSA->getWritableBlockDefs(FixupBlock)) {
492 auto *FirstDef = &*Defs->begin();
495 "Should have already handled phi nodes!");
498 assert(MSSA->dominates(NewDef, FirstDef) &&
499 "Should have dominated the new access");
505 for (const auto *S : successors(FixupBlock)) {
508 if (auto *MP = MSSA->getMemoryAccess(S))
513 if (!Seen.insert(S).second)
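// removeEdge / removeDuplicatePhiEdgesBetween: prune MemoryPhi incoming entries
// after CFG edges between From and To are deleted or merged, then try to remove
// the phi if it became trivial.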
523 if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) {
524 MPhi->unorderedDeleteIncomingBlock(From);
525 tryRemoveTrivialPhi(MPhi);
531 if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) {
541 tryRemoveTrivialPhi(MPhi);
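// getNewDefiningAccessForClone: map a defining access from the original region
// to its counterpart in the cloned region via VMap / MPhiMap.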
569 assert(DefMUDI && "Found MemoryUseOrDef with no Instruction.");
570 if (!IsInClonedRegion(DefMUDI->getParent()))
574 InsnDefining = NewDefMUDI ? MSSA->getMemoryAccess(NewDefMUDI) : nullptr;
578 DefMUD->getDefiningAccess(), VMap, MPhiMap, MSSA, IsInClonedRegion);
583 InsnDefining = NewDefPhi;
585 assert(InsnDefining && "Defining instruction cannot be nullptr.");
589 void MemorySSAUpdater::cloneUsesAndDefs(
592 bool CloneWasSimplified) {
596 for (const MemoryAccess &MA : *Acc) {
606 if (Instruction *NewInsn =
608 MemoryAccess *NewUseOrDef = MSSA->createDefinedAccess(
611 MPhiMap, MSSA, IsInClonedRegion),
612 CloneWasSimplified ? nullptr : MUD,
615 MSSA->insertIntoListsForBlock(NewUseOrDef, NewBB, MemorySSA::End);
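// updatePhisWhenInsertingUniqueBackedgeBlock: the header's MemoryPhi keeps only
// the preheader entry plus a new entry for the backedge block, whose new
// MemoryPhi collects all former backedge incoming values.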
623 auto *MPhi = MSSA->getMemoryAccess(Header);
629 auto *NewMPhi = MSSA->createMemoryPhi(BEBlock);
630 bool HasUniqueIncomingValue = true;
632 for (unsigned I = 0, E = MPhi->getNumIncomingValues(); I != E; ++I) {
635 if (IBB != Preheader) {
636 NewMPhi->addIncoming(IV, IBB);
637 if (HasUniqueIncomingValue) {
640 else if (UniqueValue != IV)
641 HasUniqueIncomingValue = false;
648 auto *AccFromPreheader = MPhi->getIncomingValueForBlock(Preheader);
649 MPhi->setIncomingValue(0, AccFromPreheader);
650 MPhi->setIncomingBlock(0, Preheader);
651 for (unsigned I = MPhi->getNumIncomingValues() - 1; I >= 1; --I)
652 MPhi->unorderedDeleteIncoming(I);
653 MPhi->addIncoming(NewMPhi, BEBlock);
657 tryRemoveTrivialPhi(NewMPhi);
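// updateForClonedLoop / updateForClonedBlockIntoPred /
// updateExitBlocksForClonedLoop: fill in MemoryPhis and cloned accesses for
// cloned blocks using the value maps produced by the cloning utilities.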
663 bool IgnoreIncomingWithNoClones) {
671 assert(Phi && NewPhi && "Invalid Phi nodes.");
675 for (unsigned It = 0, E = Phi->getNumIncomingValues(); It < E; ++It) {
676 MemoryAccess *IncomingAccess = Phi->getIncomingValue(It);
677 BasicBlock *IncBB = Phi->getIncomingBlock(It);
681 else if (IgnoreIncomingWithNoClones)
688 if (!NewPhiBBPreds.count(IncBB))
698 MPhiMap[Phi] = SingleAccess;
708 assert(!MSSA->getWritableBlockAccesses(NewBlock) &&
709 "Cloned block should have no accesses");
712 if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) {
713 MemoryPhi *NewPhi = MSSA->createMemoryPhi(NewBlock);
714 MPhiMap[MPhi] = NewPhi;
717 cloneUsesAndDefs(BB, NewBlock, VMap, MPhiMap, IsInClonedRegion);
720 for (auto *BB : Blocks)
723 for (auto *BB : Blocks)
724 if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB))
740 if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB))
741 MPhiMap[MPhi] = MPhi->getIncomingValueForBlock(P1);
743 BB, P1, VM, MPhiMap, [&](BasicBlock *CheckBB) { return BB == CheckBB; },
747 template <typename Iter>
748 void MemorySSAUpdater::privateUpdateExitBlocksForClonedLoop(
753 for (auto *Exit : ExitBlocks)
766 privateUpdateExitBlocksForClonedLoop(ExitBlocks, std::begin(Arr),
773 auto GetPtr = [&](const std::unique_ptr<ValueToValueMapTy> &I) {
776 using MappedIteratorType =
779 auto MapBegin = MappedIteratorType(VMaps.begin(), GetPtr);
780 auto MapEnd = MappedIteratorType(VMaps.end(), GetPtr);
781 privateUpdateExitBlocksForClonedLoop(ExitBlocks, MapBegin, MapEnd, DT);
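// applyUpdates: split the CFG updates into insertions and deletions, apply phi
// edge deletions directly, and hand edge insertions to applyInsertUpdates
// alongside the dominator tree update.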
789 for (const auto &Update : Updates) {
790 if (Update.getKind() == DT.Insert)
791 InsertUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
793 DeleteUpdates.push_back({DT.Delete, Update.getFrom(), Update.getTo()});
794 RevDeleteUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
798 if (!DeleteUpdates.empty()) {
799 if (!InsertUpdates.empty()) {
831 for (auto &Update : DeleteUpdates)
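// applyInsertUpdates: GetLastDef finds the last def reaching a block by scanning
// the block's def list and walking predecessors / immediate dominators; PredMap
// records, per block, which predecessor edges are newly added and which existed
// before.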
850 return &*(--Defs->end());
855 for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) {
870 if (IDom->getBlock() != BB) {
871 BB = IDom->getBlock();
874 return MSSA->getLiveOnEntryDef();
877 assert(Count == 1 && Pred && "Single predecessor expected.");
880 return MSSA->getLiveOnEntryDef();
890 auto FindNearestCommonDominator =
891 [&](const SmallSetVector<BasicBlock *, 2> &BBSet) -> BasicBlock * {
893 for (auto *BB : BBSet)
900 auto GetNoLongerDomBlocks =
902 SmallVectorImpl<BasicBlock *> &BlocksPrevDom) {
903 if (PrevIDom == CurrIDom)
905 BlocksPrevDom.push_back(PrevIDom);
907 while (BasicBlock *UpIDom =
909 if (UpIDom == CurrIDom)
911 BlocksPrevDom.push_back(UpIDom);
931 SmallSetVector<BasicBlock *, 2> Added;
932 SmallSetVector<BasicBlock *, 2> Prev;
934 SmallDenseMap<BasicBlock *, PredInfo> PredMap;
936 for (const auto &Edge : Updates) {
938 auto &AddedBlockSet = PredMap[BB].Added;
943 SmallDenseMap<std::pair<BasicBlock *, BasicBlock *>,
int> EdgeCountMap;
944 SmallPtrSet<BasicBlock *, 2> NewBlocks;
945 for (auto &BBPredPair : PredMap) {
946 auto *BB = BBPredPair.first;
947 const auto &AddedBlockSet = BBPredPair.second.Added;
948 auto &PrevBlockSet = BBPredPair.second.Prev;
949 for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) {
950 if (!AddedBlockSet.count(Pi))
951 PrevBlockSet.insert(Pi);
952 EdgeCountMap[{Pi, BB}]++;
955 if (PrevBlockSet.empty()) {
956 assert(pred_size(BB) == AddedBlockSet.size() && "Duplicate edges added.");
959 << "Adding a predecessor to a block with no predecessors. "
960 "This must be an edge added to a new, likely cloned, block. "
961 "Its memory accesses must be already correct, assuming completed "
962 "via the updateExitBlocksForClonedLoop API. "
963 "Assert a single such edge is added so no phi addition or "
964 "additional processing is required.\n");
965 assert(AddedBlockSet.size() == 1 &&
966 "Can only handle adding one predecessor to a new block.");
973 for (auto *BB : NewBlocks)
976 SmallVector<BasicBlock *, 16> BlocksWithDefsToReplace;
981 for (const auto &Edge : Updates) {
983 if (PredMap.count(BB) && !MSSA->getMemoryAccess(BB))
984 InsertedPhis.push_back(MSSA->createMemoryPhi(BB));
988 for (auto &BBPredPair : PredMap) {
989 auto *BB = BBPredPair.first;
990 const auto &PrevBlockSet = BBPredPair.second.Prev;
991 const auto &AddedBlockSet = BBPredPair.second.Added;
992 assert(!PrevBlockSet.empty() &&
993 "At least one previous predecessor must exist.");
1001 SmallDenseMap<BasicBlock *, MemoryAccess *> LastDefAddedPred;
1002 for (auto *AddedPred : AddedBlockSet) {
1003 auto *DefPn = GetLastDef(AddedPred);
1004 assert(DefPn != nullptr && "Unable to find last definition.");
1005 LastDefAddedPred[AddedPred] = DefPn;
1008 MemoryPhi *NewPhi = MSSA->getMemoryAccess(BB);
1012 for (auto *Pred : AddedBlockSet) {
1013 auto *LastDefForPred = LastDefAddedPred[Pred];
1014 for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
1020 auto *P1 = *PrevBlockSet.begin();
1021 MemoryAccess *DefP1 = GetLastDef(P1);
1025 bool InsertPhi = false;
1026 for (auto LastDefPredPair : LastDefAddedPred)
1027 if (DefP1 != LastDefPredPair.second) {
1043 for (auto *Pred : AddedBlockSet) {
1044 auto *LastDefForPred = LastDefAddedPred[Pred];
1045 for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
1048 for (auto *Pred : PrevBlockSet)
1049 for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
1056 BasicBlock *PrevIDom = FindNearestCommonDominator(PrevBlockSet);
1057 assert(PrevIDom && "Previous IDom should exists");
1059 assert(NewIDom && "BB should have a new valid idom");
1061 "New idom should dominate old idom");
1062 GetNoLongerDomBlocks(PrevIDom, NewIDom, BlocksWithDefsToReplace);
1065 tryRemoveTrivialPhis(InsertedPhis);
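// Blocks that received a new phi may require further phis on their iterated
// dominance frontier; compute the IDF and create (then fill) phis there.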
1068 SmallVector<BasicBlock *, 8> BlocksToProcess;
1069 for (auto &VH : InsertedPhis)
1071 BlocksToProcess.push_back(MPhi->getBlock());
1075 if (!BlocksToProcess.empty()) {
1079 IDFs.setDefiningBlocks(DefiningBlocks);
1080 IDFs.calculate(IDFBlocks);
1082 SmallSetVector<MemoryPhi *, 4> PhisToFill;
1084 for (auto *BBIDF : IDFBlocks)
1085 if (!MSSA->getMemoryAccess(BBIDF)) {
1086 auto *IDFPhi = MSSA->createMemoryPhi(BBIDF);
1087 InsertedPhis.push_back(IDFPhi);
1088 PhisToFill.insert(IDFPhi);
1091 for (auto *BBIDF : IDFBlocks) {
1092 auto *IDFPhi = MSSA->getMemoryAccess(BBIDF);
1093 assert(IDFPhi && "Phi must exist");
1094 if (!PhisToFill.count(IDFPhi)) {
1097 for (unsigned I = 0, E = IDFPhi->getNumIncomingValues(); I < E; ++I)
1098 IDFPhi->setIncomingValue(I, GetLastDef(IDFPhi->getIncomingBlock(I)));
1100 for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BBIDF))
1101 IDFPhi->addIncoming(GetLastDef(Pi), Pi);
1109 for (auto *BlockWithDefsToReplace : BlocksWithDefsToReplace) {
1110 if (auto DefsList = MSSA->getWritableBlockDefs(BlockWithDefsToReplace)) {
1111 for (auto &DefToReplaceUses : *DefsList) {
1112 BasicBlock *DominatingBlock = DefToReplaceUses.getBlock();
1119 BasicBlock *DominatedBlock = UsrPhi->getIncomingBlock(U);
1120 if (!DT.dominates(DominatingBlock, DominatedBlock))
1121 U.set(GetLastDef(DominatedBlock));
1124 if (!DT.dominates(DominatingBlock, DominatedBlock)) {
1125 if (auto *DomBlPhi = MSSA->getMemoryAccess(DominatedBlock))
1129 assert(IDom && "Block must have a valid IDom.");
1130 U.set(GetLastDef(IDom->getBlock()));
1137 for (auto *Usr : ResetOptimized)
1138 Usr->resetOptimized();
1142 tryRemoveTrivialPhis(InsertedPhis);
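// moveTo / moveAllAccesses: relocate MemoryUseOrDefs when instructions are moved
// or blocks are spliced or merged, marking affected phi users as non-optimizable
// while the move is in flight.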
1146 template <class WhereType>
1150 for (auto *U : What->users())
1152 NonOptPhis.insert(PhiUser);
1158 MSSA->moveTo(What, BB, Where);
1184 return moveTo(What, BB, Where);
1186 if (auto *Where = MSSA->getMemoryAccess(BB->getTerminator()))
1200 assert(Start->getParent() == To && "Incorrect Start instruction");
1208 auto NextIt = ++MUD->getIterator();
1222 auto *Defs = MSSA->getWritableBlockDefs(From);
1223 if (Defs && !Defs->empty())
1225 tryRemoveTrivialPhi(Phi);
1231 assert(MSSA->getBlockAccesses(To) == nullptr &&
1232 "To block is expected to be free of MemoryAccesses.");
1233 moveAllAccesses(From, To, Start);
1235 if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
1236 MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
1242 "From block is expected to have a single predecessor (To).");
1243 moveAllAccesses(From, To, Start);
1245 if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
1246 MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
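// wireOldPredecessorsToNewImmediatePredecessor: when New is inserted between Old
// and some of its predecessors, Old's MemoryPhi entries for those predecessors
// move to a new phi in New (if one is needed).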
1251 bool IdenticalEdgesWereMerged) {
1252 assert(!MSSA->getWritableBlockAccesses(New) &&
1253 "Access list should be null for a new block.");
1254 MemoryPhi *Phi = MSSA->getMemoryAccess(Old);
1259 "Should have moved all predecessors.");
1262 assert(!Preds.empty() && "Must be moving at least one predecessor to the "
1263 "new immediate predecessor.");
1264 MemoryPhi *NewPhi = MSSA->createMemoryPhi(New);
1268 if (!IdenticalEdgesWereMerged)
1270 "If identical edges were not merged, we cannot have duplicate "
1271 "blocks in the predecessors");
1275 if (!IdenticalEdgesWereMerged)
1281 Phi->addIncoming(NewPhi, New);
1282 tryRemoveTrivialPhi(NewPhi);
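// removeMemoryAccess: replace all uses of MA with its defining access (or a
// suitable value for a phi), then drop it from MemorySSA's lookup tables and
// lists; optionally re-check phis that used it for triviality.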
1287 assert(!MSSA->isLiveOnEntryDef(MA) &&
1288 "Trying to remove the live on entry def");
1300 "We can't delete this memory phi");
1322 assert(NewDefTarget != MA && "Going into an infinite loop");
1326 MUD->resetOptimized();
1330 U.set(NewDefTarget);
1336 MSSA->removeFromLookups(MA);
1337 MSSA->removeFromLists(MA);
1340 if (!PhisToCheck.empty()) {
1343 PhisToCheck.clear();
1345 unsigned PhisSize = PhisToOptimize.size();
1346 while (PhisSize-- > 0)
1349 tryRemoveTrivialPhi(MP);
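// removeBlocks / changeToUnreachable: delete the accesses of dead or newly
// unreachable code and prune the corresponding MemoryPhi edges in successor
// blocks.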
1358 assert(TI && "Basic block expected to have a terminator instruction");
1360 if (!DeadBlocks.count(Succ))
1361 if (MemoryPhi *MP = MSSA->getMemoryAccess(Succ)) {
1362 MP->unorderedDeleteIncomingBlock(BB);
1363 tryRemoveTrivialPhi(MP);
1377 MSSA->removeFromLookups(&MA);
1378 MSSA->removeFromLists(&MA);
1384 for (const auto &VH : UpdatedPHIs)
1386 tryRemoveTrivialPhi(MPhi);
1392 auto BBI = I->getIterator(), BBE = BB->end();
1402 MPhi->unorderedDeleteIncomingBlock(BB);
1407 tryRemoveTrivialPhis(UpdatedPHIs);
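// createMemoryAccessInBB / createMemoryAccessBefore / createMemoryAccessAfter:
// create a new MemoryUseOrDef for I and insert it at the requested position in
// the block's access lists.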
1414 I, Definition, nullptr, CreationMustSucceed);
1416 MSSA->insertIntoListsForBlock(NewAccess, BB, Point);
1423 "New and old access must be in the same block");
1425 MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
1433 "New and old access must be in the same block");
1435 MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),