#include "llvm/Config/llvm-config.h"

#define DEBUG_TYPE "arm-cp-islands"

#define ARM_CP_ISLANDS_OPT_NAME \
  "ARM constant island placement and branch shortening pass"
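// Summary: this pass places constant pool entries ("islands") into the
// instruction stream close enough to their users to satisfy the limited
// displacement of ARM/Thumb PC-relative loads, then shrinks or rewrites
// branches whose targets come within reach of shorter encodings. Placement
// and branch fixup interact, so both run inside one convergence loop.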
STATISTIC(NumSplit,      "Number of uncond branches inserted");
STATISTIC(NumCBrFixed,   "Number of cond branches fixed");
STATISTIC(NumUBrFixed,   "Number of uncond branches fixed");
STATISTIC(NumTBs,        "Number of table branches generated");
STATISTIC(NumT2CPShrunk, "Number of Thumb2 constantpool instructions shrunk");
STATISTIC(NumT2BrShrunk, "Number of Thumb2 immediate branches shrunk");
STATISTIC(NumJTMoved,    "Number of jump table destination blocks moved");
STATISTIC(NumJTInserted, "Number of jump table intermediate blocks inserted");
STATISTIC(NumLEInserted, "Number of LE backwards branches inserted");

static cl::opt<bool>
AdjustJumpTableBlocks("arm-adjust-jump-tables", cl::Hidden, cl::init(true),
          cl::desc("Adjust basic block layout to better use TB[BH]"));

static cl::opt<unsigned>
CPMaxIteration("arm-constant-island-max-iteration", cl::Hidden, cl::init(30),
          cl::desc("The max number of iteration for converge"));

static cl::opt<bool> SynthesizeThumb1TBB(
    "arm-synthesize-thumb-1-tbb", cl::Hidden, cl::init(true),
    cl::desc("Use compressed jump tables in Thumb-1 by synthesizing an "
             "equivalent to the TBB/TBH instructions"));
  std::unique_ptr<ARMBasicBlockUtils> BBUtils = nullptr;

  std::vector<MachineBasicBlock*> WaterList;
  SmallPtrSet<MachineBasicBlock *, 4> NewWaterList;

  using water_iterator = std::vector<MachineBasicBlock *>::iterator;
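  // "Water" is this pass's term for a gap where an island can live without
  // being executed: the end of any block that does not fall through to its
  // layout successor. WaterList stays sorted by block number so candidate
  // spots can be found with lower_bound; NewWaterList remembers water created
  // during the current iteration.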
  struct CPUser {
    MachineInstr *MI;
    MachineInstr *CPEMI;
    MachineBasicBlock *HighWaterMark;
    unsigned MaxDisp;
    bool NegOk;
    bool IsSoImm;
    bool KnownAlignment = false;

    CPUser(MachineInstr *mi, MachineInstr *cpemi, unsigned maxdisp,
           bool neg, bool soimm)
        : MI(mi), CPEMI(cpemi), MaxDisp(maxdisp), NegOk(neg), IsSoImm(soimm) {
      HighWaterMark = CPEMI->getParent();
    }
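    // The raw MaxDisp is derated below: 2 bytes are reserved for padding the
    // entry may need while the user's final alignment is unknown, and 2 more
    // are always held back as a safety margin.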
    unsigned getMaxDisp() const {
      return (KnownAlignment ? MaxDisp : MaxDisp - 2) - 2;
    }
  };
  std::vector<CPUser> CPUsers;

  struct CPEntry {
    MachineInstr *CPEMI;
    unsigned CPI;
    unsigned RefCount;

    CPEntry(MachineInstr *cpemi, unsigned cpi, unsigned rc = 0)
        : CPEMI(cpemi), CPI(cpi), RefCount(rc) {}
  };

  std::vector<std::vector<CPEntry>> CPEntries;
  struct ImmBranch {
    MachineInstr *MI;
    unsigned MaxDisp : 31;
    bool isCond : 1;
    unsigned UncondBr;

    ImmBranch(MachineInstr *mi, unsigned maxdisp, bool cond, unsigned ubr)
        : MI(mi), MaxDisp(maxdisp), isCond(cond), UncondBr(ubr) {}
  };

  std::vector<ImmBranch> ImmBranches;
  bool isPositionIndependentOrROPI;

  void doInitialConstPlacement(std::vector<MachineInstr *> &CPEMIs);
  void doInitialJumpTablePlacement(std::vector<MachineInstr *> &CPEMIs);
  CPEntry *findConstPoolEntry(unsigned CPI, const MachineInstr *CPEMI);
  void scanFunctionJumpTables();
  void initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs);
  bool decrementCPEReferenceCount(unsigned CPI, MachineInstr* CPEMI);
  int findInRangeCPEntry(CPUser& U, unsigned UserOffset);
  bool findAvailableWater(CPUser &U, unsigned UserOffset,
                          water_iterator &WaterIter, bool CloserWater);
  void createNewWater(unsigned CPUserIndex, unsigned UserOffset,
                      MachineBasicBlock *&NewMBB);
  bool handleConstantPoolUser(unsigned CPUserIndex, bool CloserWater);
  bool removeUnusedCPEntries();
  bool isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                        MachineInstr *CPEMI, unsigned Disp, bool NegOk,
                        bool DoDump = false);
  bool isWaterInRange(unsigned UserOffset, MachineBasicBlock *Water,
                      CPUser &U, unsigned &Growth);
  bool fixupImmediateBr(ImmBranch &Br);
  bool fixupConditionalBr(ImmBranch &Br);
  bool fixupUnconditionalBr(ImmBranch &Br);
  bool optimizeThumb2Instructions();
  bool optimizeThumb2Branches();
  bool reorderThumb2JumpTables();
  bool preserveBaseRegister(MachineInstr *JumpMI, MachineInstr *LEAMI,
                            unsigned &DeadSize, bool &CanDeleteLEA,
                            bool &BaseRegKill);
  bool optimizeThumb2JumpTables();

  unsigned getUserOffset(CPUser&) const;

  bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                       unsigned Disp, bool NegativeOK, bool IsSoImm = false);
  bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                       const CPUser &U) {
    return isOffsetInRange(UserOffset, TrialOffset,
                           U.getMaxDisp(), U.NegOk, U.IsSoImm);
  }
char ARMConstantIslands::ID = 0;
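// verify() is a debug-build self-check: block offsets must stay sorted in
// layout order, and every constant pool user must reach its entry even with
// the getMaxDisp() safety slack added back (hence getMaxDisp() + 2 below).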
void ARMConstantIslands::verify() {
#ifndef NDEBUG
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  assert(is_sorted(*MF, [&BBInfo](const MachineBasicBlock &LHS,
                                  const MachineBasicBlock &RHS) {
    return BBInfo[LHS.getNumber()].postOffset() <
           BBInfo[RHS.getNumber()].postOffset();
  }));
  LLVM_DEBUG(dbgs() << "Verifying " << CPUsers.size() << " CP users.\n");
  for (CPUser &U : CPUsers) {
    unsigned UserOffset = getUserOffset(U);
    // Verify offset using the real max displacement without the safety
    // adjustment.
    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, U.getMaxDisp()+2, U.NegOk,
                         /* DoDump = */ true)) {
      LLVM_DEBUG(dbgs() << "OK\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Out of range.\n");
    dumpBBs();
    LLVM_DEBUG(MF->dump());
    llvm_unreachable("Constant pool entry out of range!");
  }
#endif
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// print block size and offset information - debugging
LLVM_DUMP_METHOD void ARMConstantIslands::dumpBBs() {
  LLVM_DEBUG({
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    for (unsigned J = 0, E = BBInfo.size(); J != E; ++J) {
      const BasicBlockInfo &BBI = BBInfo[J];
      dbgs() << format("%08x %bb.%u\t", BBI.Offset, J)
             << " kb=" << unsigned(BBI.KnownBits)
             << " ua=" << unsigned(BBI.Unalign) << " pa=" << Log2(BBI.PostAlign)
             << format(" size=%#x\n", BBInfo[J].Size);
    }
  });
}
#endif
// AlignBlocks - align blocks that cannot be reached by fallthrough to the
// preferred loop alignment.
static bool AlignBlocks(MachineFunction *MF, const ARMSubtarget *STI) {
  // ...
  const Align Alignment = TLI->getPrefLoopAlignment();
  // ...
  bool Changed = false;
  bool PrevCanFallthough = true;
  for (auto &MBB : *MF) {
    if (!PrevCanFallthough) {
      Changed = true;
      MBB.setAlignment(Alignment);
    }

    PrevCanFallthough = MBB.canFallThrough();

    // For low-overhead loops, the t2LoopEnd blocks may be reordered such that
    // they fall through to the following block.
    for (const auto &MI : reverse(MBB.instrs())) {
      if (MI.getOpcode() == ARM::t2B &&
          MI.getIterator() == MBB.getLastNonDebugInstr())
        continue;
      if (MI.getOpcode() == ARM::tBcc || MI.getOpcode() == ARM::t2Bcc ||
          MI.getOpcode() == ARM::t2LoopEnd ||
          MI.getOpcode() == ARM::t2LoopEndDec) {
        PrevCanFallthough = true;
        break;
      }
      // ...
    }
  }

  return Changed;
}
bool ARMConstantIslands::runOnMachineFunction(MachineFunction &mf) {
  MF = &mf;
  MCP = mf.getConstantPool();
  BBUtils = std::make_unique<ARMBasicBlockUtils>(mf);

  LLVM_DEBUG(dbgs() << "***** ARMConstantIslands: "
                    << MCP->getConstants().size() << " CP entries, aligned to "
                    << MCP->getConstantPoolAlign().value() << " bytes *****\n");

  STI = &MF->getSubtarget<ARMSubtarget>();
  TII = STI->getInstrInfo();
  isPositionIndependentOrROPI =
      STI->getTargetLowering()->isPositionIndependent() || STI->isROPI();
  AFI = MF->getInfo<ARMFunctionInfo>();
  DT = &getAnalysis<MachineDominatorTreeWrapperPass>().getDomTree();

  isThumb = AFI->isThumbFunction();
  isThumb1 = AFI->isThumb1OnlyFunction();
  isThumb2 = AFI->isThumb2Function();

  bool GenerateTBB = isThumb2 || (isThumb1 && SynthesizeThumb1TBB);
  // TBB generation has not been adapted to deal with speculation barriers.
  if (STI->hardenSlsRetBr())
    GenerateTBB = false;

  // Renumber all of the machine basic blocks in the function, guaranteeing
  // that the numbers agree with the position of the block in the function.
  MF->RenumberBlocks();

  // Try to reorder and otherwise adjust the block layout to make good use
  // of the TB[BH] instructions.
  bool MadeChange = false;
  if (GenerateTBB && AdjustJumpTableBlocks) {
    scanFunctionJumpTables();
    MadeChange |= reorderThumb2JumpTables();
    // Data is out of date, so clear it. It'll be re-computed later.
    T2JumpTables.clear();
    // Blocks may have shifted around. Keep the numbering up to date.
    MF->RenumberBlocks();
  }

  // Perform the initial placement of the constant pool entries.  To start
  // with, we put them all at the end of the function.
  std::vector<MachineInstr*> CPEMIs;
  if (!MCP->isEmpty())
    doInitialConstPlacement(CPEMIs);

  if (MF->getJumpTableInfo())
    doInitialJumpTablePlacement(CPEMIs);

  // The next UID to take is the first unused one.
  AFI->initPICLabelUId(CPEMIs.size());

  // Do the initial scan of the function, building up information about the
  // sizes of each block, the location of all the water, and finding all of
  // the constant pool users.
  initializeFunctionInfo(CPEMIs);
  CPEMIs.clear();
  LLVM_DEBUG(dumpBBs());

  // Functions with jump tables need an alignment of 4 because they use the
  // ADR instruction, which aligns the PC to 4 bytes before adding an offset.
  if (!T2JumpTables.empty())
    MF->ensureAlignment(Align(4));

  // Remove dead constant pool entries.
  MadeChange |= removeUnusedCPEntries();
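  // Placing islands and fixing branches feed into each other: moving an entry
  // shifts block offsets, which can push a branch out of range, and splitting
  // a block for a branch changes the available water. Both are therefore
  // iterated to a fixed point, bounded by CPMaxIteration.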
  unsigned NoCPIters = 0, NoBRIters = 0;
  while (true) {
    LLVM_DEBUG(dbgs() << "Beginning CP iteration #" << NoCPIters << '\n');
    bool CPChange = false;
    for (unsigned i = 0, e = CPUsers.size(); i != e; ++i)
      // For most inputs, it converges in no more than 5 iterations.
      // If it doesn't end in 10, the input may have huge BB's.
      // In this case, we will try a different heuristic.
      CPChange |= handleConstantPoolUser(i, NoCPIters >= CPMaxIteration / 2);
    if (CPChange && ++NoCPIters > CPMaxIteration)
      report_fatal_error("Constant Island pass failed to converge!");
    LLVM_DEBUG(dumpBBs());

    // Clear NewWaterList now.  If we split a block for branches, it should
    // appear as "new water" for the next iteration of constant pool placement.
    NewWaterList.clear();

    LLVM_DEBUG(dbgs() << "Beginning BR iteration #" << NoBRIters << '\n');
    bool BRChange = false;
    for (unsigned i = 0, e = ImmBranches.size(); i != e; ++i)
      BRChange |= fixupImmediateBr(ImmBranches[i]);
    if (BRChange && ++NoBRIters > 30)
      report_fatal_error("Branch Fix Up pass failed to converge!");
    LLVM_DEBUG(dumpBBs());

    if (!CPChange && !BRChange)
      break;
    MadeChange = true;
  }

  // Shrink 32-bit Thumb2 load and store instructions.
  if (isThumb2 && !STI->prefers32BitThumb())
    MadeChange |= optimizeThumb2Instructions();

  // Shrink 32-bit branch instructions.
  if (isThumb && STI->hasV8MBaselineOps())
    MadeChange |= optimizeThumb2Branches();

  // Optimize jump tables using TBB / TBH.
  if (GenerateTBB && !STI->genExecuteOnly())
    MadeChange |= optimizeThumb2JumpTables();

  // After a while, this might be made debug-only, but it is not expensive.
  verify();

  // Save the mapping between original and cloned constpool entries.
  for (unsigned i = 0, e = CPEntries.size(); i != e; ++i) {
    for (unsigned j = 0, je = CPEntries[i].size(); j != je; ++j) {
      const CPEntry &CPE = CPEntries[i][j];
      if (CPE.CPEMI && CPE.CPEMI->getOperand(1).isCPI())
        AFI->recordCPEClone(i, CPE.CPI);
    }
  }

  LLVM_DEBUG(dbgs() << '\n'; dumpBBs());

  BBUtils->clear();
  WaterList.clear();
  CPUsers.clear();
  CPEntries.clear();
  JumpTableEntryIndices.clear();
  JumpTableUserIndices.clear();
  ImmBranches.clear();
  PushPopMIs.clear();
  T2JumpTables.clear();

  return MadeChange;
}
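// doInitialConstPlacement parks every constant pool entry in one island at
// the end of the function; the convergence loop above then clones entries
// forward into range of their users as needed. Entries are kept in
// descending alignment order so the island needs no internal padding.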
void
ARMConstantIslands::doInitialConstPlacement(std::vector<MachineInstr*> &CPEMIs) {
  // Create the basic block to hold the CPE's.
  MachineBasicBlock *BB = MF->CreateMachineBasicBlock();
  MF->push_back(BB);

  // MachineConstantPool measures alignment in bytes.
  const Align MaxAlign = MCP->getConstantPoolAlign();
  const unsigned MaxLogAlign = Log2(MaxAlign);

  // Mark the basic block as required by the const-pool.
  BB->setAlignment(MaxAlign);

  // The function needs to be as aligned as the basic blocks. The linker may
  // move functions around based on their alignment.
  Align FuncAlign = MaxAlign;
  if (MaxAlign == 2)
    FuncAlign = Align(4);
  MF->ensureAlignment(FuncAlign);

  // Order the entries in BB by descending alignment.  That ensures correct
  // alignment of all entries as long as BB is sufficiently aligned.  Keep
  // track of the insertion point for each alignment.
  SmallVector<MachineBasicBlock::iterator, 8> InsPoint(MaxLogAlign + 1,
                                                       BB->end());

  // Add all of the constants from the constant pool to the end block, using
  // an identity mapping of CPI's to CPE's.
  const std::vector<MachineConstantPoolEntry> &CPs = MCP->getConstants();

  const DataLayout &TD = MF->getDataLayout();
  for (unsigned i = 0, e = CPs.size(); i != e; ++i) {
    unsigned Size = CPs[i].getSizeInBytes(TD);
    Align Alignment = CPs[i].getAlign();
    // Verify that all constant pool entries are a multiple of their alignment.
    assert(isAligned(Alignment, Size) && "CP Entry not multiple of 4 bytes!");

    // Insert CONSTPOOL_ENTRY before entries with a smaller alignment.
    unsigned LogAlign = Log2(Alignment);
    MachineBasicBlock::iterator InsAt = InsPoint[LogAlign];
    MachineInstr *CPEMI =
        BuildMI(*BB, InsAt, DebugLoc(), TII->get(ARM::CONSTPOOL_ENTRY))
            .addImm(i)
            .addConstantPoolIndex(i)
            .addImm(Size);
    CPEMIs.push_back(CPEMI);

    // Ensure that future entries with higher alignment get inserted before
    // CPEMI. This is bucket sort with iterators.
    for (unsigned a = LogAlign + 1; a <= MaxLogAlign; ++a)
      if (InsPoint[a] == InsAt)
        InsPoint[a] = CPEMI;

    // Add a new CPEntry, but no corresponding CPUser yet.
    CPEntries.emplace_back(1, CPEntry(CPEMI, i));
    LLVM_DEBUG(dbgs() << "Moved CPI#" << i << " to end of function, size = "
                      << Size << ", align = " << Alignment.value() << '\n');
  }
  LLVM_DEBUG(BB->dump());
}
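// Inline jump tables are handled differently from plain constants: a TB[BH]
// or BR_JT dispatch reads its table PC-relative at a small fixed offset, so
// each table is placed in its own block directly after the jump instruction
// that uses it.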
void ARMConstantIslands::doInitialJumpTablePlacement(
    std::vector<MachineInstr *> &CPEMIs) {
  unsigned i = CPEntries.size();
  auto MJTI = MF->getJumpTableInfo();
  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();

  // Only inline jump tables are placed in the function.
  if (MJTI->getEntryKind() != MachineJumpTableInfo::EK_Inline)
    return;

  MachineBasicBlock *LastCorrectlyNumberedBB = nullptr;
  for (MachineBasicBlock &MBB : *MF) {
    auto MI = MBB.getLastNonDebugInstr();
    // ...
    unsigned JTOpcode;
    switch (MI->getOpcode()) {
    default:
      continue;
    case ARM::BR_JTadd:
    case ARM::BR_JTr:
    case ARM::tBR_JTr:
    case ARM::BR_JTm_i12:
    case ARM::BR_JTm_rs:
      // These instructions are emitted only in ARM or Thumb1 modes, which
      // cannot be enabled at the same time as branch protection.
      assert(!MF->getInfo<ARMFunctionInfo>()->branchTargetEnforcement() &&
             "Branch protection must not be enabled for Arm or Thumb1 modes");
      JTOpcode = ARM::JUMPTABLE_ADDRS;
      break;
    case ARM::t2BR_JT:
      JTOpcode = ARM::JUMPTABLE_INSTS;
      break;
    case ARM::tTBB_JT:
    case ARM::t2TBB_JT:
      JTOpcode = ARM::JUMPTABLE_TBB;
      break;
    case ARM::tTBH_JT:
    case ARM::t2TBH_JT:
      JTOpcode = ARM::JUMPTABLE_TBH;
      break;
    }

    unsigned NumOps = MI->getDesc().getNumOperands();
    MachineOperand JTOp =
        MI->getOperand(NumOps - (MI->isPredicable() ? 2 : 1));
    unsigned JTI = JTOp.getIndex();
    unsigned Size = JT[JTI].MBBs.size() * sizeof(uint32_t);
    MachineBasicBlock *JumpTableBB = MF->CreateMachineBasicBlock();
    MF->insert(std::next(MBB.getIterator()), JumpTableBB);
    MachineInstr *CPEMI = BuildMI(*JumpTableBB, JumpTableBB->begin(),
                                  DebugLoc(), TII->get(JTOpcode))
                              .addImm(i++)
                              .addJumpTableIndex(JTI)
                              .addImm(Size);
    CPEMIs.push_back(CPEMI);
    CPEntries.emplace_back(1, CPEntry(CPEMI, JTI));
    JumpTableEntryIndices.insert(std::make_pair(JTI, CPEntries.size() - 1));
    if (!LastCorrectlyNumberedBB)
      LastCorrectlyNumberedBB = &MBB;
  }

  // If we did anything then we need to renumber the subsequent blocks.
  if (LastCorrectlyNumberedBB)
    MF->RenumberBlocks(LastCorrectlyNumberedBB);
}
/// BBHasFallthrough - Return true if the specified basic block can fallthrough
/// into the block immediately after it.
bool ARMConstantIslands::BBHasFallthrough(MachineBasicBlock *MBB) {
  // ...
  // Try to analyze the end of the block. A potential fallthrough may already
  // have an unconditional branch for whatever reason.
  MachineBasicBlock *TBB, *FBB;
  SmallVector<MachineOperand, 4> Cond;
  bool TooDifficult = TII->analyzeBranch(*MBB, TBB, FBB, Cond, true);
  return TooDifficult || FBB == nullptr;
}
ARMConstantIslands::CPEntry *
ARMConstantIslands::findConstPoolEntry(unsigned CPI,
                                       const MachineInstr *CPEMI) {
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  // The number of entries per constpool index should be small, so a linear
  // search is fine.
  for (CPEntry &CPE : CPEs)
    if (CPE.CPEMI == CPEMI)
      return &CPE;
  return nullptr;
}

/// getCPEAlign - Returns the required alignment of the constant pool entry
/// represented by CPEMI.
Align ARMConstantIslands::getCPEAlign(const MachineInstr *CPEMI) {
  switch (CPEMI->getOpcode()) {
  case ARM::CONSTPOOL_ENTRY:
    break;
  case ARM::JUMPTABLE_TBB:
    return isThumb1 ? Align(4) : Align(1);
  case ARM::JUMPTABLE_TBH:
    return isThumb1 ? Align(4) : Align(2);
  case ARM::JUMPTABLE_INSTS:
    return Align(2);
  case ARM::JUMPTABLE_ADDRS:
    return Align(4);
  default:
    llvm_unreachable("unknown constpool entry kind");
  }

  unsigned CPI = getCombinedIndex(CPEMI);
  assert(CPI < MCP->getConstants().size() && "Invalid constant pool index.");
  return MCP->getConstants()[CPI].getAlign();
}
void ARMConstantIslands::scanFunctionJumpTables() {
  for (MachineBasicBlock &MBB : *MF) {
    for (MachineInstr &I : MBB)
      if (I.isBranch() &&
          (I.getOpcode() == ARM::t2BR_JT || I.getOpcode() == ARM::tBR_JTr))
        T2JumpTables.push_back(&I);
  }
}
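// initializeFunctionInfo performs the single scan everything else runs from:
// it computes per-block sizes and offsets, collects the water, records every
// displacement-limited branch in ImmBranches, and records every constant pool
// (or inline jump table) reference in CPUsers, with each user's maximum
// displacement decoded from its addressing mode.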
void ARMConstantIslands::
initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs) {
  BBUtils->computeAllBlockSizes();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();

  // The known bits of the entry block offset are determined by the function
  // alignment.
  BBInfo.front().KnownBits = Log2(MF->getAlignment());

  // Compute block offsets and known bits.
  BBUtils->adjustBBOffsetsAfter(&MF->front());

  // We only care about jump tables if they are inline.
  bool InlineJumpTables =
      MF->getJumpTableInfo() &&
      MF->getJumpTableInfo()->getEntryKind() == MachineJumpTableInfo::EK_Inline;

  // Now go back through the instructions and build up our data structures.
  for (MachineBasicBlock &MBB : *MF) {
    // If this block doesn't fall through into the next MBB, then this is
    // 'water' that a constant pool island could be placed.
    if (!BBHasFallthrough(&MBB))
      WaterList.push_back(&MBB);

    for (MachineInstr &I : MBB) {
      if (I.isDebugInstr())
        continue;

      unsigned Opc = I.getOpcode();
      if (I.isBranch()) {
        bool isCond = false;
        unsigned Bits = 0;
        unsigned Scale = 1;
        int UOpc = Opc;
        switch (Opc) {
        default:
          continue; // Ignore other branches.
        case ARM::t2BR_JT:
        case ARM::tBR_JTr:
          if (InlineJumpTables)
            T2JumpTables.push_back(&I);
          continue; // Does not get an entry in ImmBranches.
        // ... (decode Bits/Scale/isCond/UOpc for B, tB, t2B, Bcc, tBcc, t2Bcc)
        }

        // Record this immediate branch.
        unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
        ImmBranches.push_back(ImmBranch(&I, MaxOffs, isCond, UOpc));
      }

      if (Opc == ARM::tPUSH || Opc == ARM::tPOP_RET)
        PushPopMIs.push_back(&I);

      if (Opc == ARM::CONSTPOOL_ENTRY || Opc == ARM::JUMPTABLE_ADDRS ||
          Opc == ARM::JUMPTABLE_INSTS || Opc == ARM::JUMPTABLE_TBB ||
          Opc == ARM::JUMPTABLE_TBH)
        continue;

      // Scan the instructions for constant pool operands.
      for (unsigned op = 0, e = I.getNumOperands(); op != e; ++op)
        if (I.getOperand(op).isCPI() ||
            (I.getOperand(op).isJTI() && InlineJumpTables)) {
          // We found one.  The addressing mode tells us the max displacement
          // from the PC that this instruction permits.
          unsigned Bits = 0;
          unsigned Scale = 1;
          bool NegOk = false;
          bool IsSoImm = false;

          switch (Opc) {
          default:
            llvm_unreachable("Unknown addressing mode for CP reference!");

          // Taking the address of a CP entry.
          case ARM::LEApcrel:
          case ARM::LEApcrelJT: {
            // This takes a SoImm, which is 8 bit immediate rotated. We'll
            // pretend the maximum offset is 255 * 4.
            Bits = 8;
            NegOk = true;
            IsSoImm = true;
            unsigned CPI = I.getOperand(op).getIndex();
            assert(CPI < CPEMIs.size());
            MachineInstr *CPEMI = CPEMIs[CPI];
            const Align CPEAlign = getCPEAlign(CPEMI);
            const unsigned LogCPEAlign = Log2(CPEAlign);
            if (LogCPEAlign >= 2)
              Scale = 4;
            else
              // For constants with less than 4-byte alignment, we'll pretend
              // the maximum offset is 255 * 1.
              Scale = 1;
          }
            break;
          case ARM::t2LEApcrel:
          case ARM::t2LEApcrelJT:
            Bits = 12;
            NegOk = true;
            break;
          case ARM::tLEApcrel:
          case ARM::tLEApcrelJT:
            Bits = 8;
            Scale = 4;
            break;
          // ... (constant pool loads)
          case ARM::t2LDRSHpci:
          case ARM::t2LDRSBpci:
            Bits = 12; // +-offset_12
            NegOk = true;
            break;
          // ...
          }

          // Remember that this is a user of a CP entry.
          unsigned CPI = I.getOperand(op).getIndex();
          if (I.getOperand(op).isJTI()) {
            JumpTableUserIndices.insert(std::make_pair(CPI, CPUsers.size()));
            CPI = JumpTableEntryIndices[CPI];
          }
          MachineInstr *CPEMI = CPEMIs[CPI];
          unsigned MaxOffs = ((1 << Bits)-1) * Scale;
          CPUsers.push_back(CPUser(&I, CPEMI, MaxOffs, NegOk, IsSoImm));

          // Increment corresponding CPEntry reference count.
          CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
          assert(CPE && "Cannot find a corresponding CPEntry!");
          CPE->RefCount++;

          // Instructions can only use one CP entry; don't bother scanning the
          // rest of the operands.
          break;
        }
    }
  }
}
/// CompareMBBNumbers - Little predicate function to sort the WaterList by MBB
/// ID.
static bool CompareMBBNumbers(const MachineBasicBlock *LHS,
                              const MachineBasicBlock *RHS) {
  return LHS->getNumber() < RHS->getNumber();
}
/// updateForInsertedWaterBlock - When a block is newly inserted into the
/// machine function, it upsets all of the block numbers.  Renumber the blocks
/// and update the arrays that parallel this numbering.
void ARMConstantIslands::updateForInsertedWaterBlock(MachineBasicBlock *NewBB) {
  // ...
  water_iterator IP = llvm::lower_bound(WaterList, NewBB, CompareMBBNumbers);
  WaterList.insert(IP, NewBB);
}

/// Split the basic block containing MI into two blocks, which are joined by
/// an unconditional branch.  Update data structures and renumber blocks to
/// account for this change and returns the newly created block.
MachineBasicBlock *ARMConstantIslands::splitBlockBeforeInstr(MachineInstr *MI) {
  MachineBasicBlock *OrigBB = MI->getParent();

  // Collect liveness information at MI.
  LivePhysRegs LRs(*MF->getSubtarget().getRegisterInfo());
  LRs.addLiveOuts(*OrigBB);
  auto LivenessEnd = ++MI->getIterator().getReverse();
  for (MachineInstr &LiveMI : make_range(OrigBB->rbegin(), LivenessEnd))
    LRs.stepBackward(LiveMI);

  // Create a new MBB for the code after OrigBB and branch to it.
  MachineBasicBlock *NewBB =
      MF->CreateMachineBasicBlock(OrigBB->getBasicBlock());
  MachineFunction::iterator MBBI = ++OrigBB->getIterator();
  MF->insert(MBBI, NewBB);
  NewBB->splice(NewBB->end(), OrigBB, MI, OrigBB->end());
  unsigned Opc = isThumb ? (isThumb2 ? ARM::t2B : ARM::tB) : ARM::B;
  // ... (emit the unconditional branch, update the CFG)

  // Update live-in information in the new block.
  MachineRegisterInfo &MRI = MF->getRegInfo();
  for (MCPhysReg L : LRs)
    if (!MRI.isReserved(L))
      NewBB->addLiveIn(L);

  // Update internal data structures to account for the newly inserted MBB.
  MF->RenumberBlocks(NewBB);

  // Update WaterList: the new island goes between OrigBB and NewBB, so OrigBB
  // gains water; NewBB keeps OrigBB's water if it had any.
  water_iterator IP = llvm::lower_bound(WaterList, OrigBB, CompareMBBNumbers);
  MachineBasicBlock *WaterBB = IP == WaterList.end() ? nullptr : *IP;
  if (WaterBB == OrigBB)
    WaterList.insert(std::next(IP), NewBB);
  else
    WaterList.insert(IP, OrigBB);
  NewWaterList.insert(OrigBB);

  // Recompute the sizes of the split halves, then fix up all following
  // offsets.
  BBUtils->computeBlockSize(OrigBB);
  BBUtils->computeBlockSize(NewBB);
  BBUtils->adjustBBOffsetsAfter(OrigBB);

  return NewBB;
}
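// A read of PC yields the address of the current instruction plus 8 in ARM
// state (one fetch ahead) and plus 4 in Thumb state. getUserOffset bakes that
// bias in, so later range checks can compare offsets directly.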
unsigned ARMConstantIslands::getUserOffset(CPUser &U) const {
  unsigned UserOffset = BBUtils->getOffsetOf(U.MI);
  // ... (also computes U.KnownAlignment from the block's known offset bits)
  UserOffset += (isThumb ? 4 : 8);
  return UserOffset;
}
/// isOffsetInRange - Checks whether UserOffset (the location of a constant
/// pool reference) can reach a constant pool entry at TrialOffset.
bool ARMConstantIslands::isOffsetInRange(unsigned UserOffset,
                                         unsigned TrialOffset, unsigned MaxDisp,
                                         bool NegativeOK, bool IsSoImm) {
  if (UserOffset <= TrialOffset) {
    // User before the Trial.
    if (TrialOffset - UserOffset <= MaxDisp)
      return true;
  } else if (NegativeOK) {
    if (UserOffset - TrialOffset <= MaxDisp)
      return true;
  }
  return false;
}
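// isWaterInRange answers two questions at once: whether U's entry, placed in
// the gap after Water, would still be reachable from U, and how many bytes
// (Growth) the function would grow beyond the padding that gap already
// provides.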
bool ARMConstantIslands::isWaterInRange(unsigned UserOffset,
                                        MachineBasicBlock *Water, CPUser &U,
                                        unsigned &Growth) {
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  const unsigned CPEOffset = BBInfo[Water->getNumber()].postOffset(CPEAlign);
  unsigned NextBlockOffset;
  Align NextBlockAlignment;
  MachineFunction::const_iterator NextBlock = Water->getIterator();
  if (++NextBlock == MF->end()) {
    NextBlockOffset = BBInfo[Water->getNumber()].postOffset();
  } else {
    NextBlockOffset = BBInfo[NextBlock->getNumber()].Offset;
    NextBlockAlignment = NextBlock->getAlignment();
  }
  unsigned Size = U.CPEMI->getOperand(2).getImm();
  unsigned CPEEnd = CPEOffset + Size;

  // The CPE may be able to hide in the alignment padding before the next
  // block. It may also cause more padding to be required if it is more
  // aligned than the next block.
  if (CPEEnd > NextBlockOffset) {
    Growth = CPEEnd - NextBlockOffset;
    // Compute the padding that would go at the end of the CPE to align the
    // next block.
    Growth += offsetToAlignment(CPEEnd, NextBlockAlignment);

    // If the CPE is to be inserted before the instruction, that will raise
    // the offset of the instruction. Also account for unknown alignment
    // padding in case we have to split the block.
    if (CPEOffset < UserOffset)
      UserOffset += Growth + UnknownPadding(MF->getAlignment(), Log2(CPEAlign));
  } else
    // CPE fits in existing padding.
    Growth = 0;

  return isOffsetInRange(UserOffset, CPEOffset, U);
}
/// isCPEntryInRange - Returns true if the distance between specific MI and
/// specific ConstPool entry instruction can fit in MI's displacement field.
bool ARMConstantIslands::isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                                          MachineInstr *CPEMI, unsigned MaxDisp,
                                          bool NegOk, bool DoDump) {
  unsigned CPEOffset = BBUtils->getOffsetOf(CPEMI);

  if (DoDump) {
    LLVM_DEBUG({
      BBInfoVector &BBInfo = BBUtils->getBBInfo();
      unsigned Block = MI->getParent()->getNumber();
      const BasicBlockInfo &BBI = BBInfo[Block];
      dbgs() << "User of CPE#" << CPEMI->getOperand(0).getImm()
             << " max delta=" << MaxDisp
             << format(" insn address=%#x", UserOffset) << " in "
             << printMBBReference(*MI->getParent()) << ": "
             << format("%#x-%x\t", BBI.Offset, BBI.postOffset()) << *MI
             << format("CPE address=%#x offset=%+d: ", CPEOffset,
                       int(CPEOffset - UserOffset));
    });
  }

  return isOffsetInRange(UserOffset, CPEOffset, MaxDisp, NegOk);
}
/// decrementCPEReferenceCount - find the constant pool entry with index CPI
/// and instruction CPEMI, and decrement its refcount.  If the refcount
/// becomes 0 remove the entry and instruction.  Returns true if we removed
/// the entry, false if we didn't.
bool ARMConstantIslands::decrementCPEReferenceCount(unsigned CPI,
                                                    MachineInstr *CPEMI) {
  // Find the old entry. Eliminate it if it is no longer used.
  CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
  assert(CPE && "Unexpected!");
  if (--CPE->RefCount == 0) {
    removeDeadCPEMI(CPEMI);
    CPE->CPEMI = nullptr;
    return true;
  }
  return false;
}
/// getCombinedIndex - Returns the combined index of a constant pool entry or
/// an inline jump table entry (jump tables are mapped past the end of the
/// regular CP indices).
unsigned ARMConstantIslands::getCombinedIndex(const MachineInstr *CPEMI) {
  if (CPEMI->getOperand(1).isCPI())
    return CPEMI->getOperand(1).getIndex();
  return JumpTableEntryIndices[CPEMI->getOperand(1).getIndex()];
}
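// The int result of findInRangeCPEntry encodes three outcomes for its caller
// handleConstantPoolUser: 0 = nothing usable, a new clone is needed; 1 = the
// current entry is already in range, nothing changed; 2 = the user was
// retargeted to an existing clone and the old entry died, so offsets moved
// and another iteration is required.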
int ARMConstantIslands::findInRangeCPEntry(CPUser& U, unsigned UserOffset) {
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;

  // Check to see if the CPE is already in-range.
  if (isCPEntryInRange(UserMI, UserOffset, CPEMI, U.getMaxDisp(), U.NegOk,
                       true)) {
    LLVM_DEBUG(dbgs() << "In range\n");
    return 1;
  }

  // No.  Look for previously created clones of the CPE that are in range.
  unsigned CPI = getCombinedIndex(CPEMI);
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  for (CPEntry &CPE : CPEs) {
    // We already tried this one.
    if (CPE.CPEMI == CPEMI)
      continue;
    // Removing CPEs can leave empty entries, skip.
    if (CPE.CPEMI == nullptr)
      continue;
    if (isCPEntryInRange(UserMI, UserOffset, CPE.CPEMI, U.getMaxDisp(),
                         U.NegOk)) {
      LLVM_DEBUG(dbgs() << "Replacing CPE#" << CPI << " with CPE#" << CPE.CPI
                        << "\n");
      // Point the CPUser node to the replacement.
      U.CPEMI = CPE.CPEMI;
      // Change the CPI in the instruction operand to refer to the clone.
      for (MachineOperand &MO : UserMI->operands())
        if (MO.isCPI()) {
          MO.setIndex(CPE.CPI);
          break;
        }
      // Adjust the refcount of the clone...
      CPE.RefCount++;
      // ...and the original.  If we didn't remove the old entry, none of the
      // addresses changed, so we don't need another pass.
      return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;
    }
  }
  return 0;
}
/// getUnconditionalBrDisp - Returns the maximum displacement that can fit in
/// the specific unconditional branch instruction.
static unsigned getUnconditionalBrDisp(int Opc) {
  switch (Opc) {
  case ARM::tB:
    return ((1<<10)-1)*2;
  case ARM::t2B:
    return ((1<<23)-1)*2;
  default:
    break;
  }
  return ((1<<23)-1)*4;
}
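// findAvailableWater walks WaterList backwards so the closest candidates are
// seen first, keeping the spot with the least Growth. Once the outer loop is
// struggling to converge (CloserWater), it also insists on water within half
// the maximum displacement, trading island density for guaranteed progress.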
bool ARMConstantIslands::findAvailableWater(CPUser &U, unsigned UserOffset,
                                            water_iterator &WaterIter,
                                            bool CloserWater) {
  if (WaterList.empty())
    return false;

  unsigned BestGrowth = ~0u;

  // The nearest water without splitting the UserBB is right after it.  If the
  // distance is still within the limit, search for water that is closer, to
  // avoid pushing other entries out of range.
  MachineBasicBlock *UserBB = U.MI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  unsigned MinNoSplitDisp = BBInfo[UserBB->getNumber()].postOffset(CPEAlign);
  if (CloserWater && MinNoSplitDisp > U.getMaxDisp() / 2)
    return false;
  for (water_iterator IP = std::prev(WaterList.end()), B = WaterList.begin();;
       --IP) {
    MachineBasicBlock* WaterBB = *IP;
    // Check if water is in range and is either at a lower address than the
    // current "high water mark" or a new water block that was created since
    // the previous iteration by inserting an unconditional branch.  In the
    // latter case, we want to allow resetting the high water mark back to
    // this new water since we haven't seen it before.
    unsigned Growth;
    if (isWaterInRange(UserOffset, WaterBB, U, Growth) &&
        (WaterBB->getNumber() < U.HighWaterMark->getNumber() ||
         NewWaterList.count(WaterBB) || WaterBB == U.MI->getParent()) &&
        Growth < BestGrowth) {
      // This is the least amount of required padding seen so far.
      BestGrowth = Growth;
      WaterIter = IP;
      LLVM_DEBUG(dbgs() << "Found water after " << printMBBReference(*WaterBB)
                        << " Growth=" << Growth << '\n');

      if (CloserWater && WaterBB == U.MI->getParent())
        return true;
      // Keep looking unless it is perfect and we're not looking for the
      // lowest possible address.
      if (!CloserWater && BestGrowth == 0)
        return true;
    }
    if (IP == B)
      break;
  }
  return BestGrowth != ~0u;
}
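// If no existing water works, createNewWater manufactures some: either the
// entry can sit right after the user's own block behind a short branch, or
// the user's block is split at a carefully chosen offset so the island plus
// the branch around it stays within range of every affected user.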
void ARMConstantIslands::createNewWater(unsigned CPUserIndex,
                                        unsigned UserOffset,
                                        MachineBasicBlock *&NewMBB) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;
  const Align CPEAlign = getCPEAlign(CPEMI);
  MachineBasicBlock *UserMBB = UserMI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const BasicBlockInfo &UserBBI = BBInfo[UserMBB->getNumber()];

  // If the block does not end in an unconditional branch already, and if the
  // end of the block is within range, make new water there.
  if (BBHasFallthrough(UserMBB)) {
    // Size of branch to insert.
    unsigned Delta = isThumb1 ? 2 : 4;
    // Compute the offset where the CPE will begin.
    unsigned CPEOffset = UserBBI.postOffset(CPEAlign) + Delta;

    if (isOffsetInRange(UserOffset, CPEOffset, U)) {
      LLVM_DEBUG(dbgs() << "Split at end of " << printMBBReference(*UserMBB)
                        << format(", expected CPE offset %#x\n", CPEOffset));
      NewMBB = &*++UserMBB->getIterator();
      // Add an unconditional branch from UserMBB to fallthrough block.
      // Record it for branch lengthening; this new branch will not get out of
      // range, but if the preceding conditional branch is out of range, the
      // targets will be exchanged, and the altered branch may be out of
      // range, so the machinery has to know about it.
      int UncondBr = isThumb ? ((isThumb2) ? ARM::t2B : ARM::tB) : ARM::B;
      // ... (emit the branch, update the successor list)
      unsigned MaxDisp = getUnconditionalBrDisp(UncondBr);
      ImmBranches.push_back(ImmBranch(&UserMBB->back(),
                                      MaxDisp, false, UncondBr));
      BBUtils->computeBlockSize(UserMBB);
      BBUtils->adjustBBOffsetsAfter(UserMBB);
      return;
    }
  }

  // What a big block.  Find a place within the block to split it.  Try to
  // place the island just below the maximum displacement, corrected for any
  // padding whose size is not yet known (UPad).
  const Align Align = MF->getAlignment();
  assert(Align >= CPEAlign && "Over-aligned constant pool entry");
  unsigned KnownBits = UserBBI.internalKnownBits();
  unsigned UPad = UnknownPadding(Align, KnownBits);
  unsigned BaseInsertOffset = UserOffset + U.getMaxDisp() - UPad;
  if (!isThumb)
    // In ARM mode, back off one more instruction.
    BaseInsertOffset -= 4;

  LLVM_DEBUG(dbgs() << format("Split in middle of big block before %#x",
                              BaseInsertOffset)
                    << " la=" << Log2(Align) << " kb=" << KnownBits
                    << " up=" << UPad << '\n');

  // This could point off the end of the block if we've already got constant
  // pool entries following this block; only the last one is in the water
  // list.  Back past any possible branches.
  if (BaseInsertOffset + 8 >= UserBBI.postOffset()) {
    // Ensure BaseInsertOffset is larger than the offset of the instruction
    // following UserMI so that the split-point search below iterates at
    // least once.
    BaseInsertOffset =
        std::max(UserBBI.postOffset() - UPad - 8,
                 UserOffset + TII->getInstSizeInBytes(*UserMI) + 1);
    // If the user sits inside an IT block, push the insertion point past it:
    // splitting an IT block would leave an unpredictable IT state.
    Register PredReg;
    MachineBasicBlock::iterator I = std::next(UserMI->getIterator());
    for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
         I->getOpcode() != ARM::t2IT &&
             getITInstrPredicate(*I, PredReg) != ARMCC::AL;
         Offset += TII->getInstSizeInBytes(*I), I = std::next(I)) {
      BaseInsertOffset =
          std::max(BaseInsertOffset, Offset + TII->getInstSizeInBytes(*I) + 1);
      assert(I != UserMBB->end() && "Fell off end of block");
    }
    LLVM_DEBUG(dbgs() << format("Move inside block: %#x\n", BaseInsertOffset));
  }

  unsigned EndInsertOffset = BaseInsertOffset + 4 + UPad +
                             CPEMI->getOperand(2).getImm();
  MachineBasicBlock::iterator MI = UserMI;
  ++MI;
  unsigned CPUIndex = CPUserIndex+1;
  unsigned NumCPUsers = CPUsers.size();
  MachineInstr *LastIT = nullptr;
  for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
       Offset < BaseInsertOffset;
       Offset += TII->getInstSizeInBytes(*MI), MI = std::next(MI)) {
    assert(MI != UserMBB->end() && "Fell off end of block");
    if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].MI == &*MI) {
      CPUser &U = CPUsers[CPUIndex];
      if (!isOffsetInRange(Offset, EndInsertOffset, U)) {
        // Shift insertion point by one unit of alignment so it is within
        // reach.
        BaseInsertOffset -= Align.value();
        EndInsertOffset -= Align.value();
      }
      // This is overly conservative, as we don't account for CPEMIs being
      // reused within the block, but it doesn't matter much.
      EndInsertOffset += U.CPEMI->getOperand(2).getImm();
      CPUIndex++;
    }

    // Remember the last IT instruction.
    if (MI->getOpcode() == ARM::t2IT)
      LastIT = &*MI;
  }

  // Avoid splitting an IT block.
  if (LastIT) {
    Register PredReg;
    ARMCC::CondCodes CC = getITInstrPredicate(*MI, PredReg);
    if (CC != ARMCC::AL)
      MI = LastIT;
  }

  // Avoid splitting a MOVW+MOVT pair with a relocation on Windows: the pair
  // is covered by a single IMAGE_REL_ARM_MOV32T relocation, and a constant
  // island injected between them would clobber it.
  if (STI->isTargetWindows() && isThumb && MI->getOpcode() == ARM::t2MOVTi16 &&
      (MI->getOperand(2).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
          ARMII::MO_HI16) {
    --MI;
    assert(MI->getOpcode() == ARM::t2MOVi16 &&
           (MI->getOperand(1).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
               ARMII::MO_LO16);
  }

  // ...
  NewMBB = splitBlockBeforeInstr(&*MI);
}
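// handleConstantPoolUser ties the pieces together for one user: reuse an
// in-range entry if findInRangeCPEntry allows, otherwise pick water (or make
// some), clone the entry into a fresh island block there under a new PIC
// label id, and retarget the user to the clone.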
bool ARMConstantIslands::handleConstantPoolUser(unsigned CPUserIndex,
                                                bool CloserWater) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;
  unsigned CPI = getCombinedIndex(CPEMI);
  unsigned Size = CPEMI->getOperand(2).getImm();
  // Compute this only once, it's expensive.
  unsigned UserOffset = getUserOffset(U);

  // See if the current entry is within range, or there is a clone of it in
  // range.
  int result = findInRangeCPEntry(U, UserOffset);
  if (result == 1)
    return false;
  else if (result == 2)
    return true;

  // No existing clone of this CPE is within range.  We will be generating a
  // new clone.  Get a UID for it.
  unsigned ID = AFI->createPICLabelUId();

  // Look for water where we can place this CPE.
  MachineBasicBlock *NewIsland = MF->CreateMachineBasicBlock();
  MachineBasicBlock *NewMBB;
  water_iterator IP;
  if (findAvailableWater(U, UserOffset, IP, CloserWater)) {
    LLVM_DEBUG(dbgs() << "Found water in range\n");
    MachineBasicBlock *WaterBB = *IP;

    // If the original WaterList entry was "new water" on this iteration,
    // propagate that to the new island.
    if (NewWaterList.erase(WaterBB))
      NewWaterList.insert(NewIsland);

    // The new CPE goes before the following block (NewMBB).
    NewMBB = &*++WaterBB->getIterator();
  } else {
    // No water found.
    LLVM_DEBUG(dbgs() << "No water found\n");
    createNewWater(CPUserIndex, UserOffset, NewMBB);

    // splitBlockBeforeInstr adds to WaterList, which is important when it is
    // the previous block of the new island.  Find the water again; the
    // iterator may have been invalidated.
    MachineBasicBlock *WaterBB = &*--NewMBB->getIterator();
    IP = find(WaterList, WaterBB);
    if (IP != WaterList.end())
      NewWaterList.erase(WaterBB);

    // We are adding new water.  Update NewWaterList.
    NewWaterList.insert(NewIsland);
  }

  // Remove the original WaterList entry; we want subsequent insertions in
  // this vicinity to go after the one we're about to insert.
  if (IP != WaterList.end())
    WaterList.erase(IP);

  // Okay, we know we can put an island before NewMBB now, do it!
  MF->insert(NewMBB->getIterator(), NewIsland);

  // Update internal data structures to account for the newly inserted MBB.
  updateForInsertedWaterBlock(NewIsland);

  // Now that we have an island to add the CPE to, clone the original CPE and
  // add it to the island.
  U.HighWaterMark = NewIsland;
  U.CPEMI = BuildMI(NewIsland, DebugLoc(), CPEMI->getDesc())
                .addImm(ID)
                .add(CPEMI->getOperand(1))
                .addImm(Size);
  CPEntries[CPI].push_back(CPEntry(U.CPEMI, ID, 1));

  // Decrement the old entry, and remove it if refcount becomes 0.
  decrementCPEReferenceCount(CPI, CPEMI);

  // Mark the basic block as aligned as required by the const-pool entry.
  NewIsland->setAlignment(getCPEAlign(U.CPEMI));

  // Increase the size of the island block to account for the new entry.
  BBUtils->adjustBBSize(NewIsland, Size);
  BBUtils->adjustBBOffsetsAfter(&*--NewIsland->getIterator());

  // Finally, change the CPI in the instruction operand to be ID.
  for (MachineOperand &MO : UserMI->operands())
    if (MO.isCPI()) {
      MO.setIndex(ID);
      break;
    }

  LLVM_DEBUG(
      dbgs() << "  Moved CPE to #" << ID << " CPI=" << CPI
             << format(" offset=%#x\n",
                       BBUtils->getBBInfo()[NewIsland->getNumber()].Offset));

  return true;
}
/// removeDeadCPEMI - Remove a dead constant pool entry instruction.  Update
/// sizes and offsets of impacted basic blocks.
void ARMConstantIslands::removeDeadCPEMI(MachineInstr *CPEMI) {
  MachineBasicBlock *CPEBB = CPEMI->getParent();
  unsigned Size = CPEMI->getOperand(2).getImm();
  CPEMI->eraseFromParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  BBUtils->adjustBBSize(CPEBB, -Size);
  // All succeeding offsets have the current size value added in, fix this.
  if (CPEBB->empty()) {
    BBInfo[CPEBB->getNumber()].Size = 0;

    // This block no longer needs to be aligned.
    CPEBB->setAlignment(Align(1));
  } else {
    // Entries are sorted by descending alignment, so realign from the front.
    CPEBB->setAlignment(getCPEAlign(&*CPEBB->begin()));
  }

  BBUtils->adjustBBOffsetsAfter(CPEBB);
  // ...
}
/// removeUnusedCPEntries - Remove constant pool entries whose refcounts
/// are zero.
bool ARMConstantIslands::removeUnusedCPEntries() {
  bool MadeChange = false;
  for (std::vector<CPEntry> &CPEs : CPEntries) {
    for (CPEntry &CPE : CPEs) {
      if (CPE.RefCount == 0 && CPE.CPEMI) {
        removeDeadCPEMI(CPE.CPEMI);
        CPE.CPEMI = nullptr;
        MadeChange = true;
      }
    }
  }
  return MadeChange;
}
/// fixupImmediateBr - Fix up an immediate branch whose destination is too far
/// away to fit in its displacement field.
bool ARMConstantIslands::fixupImmediateBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Check to see if the DestBB is already in-range.
  if (BBUtils->isBBInRange(MI, DestBB, Br.MaxDisp))
    return false;

  if (!Br.isCond)
    return fixupUnconditionalBr(Br);
  return fixupConditionalBr(Br);
}
/// fixupUnconditionalBr - Fix up an unconditional branch whose destination is
/// too far away to fit in its displacement field.  If the LR register has
/// been spilled in the epilogue, then we can use BL to implement a far jump.
bool
ARMConstantIslands::fixupUnconditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *MBB = MI->getParent();
  if (!isThumb1)
    llvm_unreachable("fixupUnconditionalBr is Thumb1 only!");

  if (!AFI->isLRSpilled())
    report_fatal_error("underestimated function size");

  // Use BL to implement far jump.
  Br.MaxDisp = (1 << 21) * 2;
  MI->setDesc(TII->get(ARM::tBfar));
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  BBInfo[MBB->getNumber()].Size += 2;
  BBUtils->adjustBBOffsetsAfter(MBB);
  ++NumUBrFixed;

  LLVM_DEBUG(dbgs() << "  Changed B to long jump " << *MI);

  return true;
}
/// fixupConditionalBr - Fix up a conditional branch whose destination is too
/// far away to fit in its displacement field.  It is converted to an inverse
/// conditional branch + an unconditional branch to the destination.
bool
ARMConstantIslands::fixupConditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Add an unconditional branch to the destination and invert the branch
  // condition to jump over it:
  //   blt L1     =>     bge L2
  //                     b   L1
  //              L2:
  ARMCC::CondCodes CC = (ARMCC::CondCodes)MI->getOperand(1).getImm();
  CC = ARMCC::getOppositeCondition(CC);
  Register CCReg = MI->getOperand(2).getReg();

  // If the branch is at the end of its MBB and that has a fall-through block,
  // direct the updated conditional branch to the fall-through block.
  // Otherwise, split the MBB before the next instruction.
  MachineBasicBlock *MBB = MI->getParent();
  MachineInstr *BMI = &MBB->back();
  bool NeedSplit = (BMI != MI) || !BBHasFallthrough(MBB);

  ++NumCBrFixed;
  if (BMI != MI) {
    if (std::next(MachineBasicBlock::iterator(MI)) == std::prev(MBB->end()) &&
        BMI->getOpcode() == Br.UncondBr) {
      // Last MI in the BB is an unconditional branch.  We can simply invert
      // the condition and swap destinations:
      //   beq L1    =>    bne L2
      //   b   L2          b   L1
      MachineBasicBlock *NewDest = BMI->getOperand(0).getMBB();
      if (BBUtils->isBBInRange(MI, NewDest, Br.MaxDisp)) {
        LLVM_DEBUG(
            dbgs() << "  Invert Bcc condition and swap its destination with "
                   << *BMI);
        BMI->getOperand(0).setMBB(DestBB);
        MI->getOperand(0).setMBB(NewDest);
        MI->getOperand(1).setImm(CC);
        return true;
      }
    }
  }

  if (NeedSplit) {
    splitBlockBeforeInstr(MI);
    // No need for the branch to the next block.  We're adding an
    // unconditional branch to the destination.
    int delta = TII->getInstSizeInBytes(MBB->back());
    BBUtils->adjustBBSize(MBB, -delta);
    MBB->back().eraseFromParent();
    // ... (update the CFG for the swapped successors)
  }
  MachineBasicBlock *NextBB = &*++MBB->getIterator();

  LLVM_DEBUG(dbgs() << "  Insert B to " << printMBBReference(*DestBB)
                    << " also invert condition and change dest. to "
                    << printMBBReference(*NextBB) << "\n");

  // Insert a new conditional branch and a new unconditional branch.  Also
  // update the ImmBranch as well as adding a new entry for the new branch.
  BuildMI(MBB, DebugLoc(), TII->get(MI->getOpcode()))
      .addMBB(NextBB)
      .addImm(CC)
      .addReg(CCReg);
  Br.MI = &MBB->back();
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  if (isThumb)
    BuildMI(MBB, DebugLoc(), TII->get(Br.UncondBr))
        .addMBB(DestBB)
        .add(predOps(ARMCC::AL));
  else
    BuildMI(MBB, DebugLoc(), TII->get(Br.UncondBr)).addMBB(DestBB);
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  unsigned MaxDisp = getUnconditionalBrDisp(Br.UncondBr);
  ImmBranches.push_back(ImmBranch(&MBB->back(), MaxDisp, false, Br.UncondBr));

  // Remove the old conditional branch.
  BBUtils->adjustBBSize(MI->getParent(), -TII->getInstSizeInBytes(*MI));
  MI->eraseFromParent();
  BBUtils->adjustBBOffsetsAfter(MBB);
  return true;
}
bool ARMConstantIslands::optimizeThumb2Instructions() {
  bool MadeChange = false;

  // Shrink ADR and LDR from constantpool.
  for (CPUser &U : CPUsers) {
    unsigned Opcode = U.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;
    switch (Opcode) {
    default: break;
    case ARM::t2LEApcrel:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLEApcrel;
        Bits = 8;
        Scale = 4;
      }
      break;
    case ARM::t2LDRpci:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLDRpci;
        Bits = 8;
        Scale = 4;
      }
      break;
    }

    if (!NewOpc)
      continue;

    unsigned UserOffset = getUserOffset(U);
    unsigned MaxOffs = ((1 << Bits) - 1) * Scale;

    // Be conservative with inline asm.
    if (!U.KnownAlignment)
      MaxOffs -= 2;

    // FIXME: Check if offset is multiple of scale if scale is not 4.
    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, MaxOffs, false, true)) {
      LLVM_DEBUG(dbgs() << "Shrink: " << *U.MI);
      U.MI->setDesc(TII->get(NewOpc));
      MachineBasicBlock *MBB = U.MI->getParent();
      BBUtils->adjustBBSize(MBB, -2);
      BBUtils->adjustBBOffsetsAfter(MBB);
      ++NumT2CPShrunk;
      MadeChange = true;
    }
  }

  return MadeChange;
}
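// optimizeThumb2Branches shrinks t2B/t2Bcc to 16-bit forms where the target
// is near, folds a preceding CMP ri, #0 into CBZ/CBNZ, and, on cores with
// low-overhead-branch support, converts a backwards conditional branch that
// closes a loop into t2LE.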
bool ARMConstantIslands::optimizeThumb2Branches() {
  auto TryShrinkBranch = [this](ImmBranch &Br) {
    unsigned Opcode = Br.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;
    switch (Opcode) {
    default: break;
    case ARM::t2B:
      NewOpc = ARM::tB;
      Bits = 11;
      Scale = 2;
      break;
    case ARM::t2Bcc:
      NewOpc = ARM::tBcc;
      Bits = 8;
      Scale = 2;
      break;
    }
    if (NewOpc) {
      unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
      MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
      if (BBUtils->isBBInRange(Br.MI, DestBB, MaxOffs)) {
        LLVM_DEBUG(dbgs() << "Shrink branch: " << *Br.MI);
        Br.MI->setDesc(TII->get(NewOpc));
        MachineBasicBlock *MBB = Br.MI->getParent();
        BBUtils->adjustBBSize(MBB, -2);
        BBUtils->adjustBBOffsetsAfter(MBB);
        ++NumT2BrShrunk;
        return true;
      }
    }
    return false;
  };

  struct ImmCompare {
    MachineInstr* MI = nullptr;
    unsigned NewOpc = 0;
  };

  auto FindCmpForCBZ = [this](ImmBranch &Br, ImmCompare &ImmCmp,
                              MachineBasicBlock *DestBB) {
    ImmCmp.MI = nullptr;
    ImmCmp.NewOpc = 0;

    // If the conditional branch doesn't kill CPSR, then CPSR can be liveout
    // so this transformation is not safe.
    if (!Br.MI->killsRegister(ARM::CPSR, nullptr))
      return false;

    Register PredReg;
    unsigned NewOpc = 0;
    ARMCC::CondCodes Pred = getInstrPredicate(*Br.MI, PredReg);
    if (Pred == ARMCC::EQ)
      NewOpc = ARM::tCBZ;
    else if (Pred == ARMCC::NE)
      NewOpc = ARM::tCBNZ;
    else
      return false;

    // Check if the distance is within 126.  Subtract 2 from the starting
    // offset because the cmp will be eliminated.
    unsigned BrOffset = BBUtils->getOffsetOf(Br.MI) + 4 - 2;
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    unsigned DestOffset = BBInfo[DestBB->getNumber()].Offset;
    if (BrOffset >= DestOffset || (DestOffset - BrOffset) > 126)
      return false;

    // Search backwards to find a tCMPi8.
    auto *TRI = STI->getRegisterInfo();
    MachineInstr *CmpMI = findCMPToFoldIntoCBZ(Br.MI, TRI);
    if (!CmpMI || CmpMI->getOpcode() != ARM::tCMPi8)
      return false;

    ImmCmp.MI = CmpMI;
    ImmCmp.NewOpc = NewOpc;
    return true;
  };

  auto TryConvertToLE = [this](ImmBranch &Br, ImmCompare &Cmp) {
    if (Br.MI->getOpcode() != ARM::t2Bcc || !STI->hasLOB() ||
        STI->hasMinSize())
      return false;

    MachineBasicBlock *MBB = Br.MI->getParent();
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    if (BBUtils->getOffsetOf(MBB) < BBUtils->getOffsetOf(DestBB) ||
        !BBUtils->isBBInRange(Br.MI, DestBB, 4094))
      return false;

    if (!DT->dominates(DestBB, MBB))
      return false;

    // We queried for the CBN?Z opcode based upon the 'ExitBB', the opposite
    // target of Br.  So now we need to reverse the condition.
    Cmp.NewOpc = Cmp.NewOpc == ARM::tCBZ ? ARM::tCBNZ : ARM::tCBZ;

    MachineInstrBuilder MIB = BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(),
                                      TII->get(ARM::t2LE));
    // Swapped a t2Bcc for a t2LE, so no need to update the size of the block.
    MIB.add(Br.MI->getOperand(0));
    Br.MI->eraseFromParent();
    Br.MI = MIB;
    ++NumLEInserted;
    return true;
  };

  bool MadeChange = false;

  // The order in which branches appear in ImmBranches is approximately their
  // order within the function body.  By visiting later branches first, we
  // reduce the distance between earlier forward branches and their targets,
  // making it more likely that the cbn?z optimization, which can only apply
  // to forward branches, will succeed.
  for (ImmBranch &Br : reverse(ImmBranches)) {
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    MachineBasicBlock *MBB = Br.MI->getParent();
    MachineBasicBlock *ExitBB = &MBB->back() == Br.MI ?
        MBB->getFallThrough() :
        MBB->back().getOperand(0).getMBB();

    ImmCompare Cmp;
    if (FindCmpForCBZ(Br, Cmp, ExitBB) && TryConvertToLE(Br, Cmp)) {
      DestBB = ExitBB;
      MadeChange = true;
    } else {
      FindCmpForCBZ(Br, Cmp, DestBB);
      MadeChange |= TryShrinkBranch(Br);
    }

    unsigned Opcode = Br.MI->getOpcode();
    if ((Opcode != ARM::tBcc && Opcode != ARM::t2LE) || !Cmp.NewOpc)
      continue;

    Register Reg = Cmp.MI->getOperand(0).getReg();

    // Check for Kill flags on Reg.  If they are present remove them and set
    // kill on Cmp.
    auto *TRI = STI->getRegisterInfo();
    MachineBasicBlock::iterator KillMI = Br.MI;
    bool RegKilled = false;
    do {
      --KillMI;
      if (KillMI->killsRegister(Reg, TRI)) {
        KillMI->clearRegisterKills(Reg, TRI);
        RegKilled = true;
        break;
      }
    } while (KillMI != Cmp.MI);

    // Create the new CB(N)Z.
    LLVM_DEBUG(dbgs() << "Fold: " << *Cmp.MI << " and: " << *Br.MI);
    MachineInstr *NewBR =
        BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(), TII->get(Cmp.NewOpc))
            .addReg(Reg, getKillRegState(RegKilled))
            .addMBB(DestBB, Br.MI->getOperand(0).getTargetFlags());

    Cmp.MI->eraseFromParent();

    if (Br.MI->getOpcode() == ARM::tBcc) {
      Br.MI->eraseFromParent();
      Br.MI = NewBR;
      BBUtils->adjustBBSize(MBB, -2);
    }
    // ... (for the t2LE case, remove a now-redundant trailing branch)
    BBUtils->adjustBBOffsetsAfter(MBB);
    MadeChange = true;
  }

  return MadeChange;
}
static bool isSimpleIndexCalc(MachineInstr &I, unsigned EntryReg,
                              unsigned BaseReg) {
  if (I.getOpcode() != ARM::t2ADDrs)
    return false;
  if (I.getOperand(0).getReg() != EntryReg)
    return false;
  if (I.getOperand(1).getReg() != BaseReg)
    return false;
  // ...
  return true;
}
/// While trying to form a TBB/TBH instruction, we may (if the table doesn't
/// immediately follow the load) need to preserve the register holding the
/// table's base address.
bool ARMConstantIslands::preserveBaseRegister(MachineInstr *JumpMI,
                                              MachineInstr *LEAMI,
                                              unsigned &DeadSize,
                                              bool &CanDeleteLEA,
                                              bool &BaseRegKill) {
  if (JumpMI->getParent() != LEAMI->getParent())
    return false;

  Register EntryReg = JumpMI->getOperand(0).getReg();
  Register BaseReg = LEAMI->getOperand(0).getReg();

  CanDeleteLEA = true;
  BaseRegKill = false;
  MachineInstr *RemovableAdd = nullptr;
  MachineBasicBlock::iterator I(LEAMI);
  for (++I; &*I != JumpMI; ++I) {
    if (isSimpleIndexCalc(*I, EntryReg, BaseReg)) {
      RemovableAdd = &*I;
      break;
    }

    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == BaseReg) {
        BaseRegKill = BaseRegKill || MO.isKill();
        CanDeleteLEA = false;
      }
    }
  }

  if (!RemovableAdd)
    return true;

  // Check the add really is removable, and that nothing else in the block
  // clobbers BaseReg.
  for (++I; &*I != JumpMI; ++I) {
    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == EntryReg)
        RemovableAdd = nullptr;
    }
  }

  if (RemovableAdd) {
    RemovableAdd->eraseFromParent();
    DeadSize += isThumb2 ? 4 : 2;
  } else if (BaseReg == EntryReg) {
    // The add wasn't removable, but clobbered the base for the TBB.  So we
    // can't preserve it.
    return false;
  }

  // ...
  return true;
}
static void RemoveDeadAddBetweenLEAAndJT(MachineInstr *LEAMI,
                                         MachineInstr *JumpMI,
                                         unsigned &DeadSize) {
  // Remove a dead add between the LEA and JT, which used to compute EntryReg,
  // but is now dead.
  Register EntryReg = JumpMI->getOperand(0).getReg();
  MachineBasicBlock::iterator I(LEAMI);
  MachineInstr *RemovableAdd = nullptr;
  for (++I; &*I != JumpMI; ++I) {
    if (I->getOpcode() == ARM::t2ADDrs &&
        I->getOperand(0).getReg() == EntryReg)
      RemovableAdd = &*I;
  }

  if (!RemovableAdd)
    return;

  // Ensure EntryReg is not clobbered or used between the add and the jump.
  MachineBasicBlock::iterator J(RemovableAdd);
  for (++J; &*J != JumpMI; ++J) {
    for (const MachineOperand &MO : J->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == EntryReg)
        return;
      if (MO.isUse() && MO.getReg() == EntryReg)
        return;
    }
  }

  LLVM_DEBUG(dbgs() << "Removing Dead Add: " << *RemovableAdd);
  RemovableAdd->eraseFromParent();
  DeadSize += 4;
}
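// optimizeThumb2JumpTables rewrites a jump-table dispatch into the compact
// TBB/TBH form when every destination's half-offset fits in a byte (TBB) or
// halfword (TBH); on Thumb-1 an equivalent sequence is synthesized when
// SynthesizeThumb1TBB is set. The surrounding LEA/shift/load index
// computation is deleted where it provably becomes dead (DeadSize tracks the
// bytes saved).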
bool ARMConstantIslands::optimizeThumb2JumpTables() {
  bool MadeChange = false;

  // FIXME: After the tables are shrunk, can we get rid some of the
  // constantpool tables?
  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (MachineInstr *MI : T2JumpTables) {
    const MCInstrDesc &MCID = MI->getDesc();
    unsigned NumOps = MCID.getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();
    assert(JTI < JT.size());

    bool ByteOk = true;
    bool HalfWordOk = true;
    unsigned JTOffset = BBUtils->getOffsetOf(MI) + 4;
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    for (MachineBasicBlock *MBB : JTBBs) {
      unsigned DstOffset = BBInfo[MBB->getNumber()].Offset;
      // Negative offset is not ok. FIXME: We should change BB layout to make
      // sure all the branches are forward.
      if (ByteOk && (DstOffset - JTOffset) > ((1<<8)-1)*2)
        ByteOk = false;
      unsigned TBHLimit = ((1<<16)-1)*2;
      if (HalfWordOk && (DstOffset - JTOffset) > TBHLimit)
        HalfWordOk = false;
      if (!ByteOk && !HalfWordOk)
        break;
    }

    if (!ByteOk && !HalfWordOk)
      continue;

    CPUser &User = CPUsers[JumpTableUserIndices[JTI]];
    MachineBasicBlock *MBB = MI->getParent();
    if (!MI->getOperand(0).isKill())
      continue;

    unsigned DeadSize = 0;
    bool CanDeleteLEA = false;
    bool BaseRegKill = false;

    unsigned IdxReg = ~0U;
    bool IdxRegKill = true;
    if (isThumb2) {
      IdxReg = MI->getOperand(1).getReg();
      IdxRegKill = MI->getOperand(1).isKill();

      bool PreservedBaseReg =
          preserveBaseRegister(MI, User.MI, DeadSize, CanDeleteLEA,
                               BaseRegKill);
      if (!jumpTableFollowsTB(MI, User.CPEMI) && !PreservedBaseReg)
        continue;
    } else {
      // We're in thumb-1 mode, so we must have something like:
      //   %idx = tLSLri %idx, 2
      //   %base = tLEApcrelJT
      //   %t = tLDRr %base, %idx
      Register BaseReg = User.MI->getOperand(0).getReg();

      MachineBasicBlock *UserMBB = User.MI->getParent();
      MachineBasicBlock::iterator Shift = User.MI->getIterator();
      if (Shift == UserMBB->begin())
        continue;

      Shift = prev_nodbg(Shift, UserMBB->begin());
      if (Shift->getOpcode() != ARM::tLSLri ||
          Shift->getOperand(3).getImm() != 2 ||
          !Shift->getOperand(2).isKill())
        continue;
      IdxReg = Shift->getOperand(2).getReg();
      Register ShiftedIdxReg = Shift->getOperand(0).getReg();

      MachineInstr *Load = User.MI->getNextNode();
      if (Load->getOpcode() != ARM::tLDRr)
        continue;
      if (Load->getOperand(1).getReg() != BaseReg ||
          Load->getOperand(2).getReg() != ShiftedIdxReg ||
          !Load->getOperand(2).isKill())
        continue;

      // If we're in PIC mode, there should be another ADD following.
      auto *TRI = STI->getRegisterInfo();

      if (isPositionIndependentOrROPI) {
        MachineInstr *Add = Load->getNextNode();
        if (Add->getOpcode() != ARM::tADDrr ||
            Add->getOperand(2).getReg() != BaseReg ||
            Add->getOperand(3).getReg() != Load->getOperand(0).getReg() ||
            !Add->getOperand(3).isKill())
          continue;
        if (Add->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Add->getNextNode(), MBB->end(),
                                   TRI))
          // IdxReg gets redefined in the middle of the sequence.
          continue;
        Add->eraseFromParent();
        DeadSize += 2;
      } else {
        if (Load->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Load->getNextNode(), MBB->end(),
                                   TRI))
          // IdxReg gets redefined in the middle of the sequence.
          continue;
      }

      // Now safe to delete the load and lsl.  The LEA will be removed later.
      CanDeleteLEA = true;
      Shift->eraseFromParent();
      Load->eraseFromParent();
      DeadSize += 4;
    }

    LLVM_DEBUG(dbgs() << "Shrink JT: " << *MI);
    MachineInstr *CPEMI = User.CPEMI;
    unsigned Opc = ByteOk ? ARM::t2TBB_JT : ARM::t2TBH_JT;
    if (!isThumb2)
      Opc = ByteOk ? ARM::tTBB_JT : ARM::tTBH_JT;

    MachineInstr *NewJTMI =
        BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(Opc))
            .addReg(User.MI->getOperand(0).getReg(),
                    getKillRegState(BaseRegKill))
            .addReg(IdxReg, getKillRegState(IdxRegKill))
            .addJumpTableIndex(JTI, JTOP.getTargetFlags())
            .addImm(CPEMI->getOperand(0).getImm());

    unsigned JTOpc = ByteOk ? ARM::JUMPTABLE_TBB : ARM::JUMPTABLE_TBH;
    CPEMI->setDesc(TII->get(JTOpc));

    if (jumpTableFollowsTB(MI, User.CPEMI)) {
      NewJTMI->getOperand(0).setReg(ARM::PC);
      NewJTMI->getOperand(0).setIsKill(false);

      if (CanDeleteLEA) {
        if (isThumb2)
          RemoveDeadAddBetweenLEAAndJT(User.MI, MI, DeadSize);

        User.MI->eraseFromParent();
        DeadSize += isThumb2 ? 4 : 2;

        // The LEA was eliminated; the TBB instruction becomes the only new
        // user of the jump table.
        User.MI = NewJTMI;
        User.MaxDisp = 4;
        User.NegOk = false;
        User.IsSoImm = false;
        User.KnownAlignment = false;
      } else {
        // The LEA couldn't be eliminated, so we must add another CPUser to
        // record the TBB or TBH use.
        int CPEntryIdx = JumpTableEntryIndices[JTI];
        auto &CPEs = CPEntries[CPEntryIdx];
        auto Entry =
            find_if(CPEs, [&](CPEntry &E) { return E.CPEMI == User.CPEMI; });
        ++Entry->RefCount;
        CPUsers.emplace_back(CPUser(NewJTMI, User.CPEMI, 4, false, false));
      }
    }

    unsigned NewSize = TII->getInstSizeInBytes(*NewJTMI);
    unsigned OrigSize = TII->getInstSizeInBytes(*MI);
    MI->eraseFromParent();

    int Delta = OrigSize - NewSize + DeadSize;
    BBInfo[MBB->getNumber()].Size -= Delta;
    BBUtils->adjustBBOffsetsAfter(MBB);

    ++NumTBs;
    MadeChange = true;
  }

  return MadeChange;
}
/// reorderThumb2JumpTables - Adjust the function's block layout to ensure
/// that jump tables follow the adjacent branches.
bool ARMConstantIslands::reorderThumb2JumpTables() {
  bool MadeChange = false;

  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (MachineInstr *MI : T2JumpTables) {
    const MCInstrDesc &MCID = MI->getDesc();
    unsigned NumOps = MCID.getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();
    assert(JTI < JT.size());

    // We prefer if target blocks for the jump table come after the jump
    // instruction so we can use TB[BH].  Loop through the target blocks and
    // try to adjust them such that that's true.
    int JTNumber = MI->getParent()->getNumber();
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    for (MachineBasicBlock *MBB : JTBBs) {
      int DTNumber = MBB->getNumber();
      if (DTNumber < JTNumber) {
        // The destination precedes the switch.  Try to move the block forward
        // so we can use TB[BH].
        MachineBasicBlock *NewBB =
            adjustJTTargetBlockForward(JTI, MBB, MI->getParent());
        if (NewBB)
          MJTI->ReplaceMBBInJumpTable(JTI, MBB, NewBB);
        MadeChange = true;
      }
    }
  }

  return MadeChange;
}
MachineBasicBlock *ARMConstantIslands::adjustJTTargetBlockForward(
    unsigned JTI, MachineBasicBlock *BB, MachineBasicBlock *JTBB) {
  // If the destination block is terminated by an unconditional branch, try to
  // move it; otherwise, create a new block following the jump table that
  // branches back to the actual target.
  MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
  SmallVector<MachineOperand, 4> Cond, CondPrior;
  MachineFunction::iterator OldPrior = std::prev(BB->getIterator());
  bool B = TII->analyzeBranch(*BB, TBB, FBB, Cond);

  // If the block ends in an unconditional branch, move it.  The prior block
  // has to have an analyzable terminator for us to move this one.  Be
  // paranoid and make sure we're not trying to move the entry block of the
  // function.
  if (!B && Cond.empty() && BB != &MF->front() &&
      !TII->analyzeBranch(*OldPrior, TBB, FBB, CondPrior)) {
    BB->moveAfter(JTBB);
    OldPrior->updateTerminator(BB);
    // ...
    MF->RenumberBlocks();
    ++NumJTMoved;
    return nullptr;
  }

  // Create a new MBB for the code after the jump BB.
  MachineBasicBlock *NewBB =
      MF->CreateMachineBasicBlock(JTBB->getBasicBlock());
  MachineFunction::iterator MBBI = ++JTBB->getIterator();
  MF->insert(MBBI, NewBB);
  // ... (emit a branch to BB, update the CFG and the jump table)

  // Update internal data structures to account for the newly inserted MBB.
  MF->RenumberBlocks(NewBB);
  ++NumJTInserted;
  return NewBB;
}
/// createARMConstantIslandPass - returns an instance of the constpool island
/// pass.
FunctionPass *llvm::createARMConstantIslandPass() {
  return new ARMConstantIslands();
}