#define AARCH64_EXPAND_PSEUDO_NAME "AArch64 pseudo instruction expansion pass"
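// This pass runs after register allocation and rewrites pseudo instructions
// (immediate materialization, atomic compare-and-swap loops, SVE/SME
// pseudos, special call forms) into real AArch64 instructions, splitting
// basic blocks where an expansion needs control flow.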
                            unsigned ContiguousOpc, unsigned StridedOpc);
                      unsigned LdarOp, unsigned StlrOp, unsigned CmpOp,
                      unsigned ExtendImm, unsigned ZeroReg,
  struct ConditionalBlocks {
char AArch64ExpandPseudo::ID = 0;
    assert(MO.isReg() && MO.getReg());
  uint64_t RenamableState =
      MI.getOperand(0).isRenamable() ? RegState::Renamable : 0;
  uint64_t Imm = MI.getOperand(1).getImm();
  if (DstReg == AArch64::XZR || DstReg == AArch64::WZR) {
    MI.eraseFromParent();
  SmallVector<MachineInstrBuilder, 4> MIBS;
    bool LastItem = std::next(I) == E;
    case AArch64::ORRWri:
    case AArch64::ORRXri:
              .add(MI.getOperand(0))
              .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
        bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::ORRWrs:
    case AArch64::ORRXrs: {
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::ANDXri:
    case AArch64::EORXri:
              .add(MI.getOperand(0))
              .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
        bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::MOVNWi:
    case AArch64::MOVNXi:
    case AArch64::MOVZWi:
    case AArch64::MOVZXi: {
      bool DstIsDead = MI.getOperand(0).isDead();
    case AArch64::MOVKWi:
    case AArch64::MOVKXi: {
      bool DstIsDead = MI.getOperand(0).isDead();
  MI.eraseFromParent();
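// Expand a byte/halfword/word/doubleword CMP_SWAP pseudo into a
// load-exclusive / store-exclusive retry loop. A sketch of the emitted
// control flow (block and register names illustrative, not from the source):
//
//   loadcmp:
//     movz   wStatus, #0             ; only if StatusReg is not dead
//     ld*xr  xDest, [xAddr]
//     cmp    xDest, xDesired         ; CmpOp with ExtendImm, result discarded
//     b.ne   done
//   store:
//     st*xr  wStatus, xNew, [xAddr]  ; StlrOp
//     cbnz   wStatus, loadcmp
//   done: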
bool AArch64ExpandPseudo::expandCMP_SWAP(
    unsigned StlrOp, unsigned CmpOp, unsigned ExtendImm, unsigned ZeroReg,
  const MachineOperand &Dest = MI.getOperand(0);
  Register StatusReg = MI.getOperand(1).getReg();
  bool StatusDead = MI.getOperand(1).isDead();
  assert(!MI.getOperand(2).isUndef() && "cannot handle undef");
  Register DesiredReg = MI.getOperand(3).getReg();
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), DoneBB);
    BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::MOVZWi), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(CmpOp), ZeroReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::Bcc))
  LoadCmpBB->addSuccessor(DoneBB);
  LoadCmpBB->addSuccessor(StoreBB);
  BuildMI(StoreBB, MIMD, TII->get(StlrOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  LivePhysRegs LiveRegs;
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
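// Expand the 128-bit CMP_SWAP pseudos using a paired-exclusive loop
// (LDXP/LDAXP + STXP/STLXP), with the acquire/release flavor of each
// opcode selected per memory ordering below. A separate fail block stores
// the just-loaded value back so the exclusive monitor is cleared on the
// comparison-failure path.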
bool AArch64ExpandPseudo::expandCMP_SWAP_128(
  MachineOperand &DestLo = MI.getOperand(0);
  MachineOperand &DestHi = MI.getOperand(1);
  Register StatusReg = MI.getOperand(2).getReg();
  bool StatusDead = MI.getOperand(2).isDead();
  assert(!MI.getOperand(3).isUndef() && "cannot handle undef");
  Register DesiredLoReg = MI.getOperand(4).getReg();
  Register DesiredHiReg = MI.getOperand(5).getReg();
  Register NewLoReg = MI.getOperand(6).getReg();
  Register NewHiReg = MI.getOperand(7).getReg();
  unsigned LdxpOp, StxpOp;
  switch (MI.getOpcode()) {
  case AArch64::CMP_SWAP_128_MONOTONIC:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128_RELEASE:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STLXPX;
    break;
  case AArch64::CMP_SWAP_128_ACQUIRE:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STLXPX;
    break;
  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), FailBB);
  MF->insert(++FailBB->getIterator(), DoneBB);
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CBNZW))
  LoadCmpBB->addSuccessor(FailBB);
  LoadCmpBB->addSuccessor(StoreBB);
  BuildMI(StoreBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);
  BuildMI(FailBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(FailBB, MIMD, TII->get(AArch64::CBNZW))
  FailBB->addSuccessor(LoadCmpBB);
  FailBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  LivePhysRegs LiveRegs;
  FailBB->clearLiveIns();
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
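// Expand a destructive SVE operation. Operand indices are first remapped
// for the commutative/reversed instruction forms; if the destination does
// not already equal the destructive source operand, a MOVPRFX (predicated
// and zeroing where required) is prefixed so the operation is encodable.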
bool AArch64ExpandPseudo::expand_DestructiveOp(
    MachineBasicBlock &MBB,
  bool DstIsDead = MI.getOperand(0).isDead();
  unsigned PredIdx, DOPIdx, SrcIdx, Src2Idx;
    if (DstReg == MI.getOperand(3).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 3, 2);
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 2, 3);
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(2, 3, 3);
    std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 2, 3, 4);
    if (DstReg == MI.getOperand(3).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 3, 4, 2);
    } else if (DstReg == MI.getOperand(4).getReg()) {
      std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 4, 3, 2);
    std::tie(DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 1, 2);
  bool DOPRegIsUnique = false;
    DOPRegIsUnique = DstReg != MI.getOperand(SrcIdx).getReg();
        DstReg != MI.getOperand(DOPIdx).getReg() ||
        MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg();
    DOPRegIsUnique = true;
        DstReg != MI.getOperand(DOPIdx).getReg() ||
        (MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg() &&
         MI.getOperand(DOPIdx).getReg() != MI.getOperand(Src2Idx).getReg());
  uint64_t ElementSize = TII->getElementSizeForOpcode(Opcode);
  unsigned MovPrfx, LSLZero, MovPrfxZero;
  switch (ElementSize) {
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_B;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_B;
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_H;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_H;
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_S;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_S;
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_D;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_D;
  MachineInstrBuilder PRFX, DOP;
           "The destructive operand should be unique");
           "This instruction is unpredicated");
               .addReg(MI.getOperand(PredIdx).getReg())
               .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState);
        .add(MI.getOperand(PredIdx))
  } else if (DstReg != MI.getOperand(DOPIdx).getReg()) {
    assert(DOPRegIsUnique && "The destructive operand should be unique");
               .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState);
    DOP.addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(PredIdx))
        .add(MI.getOperand(SrcIdx));
    DOP.add(MI.getOperand(PredIdx))
        .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(SrcIdx));
    DOP.add(MI.getOperand(PredIdx))
        .addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(SrcIdx))
        .add(MI.getOperand(Src2Idx));
    DOP.addReg(MI.getOperand(DOPIdx).getReg(), DOPRegState)
        .add(MI.getOperand(SrcIdx))
        .add(MI.getOperand(Src2Idx));
    transferImpOps(MI, PRFX, DOP);
    transferImpOps(MI, DOP, DOP);
  MI.eraseFromParent();
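// Expand STGloop_wback / STZGloop_wback into a post-indexed MTE tag-store
// loop: an odd 16-byte remainder is peeled off first with STG/STZG, then
// the loop stores two tag granules (32 bytes) per iteration with
// ST2G/STZ2G post-index until the remaining size reaches zero.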
bool AArch64ExpandPseudo::expandSetTagLoop(
  Register AddressReg = MI.getOperand(1).getReg();
  bool ZeroData = MI.getOpcode() == AArch64::STZGloop_wback;
  const unsigned OpCode1 =
      ZeroData ? AArch64::STZGPostIndex : AArch64::STGPostIndex;
  const unsigned OpCode2 =
      ZeroData ? AArch64::STZ2GPostIndex : AArch64::ST2GPostIndex;
  unsigned Size = MI.getOperand(2).getImm();
  if (Size % (16 * 2) != 0) {
  MF->insert(++LoopBB->getIterator(), DoneBB);
  LoopBB->addSuccessor(LoopBB);
  LoopBB->addSuccessor(DoneBB);
  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);
  MI.eraseFromParent();
  LivePhysRegs LiveRegs;
  LoopBB->clearLiveIns();
  DoneBB->clearLiveIns();
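// Expand a multi-register SVE spill/fill pseudo (e.g. STR_ZZZZXI) into N
// consecutive single-register LDR/STR instructions at adjacent immediate
// offsets, addressing the tuple's sub-registers one by one.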
bool AArch64ExpandPseudo::expandSVESpillFill(MachineBasicBlock &MBB,
                                             unsigned Opc, unsigned N) {
  assert((Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI ||
          Opc == AArch64::LDR_PXI || Opc == AArch64::STR_PXI) &&
         "Unexpected opcode");
  unsigned RState = (Opc == AArch64::LDR_ZXI || Opc == AArch64::LDR_PXI)
                        ? RegState::Define
                        : 0;
  unsigned sub0 = (Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI)
                      ? AArch64::zsub0
                      : AArch64::psub0;
  const TargetRegisterInfo *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
    int ImmOffset = MI.getOperand(2).getImm() + Offset;
    bool Kill = (Offset + 1 == N) ? MI.getOperand(1).isKill() : false;
    assert(ImmOffset >= -256 && ImmOffset < 256 &&
           "Immediate spill offset out of range");
  MI.eraseFromParent();
    unsigned RegMaskStartIdx) {
  while (!MBBI->getOperand(RegMaskStartIdx).isRegMask()) {
    assert(MOP.isReg() && "can only add register operands");
        MOP.getReg(), /*Def=*/false, /*Implicit=*/true, /*isKill=*/false,
    Call->addOperand(MO);
    unsigned RegMaskStartIdx) {
  unsigned Opc = CallTarget.isGlobal() ? AArch64::BL : AArch64::BLR;
         "invalid operand for regular call");
bool AArch64ExpandPseudo::expandCALL_RVMARKER(
  MachineOperand &RVTarget = MI.getOperand(0);
  bool DoEmitMarker = MI.getOperand(1).getImm();
  assert(RVTarget.isGlobal() && "invalid operand for attached call");
  MachineInstr *OriginalCall = nullptr;
  if (MI.getOpcode() == AArch64::BLRA_RVMARKER) {
    const MachineOperand &CallTarget = MI.getOperand(2);
    const MachineOperand &Key = MI.getOperand(3);
    const MachineOperand &IntDisc = MI.getOperand(4);
    const MachineOperand &AddrDisc = MI.getOperand(5);
           "Invalid auth call key");
    MachineOperand Ops[] = {CallTarget, Key, IntDisc, AddrDisc};
    assert(MI.getOpcode() == AArch64::BLR_RVMARKER && "unknown rvmarker MI");
  if (MI.shouldUpdateAdditionalCallInfo())
  MI.eraseFromParent();
                   std::next(RVCall->getIterator()));
bool AArch64ExpandPseudo::expandCALL_BTI(MachineBasicBlock &MBB,
  if (MI.shouldUpdateAdditionalCallInfo())
  MI.eraseFromParent();
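// Expand StoreSwiftAsyncContext: store the async context record relative
// to the frame pointer. On non-arm64e targets a plain store suffices; on
// arm64e the context pointer is signed in an address-discriminated way
// before the store, with ADD/SUB materializing the store address.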
bool AArch64ExpandPseudo::expandStoreSwiftAsyncContext(
  if (STI.getTargetTriple().getArchName() != "arm64e") {
    unsigned Opc = Offset >= 0 ? AArch64::ADDXri : AArch64::SUBXri;
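// Common helper for the conditional SME pseudos below: split the current
// block around the pseudo, attach the supplied conditional branch, and
// return both the block holding the conditional code and the join block.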
AArch64ExpandPseudo::ConditionalBlocks
AArch64ExpandPseudo::expandConditionalPseudo(MachineBasicBlock &MBB,
                                             MachineInstrBuilder &Branch) {
         "Unexpected unreachable in block");
  MachineInstr &PrevMI = *std::prev(MBBI);
  MachineBasicBlock *CondBB = MBB.splitAt(PrevMI, /*UpdateLiveIns=*/true);
  MachineBasicBlock *EndBB =
      std::next(MBBI) == CondBB->end()
  return {*CondBB, *EndBB};
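// Expand RestoreZAPseudo (and, below, CommitZASavePseudo): branch over a
// conditional block that performs the lazy-save restore/commit call. The
// pseudo's operands from index 2 onward are forwarded to that call, and
// the commit path additionally touches TPIDR2_EL0.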
AArch64ExpandPseudo::expandRestoreZASave(MachineBasicBlock &MBB,
  MachineInstr &MI = *MBBI;
  MachineInstrBuilder Branch =
  auto [CondBB, EndBB] = expandConditionalPseudo(MBB, MBBI, DL, Branch);
  MachineInstrBuilder MIB =
  for (unsigned I = 2; I < MI.getNumOperands(); ++I)
    MIB.add(MI.getOperand(I));
  MI.eraseFromParent();
  auto [CondBB, EndBB] = expandConditionalPseudo(MBB, MBBI, DL, Branch);
  for (unsigned I = 2; I < MI.getNumOperands(); ++I)
    MIB.add(MI.getOperand(I));
      .addImm(AArch64SysReg::TPIDR2_EL0)
  bool ZeroZA = MI.getOperand(1).getImm() != 0;
  [[maybe_unused]] auto *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
  assert(MI.definesRegister(AArch64::ZAB0, TRI) && "should define ZA!");
  MI.eraseFromParent();
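// Expand MSRpstatePseudo (conditional smstart/smstop): if the pseudo ends a
// block with no successors it can simply be dropped; otherwise a TBZ/TBNZ
// on the saved PSTATE.SM value skips the SVCR MSR when streaming mode does
// not actually need to change.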
AArch64ExpandPseudo::expandCondSMToggle(MachineBasicBlock &MBB,
  MachineInstr &MI = *MBBI;
      MI.getParent()->successors().begin() ==
          MI.getParent()->successors().end()) {
    MI.eraseFromParent();
  switch (MI.getOperand(2).getImm()) {
    Opc = AArch64::TBNZW;
    Opc = AArch64::TBZW;
  auto PStateSM = MI.getOperand(3).getReg();
  unsigned SMReg32 = TRI->getSubReg(PStateSM, AArch64::sub_32);
  MachineInstrBuilder Tbx =
  auto [CondBB, EndBB] = expandConditionalPseudo(MBB, MBBI, DL, Tbx);
  MachineInstrBuilder MIB = BuildMI(CondBB, CondBB.back(), MI.getDebugLoc(),
                                    TII->get(AArch64::MSRpstatesvcrImm1));
  MIB.add(MI.getOperand(0));
  MIB.add(MI.getOperand(1));
  for (unsigned i = 4; i < MI.getNumOperands(); ++i)
    MIB.add(MI.getOperand(i));
  MI.eraseFromParent();
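// Expand a strided/contiguous multi-vector load pseudo: pick the real
// opcode according to whether the destination tuple was allocated in the
// contiguous or the strided register class, then forward the four operands
// unchanged.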
bool AArch64ExpandPseudo::expandMultiVecPseudo(
    TargetRegisterClass ContiguousClass, TargetRegisterClass StridedClass,
    unsigned ContiguousOp, unsigned StridedOpc) {
  MachineInstr &MI = *MBBI;
          .add(MI.getOperand(0))
          .add(MI.getOperand(1))
          .add(MI.getOperand(2))
          .add(MI.getOperand(3));
  transferImpOps(MI, MIB, MIB);
  MI.eraseFromParent();
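// Expand FORM_TRANSPOSED_REG_TUPLE_X{2,4}: copy each source register into
// the matching zsub sub-register of the result tuple, skipping copies that
// are already in place.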
bool AArch64ExpandPseudo::expandFormTuplePseudo(
  MachineInstr &MI = *MBBI;
  Register ReturnTuple = MI.getOperand(0).getReg();
  const TargetRegisterInfo *TRI =
      MBB.getParent()->getSubtarget().getRegisterInfo();
  for (unsigned I = 0; I < Size; ++I) {
    Register FormTupleOpReg = MI.getOperand(I + 1).getReg();
    Register ReturnTupleSubReg =
        TRI->getSubReg(ReturnTuple, AArch64::zsub0 + I);
    if (FormTupleOpReg != ReturnTupleSubReg)
  MI.eraseFromParent();
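// Main dispatch: if the opcode maps to an SVE destructive pseudo it is
// routed to expand_DestructiveOp; otherwise each pseudo opcode below is
// lowered case by case.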
bool AArch64ExpandPseudo::expandMI(MachineBasicBlock &MBB,
  MachineInstr &MI = *MBBI;
  unsigned Opcode = MI.getOpcode();
  if (OrigInstr != -1) {
    auto &Orig = TII->get(OrigInstr);
      return expand_DestructiveOp(MI, MBB, MBBI);
  case AArch64::BSPv8i8:
  case AArch64::BSPv16i8: {
    if (DstReg == MI.getOperand(3).getReg()) {
              TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BITv8i8
                                                  : AArch64::BITv16i8))
              .add(MI.getOperand(0))
              .add(MI.getOperand(3))
              .add(MI.getOperand(2))
              .add(MI.getOperand(1));
      transferImpOps(MI, I, I);
    } else if (DstReg == MI.getOperand(2).getReg()) {
              TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BIFv8i8
                                                  : AArch64::BIFv16i8))
              .add(MI.getOperand(0))
              .add(MI.getOperand(2))
              .add(MI.getOperand(3))
              .add(MI.getOperand(1));
      transferImpOps(MI, I, I);
      if (DstReg == MI.getOperand(1).getReg()) {
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                    : AArch64::BSLv16i8))
                .add(MI.getOperand(0))
                .add(MI.getOperand(1))
                .add(MI.getOperand(2))
                .add(MI.getOperand(3));
        transferImpOps(MI, I, I);
                MI.getOperand(1).isKill() &&
                MI.getOperand(1).getReg() != MI.getOperand(2).getReg() &&
                MI.getOperand(1).getReg() != MI.getOperand(3).getReg());
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::ORRv8i8
                                                    : AArch64::ORRv16i8))
                .addReg(MI.getOperand(1).getReg(), RegState)
                .addReg(MI.getOperand(1).getReg(), RegState);
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                    : AArch64::BSLv16i8))
                .add(MI.getOperand(0))
                                       MI.getOperand(0).isRenamable()))
                .add(MI.getOperand(2))
                .add(MI.getOperand(3));
        transferImpOps(MI, I2, I2);
    MI.eraseFromParent();
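  // These zero-shift register-register forms are pseudo aliases of the
  // shifted-register encodings; each is rewritten to its *rs counterpart
  // with an LSL #0 shift operand appended.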
  case AArch64::ADDWrr:
  case AArch64::SUBWrr:
  case AArch64::ADDXrr:
  case AArch64::SUBXrr:
  case AArch64::ADDSWrr:
  case AArch64::SUBSWrr:
  case AArch64::ADDSXrr:
  case AArch64::SUBSXrr:
  case AArch64::ANDWrr:
  case AArch64::ANDXrr:
  case AArch64::BICWrr:
  case AArch64::BICXrr:
  case AArch64::ANDSWrr:
  case AArch64::ANDSXrr:
  case AArch64::BICSWrr:
  case AArch64::BICSXrr:
  case AArch64::EONWrr:
  case AArch64::EONXrr:
  case AArch64::EORWrr:
  case AArch64::EORXrr:
  case AArch64::ORNWrr:
  case AArch64::ORNXrr:
  case AArch64::ORRWrr:
  case AArch64::ORRXrr: {
    switch (MI.getOpcode()) {
    case AArch64::ADDWrr:  Opcode = AArch64::ADDWrs;  break;
    case AArch64::SUBWrr:  Opcode = AArch64::SUBWrs;  break;
    case AArch64::ADDXrr:  Opcode = AArch64::ADDXrs;  break;
    case AArch64::SUBXrr:  Opcode = AArch64::SUBXrs;  break;
    case AArch64::ADDSWrr: Opcode = AArch64::ADDSWrs; break;
    case AArch64::SUBSWrr: Opcode = AArch64::SUBSWrs; break;
    case AArch64::ADDSXrr: Opcode = AArch64::ADDSXrs; break;
    case AArch64::SUBSXrr: Opcode = AArch64::SUBSXrs; break;
    case AArch64::ANDWrr:  Opcode = AArch64::ANDWrs;  break;
    case AArch64::ANDXrr:  Opcode = AArch64::ANDXrs;  break;
    case AArch64::BICWrr:  Opcode = AArch64::BICWrs;  break;
    case AArch64::BICXrr:  Opcode = AArch64::BICXrs;  break;
    case AArch64::ANDSWrr: Opcode = AArch64::ANDSWrs; break;
    case AArch64::ANDSXrr: Opcode = AArch64::ANDSXrs; break;
    case AArch64::BICSWrr: Opcode = AArch64::BICSWrs; break;
    case AArch64::BICSXrr: Opcode = AArch64::BICSXrs; break;
    case AArch64::EONWrr:  Opcode = AArch64::EONWrs;  break;
    case AArch64::EONXrr:  Opcode = AArch64::EONXrs;  break;
    case AArch64::EORWrr:  Opcode = AArch64::EORWrs;  break;
    case AArch64::EORXrr:  Opcode = AArch64::EORXrs;  break;
    case AArch64::ORNWrr:  Opcode = AArch64::ORNWrs;  break;
    case AArch64::ORNXrr:  Opcode = AArch64::ORNXrs;  break;
    case AArch64::ORRWrr:  Opcode = AArch64::ORRWrs;  break;
    case AArch64::ORRXrr:  Opcode = AArch64::ORRXrs;  break;
    MachineInstr *NewMI = MF.CreateMachineInstr(
        TII->get(Opcode), MI.getDebugLoc(), /*NoImplicit=*/true);
    MachineInstrBuilder MIB1(MF, NewMI);
    MIB1->setPCSections(MF, MI.getPCSections());
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB1);
    if (auto DebugNumber = MI.peekDebugInstrNum())
    MI.eraseFromParent();
  case AArch64::LOADgot: {
    const MachineOperand &MO1 = MI.getOperand(1);
                     TII->get(AArch64::LDRXl), DstReg);
             "Only expect globals, externalsymbols, or constant pools");
      MachineFunction &MF = *MI.getParent()->getParent();
      MachineInstrBuilder MIB1 =
      MachineInstrBuilder MIB2;
      if (MF.getSubtarget<AArch64Subtarget>().isTargetILP32()) {
        unsigned Reg32 = TRI->getSubReg(DstReg, AArch64::sub_32);
        unsigned DstFlags = MI.getOperand(0).getTargetFlags();
                   .add(MI.getOperand(0))
               "Only expect globals, externalsymbols, or constant pools");
      if (MI.peekDebugInstrNum() != 0)
      transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
  case AArch64::MOVaddrBA: {
    MachineFunction &MF = *MI.getParent()->getParent();
    if (MF.getSubtarget<AArch64Subtarget>().isTargetMachO()) {
      assert(MI.getOperand(1).getOffset() == 0 && "unexpected offset");
                                 TII->get(AArch64::LDRXui), DstReg)
      transferImpOps(MI, MIB1, MIB2);
      MI.eraseFromParent();
  case AArch64::MOVaddr:
  case AArch64::MOVaddrJT:
  case AArch64::MOVaddrCP:
  case AArch64::MOVaddrTLS:
  case AArch64::MOVaddrEXT: {
    assert(DstReg != AArch64::XZR);
    MachineInstrBuilder MIB1 =
            .add(MI.getOperand(1));
      auto Tag = MI.getOperand(1);
      Tag.setOffset(0x100000000);
    MachineInstrBuilder MIB2 =
            .add(MI.getOperand(0))
            .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
  case AArch64::ADDlowTLS:
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    MI.eraseFromParent();
  case AArch64::MOVbaseTLS: {
    auto SysReg = AArch64SysReg::TPIDR_EL0;
    if (MF->getSubtarget<AArch64Subtarget>().useEL3ForTP())
      SysReg = AArch64SysReg::TPIDR_EL3;
    else if (MF->getSubtarget<AArch64Subtarget>().useEL2ForTP())
      SysReg = AArch64SysReg::TPIDR_EL2;
    else if (MF->getSubtarget<AArch64Subtarget>().useEL1ForTP())
      SysReg = AArch64SysReg::TPIDR_EL1;
    else if (MF->getSubtarget<AArch64Subtarget>().useROEL0ForTP())
      SysReg = AArch64SysReg::TPIDRRO_EL0;
    MI.eraseFromParent();
  case AArch64::MOVi32imm:
  case AArch64::MOVi64imm:
  case AArch64::RET_ReallyLR: {
    MachineInstrBuilder MIB =
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::CMP_SWAP_8:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRB, AArch64::STLXRB,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTB, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_16:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRH, AArch64::STLXRH,
                          AArch64::SUBSWrx,
                          AArch64_AM::getArithExtendImm(AArch64_AM::UXTH, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_32:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRW, AArch64::STLXRW,
                          AArch64::SUBSWrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_64:
    return expandCMP_SWAP(MBB, MBBI,
                          AArch64::LDAXRX, AArch64::STLXRX, AArch64::SUBSXrs,
                          AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),
                          AArch64::XZR, NextMBBI);
  case AArch64::CMP_SWAP_128:
  case AArch64::CMP_SWAP_128_RELEASE:
  case AArch64::CMP_SWAP_128_ACQUIRE:
  case AArch64::CMP_SWAP_128_MONOTONIC:
    return expandCMP_SWAP_128(MBB, MBBI, NextMBBI);
  case AArch64::AESMCrrTied:
  case AArch64::AESIMCrrTied: {
    MachineInstrBuilder MIB =
                TII->get(Opcode == AArch64::AESMCrrTied ? AArch64::AESMCrr
                                                        : AArch64::AESIMCrr))
            .add(MI.getOperand(0))
            .add(MI.getOperand(1));
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::IRGstack: {
    const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
    const AArch64FrameLowering *TFI =
        MF.getSubtarget<AArch64Subtarget>().getFrameLowering();
    StackOffset FrameRegOffset = TFI->resolveFrameOffsetReference(
    if (FrameRegOffset) {
      SrcReg = MI.getOperand(0).getReg();
                      FrameRegOffset, TII);
        .add(MI.getOperand(0))
        .add(MI.getOperand(2));
    MI.eraseFromParent();
  case AArch64::TAGPstack: {
    int64_t Offset = MI.getOperand(2).getImm();
            TII->get(Offset >= 0 ? AArch64::ADDG : AArch64::SUBG))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(4));
    MI.eraseFromParent();
  case AArch64::STGloop_wback:
  case AArch64::STZGloop_wback:
    return expandSetTagLoop(MBB, MBBI, NextMBBI);
  case AArch64::STGloop:
  case AArch64::STZGloop:
    report_fatal_error(
        "Non-writeback variants of STGloop / STZGloop should not "
        "survive past PrologEpilogInserter.");
  case AArch64::STR_ZZZZXI:
  case AArch64::STR_ZZZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 4);
  case AArch64::STR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 3);
  case AArch64::STR_ZZXI:
  case AArch64::STR_ZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 2);
  case AArch64::STR_PPXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_PXI, 2);
  case AArch64::LDR_ZZZZXI:
  case AArch64::LDR_ZZZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 4);
  case AArch64::LDR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 3);
  case AArch64::LDR_ZZXI:
  case AArch64::LDR_ZZXI_STRIDED_CONTIGUOUS:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 2);
  case AArch64::LDR_PPXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_PXI, 2);
  case AArch64::BLR_RVMARKER:
  case AArch64::BLRA_RVMARKER:
    return expandCALL_RVMARKER(MBB, MBBI);
  case AArch64::BLR_BTI:
    return expandCALL_BTI(MBB, MBBI);
  case AArch64::StoreSwiftAsyncContext:
    return expandStoreSwiftAsyncContext(MBB, MBBI);
  case AArch64::RestoreZAPseudo:
  case AArch64::CommitZASavePseudo:
  case AArch64::MSRpstatePseudo: {
    auto *NewMBB = [&] {
      case AArch64::RestoreZAPseudo:
        return expandRestoreZASave(MBB, MBBI);
      case AArch64::CommitZASavePseudo:
        return expandCommitZASave(MBB, MBBI);
      case AArch64::MSRpstatePseudo:
        return expandCondSMToggle(MBB, MBBI);
  case AArch64::InOutZAUsePseudo:
  case AArch64::RequiresZASavePseudo:
  case AArch64::SMEStateAllocPseudo:
  case AArch64::COALESCER_BARRIER_FPR16:
  case AArch64::COALESCER_BARRIER_FPR32:
  case AArch64::COALESCER_BARRIER_FPR64:
  case AArch64::COALESCER_BARRIER_FPR128:
    MI.eraseFromParent();
  case AArch64::LD1B_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1B_2Z_IMM, AArch64::LD1B_2Z_STRIDED_IMM);
  case AArch64::LD1H_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1H_2Z_IMM, AArch64::LD1H_2Z_STRIDED_IMM);
  case AArch64::LD1W_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1W_2Z_IMM, AArch64::LD1W_2Z_STRIDED_IMM);
  case AArch64::LD1D_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1D_2Z_IMM, AArch64::LD1D_2Z_STRIDED_IMM);
  case AArch64::LDNT1B_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1B_2Z_IMM, AArch64::LDNT1B_2Z_STRIDED_IMM);
  case AArch64::LDNT1H_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1H_2Z_IMM, AArch64::LDNT1H_2Z_STRIDED_IMM);
  case AArch64::LDNT1W_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1W_2Z_IMM, AArch64::LDNT1W_2Z_STRIDED_IMM);
  case AArch64::LDNT1D_2Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1D_2Z_IMM, AArch64::LDNT1D_2Z_STRIDED_IMM);
  case AArch64::LD1B_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1B_2Z, AArch64::LD1B_2Z_STRIDED);
  case AArch64::LD1H_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1H_2Z, AArch64::LD1H_2Z_STRIDED);
  case AArch64::LD1W_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1W_2Z, AArch64::LD1W_2Z_STRIDED);
  case AArch64::LD1D_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LD1D_2Z, AArch64::LD1D_2Z_STRIDED);
  case AArch64::LDNT1B_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1B_2Z, AArch64::LDNT1B_2Z_STRIDED);
  case AArch64::LDNT1H_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1H_2Z, AArch64::LDNT1H_2Z_STRIDED);
  case AArch64::LDNT1W_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1W_2Z, AArch64::LDNT1W_2Z_STRIDED);
  case AArch64::LDNT1D_2Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
        AArch64::LDNT1D_2Z, AArch64::LDNT1D_2Z_STRIDED);
  case AArch64::LD1B_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1B_4Z_IMM, AArch64::LD1B_4Z_STRIDED_IMM);
  case AArch64::LD1H_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1H_4Z_IMM, AArch64::LD1H_4Z_STRIDED_IMM);
  case AArch64::LD1W_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1W_4Z_IMM, AArch64::LD1W_4Z_STRIDED_IMM);
  case AArch64::LD1D_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1D_4Z_IMM, AArch64::LD1D_4Z_STRIDED_IMM);
  case AArch64::LDNT1B_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1B_4Z_IMM, AArch64::LDNT1B_4Z_STRIDED_IMM);
  case AArch64::LDNT1H_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1H_4Z_IMM, AArch64::LDNT1H_4Z_STRIDED_IMM);
  case AArch64::LDNT1W_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1W_4Z_IMM, AArch64::LDNT1W_4Z_STRIDED_IMM);
  case AArch64::LDNT1D_4Z_IMM_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1D_4Z_IMM, AArch64::LDNT1D_4Z_STRIDED_IMM);
  case AArch64::LD1B_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1B_4Z, AArch64::LD1B_4Z_STRIDED);
  case AArch64::LD1H_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1H_4Z, AArch64::LD1H_4Z_STRIDED);
  case AArch64::LD1W_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1W_4Z, AArch64::LD1W_4Z_STRIDED);
  case AArch64::LD1D_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LD1D_4Z, AArch64::LD1D_4Z_STRIDED);
  case AArch64::LDNT1B_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1B_4Z, AArch64::LDNT1B_4Z_STRIDED);
  case AArch64::LDNT1H_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1H_4Z, AArch64::LDNT1H_4Z_STRIDED);
  case AArch64::LDNT1W_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1W_4Z, AArch64::LDNT1W_4Z_STRIDED);
  case AArch64::LDNT1D_4Z_PSEUDO:
    return expandMultiVecPseudo(
        MBB, MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
        AArch64::LDNT1D_4Z, AArch64::LDNT1D_4Z_STRIDED);
  case AArch64::FORM_TRANSPOSED_REG_TUPLE_X2_PSEUDO:
    return expandFormTuplePseudo(MBB, MBBI, NextMBBI, 2);
  case AArch64::FORM_TRANSPOSED_REG_TUPLE_X4_PSEUDO:
    return expandFormTuplePseudo(MBB, MBBI, NextMBBI, 4);
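// Driver code: expandMBB walks every instruction in a block, expanding
// pseudos as they are found; runOnMachineFunction visits each block and
// reports whether anything changed.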
bool AArch64ExpandPseudo::expandMBB(MachineBasicBlock &MBB) {
bool AArch64ExpandPseudo::runOnMachineFunction(MachineFunction &MF) {
  TII = static_cast<const AArch64InstrInfo *>(
      MF.getSubtarget().getInstrInfo());
  for (auto &MBB : MF)
  return new AArch64ExpandPseudo();