#define DEBUG_TYPE "frame-info"

STATISTIC(NumRedZoneFunctions, "Number of functions using red zone");
// In requiresGetVGCall():
return AFI->hasStreamingModeChanges() &&
// ...

// In isVGInstruction():
unsigned Opc = MBBI->getOpcode();
if (Opc == AArch64::CNTD_XPiI)
// ...
if (Opc == AArch64::BL)
// ...
return Opc == TargetOpcode::COPY;
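// Note (annotation, not in the original source): isVGInstruction appears to
// recognize the short sequence that materializes the vector granule (VG) for
// streaming-mode-change unwinding: either a single CNTD reading the vector
// length directly, or, when a runtime query is needed (requiresGetVGCall), a
// BL to a helper followed by the COPY that moves its result into place.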
// In isPartOfZPRCalleeSaves():
switch (I->getOpcode()) {
case AArch64::LD1B_2Z_IMM:
case AArch64::ST1B_2Z_IMM:
case AArch64::STR_ZXI:
case AArch64::LDR_ZXI:
case AArch64::PTRUE_C_B:
// ...
case AArch64::SEH_SaveZReg:

// In isPartOfPPRCalleeSaves():
switch (I->getOpcode()) {
case AArch64::STR_PXI:
case AArch64::LDR_PXI:
// ...
case AArch64::SEH_SavePReg:
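// Note (annotation): these predicates classify the instructions that make up
// the SVE callee-save regions of a prologue or epilogue; plain Z/P register
// spills and fills, the multi-vector LD1B/ST1B pairs (with the PTRUE_C_B that
// feeds their predicate), and the matching Windows SEH pseudos. Later code
// uses them to step over a whole save/restore sequence at once.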
if (Subtarget.isTargetWindows() && AFI->getSVECalleeSavedStackSize()) {
  if (AFI->hasStackHazardSlotIndex())
    reportFatalUsageError("SME hazard padding is not supported on Windows");
  SVELayout = SVEStackLayout::CalleeSavesAboveFrameRecord;
} else if (AFI->hasSplitSVEObjects()) {
  SVELayout = SVEStackLayout::Split;
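// Note (annotation): on Windows with SVE callee saves, the layout is forced
// to CalleeSavesAboveFrameRecord, presumably so the SEH unwinder can describe
// the saves, and SME hazard padding is rejected outright. The Split layout,
// by contrast, separates PPR and ZPR stack objects into distinct regions.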
if (AFL.requiresSaveVG(MF)) {
  auto &TLI = *Subtarget.getTargetLowering();
// In convertCalleeSaveRestoreToSPPrePostIncDec():
switch (MBBI->getOpcode()) {
case AArch64::STPXi:
  NewOpc = AArch64::STPXpre;
  break;
case AArch64::STPDi:
  NewOpc = AArch64::STPDpre;
  break;
case AArch64::STPQi:
  NewOpc = AArch64::STPQpre;
  break;
case AArch64::STRXui:
  NewOpc = AArch64::STRXpre;
  break;
case AArch64::STRDui:
  NewOpc = AArch64::STRDpre;
  break;
case AArch64::STRQui:
  NewOpc = AArch64::STRQpre;
  break;
case AArch64::LDPXi:
  NewOpc = AArch64::LDPXpost;
  break;
case AArch64::LDPDi:
  NewOpc = AArch64::LDPDpost;
  break;
case AArch64::LDPQi:
  NewOpc = AArch64::LDPQpost;
  break;
case AArch64::LDRXui:
  NewOpc = AArch64::LDRXpost;
  break;
case AArch64::LDRDui:
  NewOpc = AArch64::LDRDpost;
  break;
case AArch64::LDRQui:
  NewOpc = AArch64::LDRQpost;
  break;
}
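// Note (annotation): the conversion swaps a plain SP-relative save or restore
// for its pre/post-indexed form (STPXi becomes STPXpre, LDPXi becomes
// LDPXpost, and so on), which folds the SP adjustment for the callee-save
// area into the first store of the prologue or the last load of the epilogue.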
int64_t MinOffset, MaxOffset;
// ...
    NewOpc, Scale, Width, MinOffset, MaxOffset);
// ...
if (MBBI->getOperand(MBBI->getNumOperands() - 1).getImm() != 0 ||
    CSStackSizeInc < MinOffset * (int64_t)Scale.getFixedValue() ||
    CSStackSizeInc > MaxOffset * (int64_t)Scale.getFixedValue()) {
  return std::prev(MBBI);
// ...
auto SEH = std::next(MBBI);
if (AArch64InstrInfo::isSEHInstruction(*SEH))
  SEH->eraseFromParent();
unsigned OpndIdx = 0;
for (unsigned OpndEnd = MBBI->getNumOperands() - 1; OpndIdx < OpndEnd;
     ++OpndIdx)
  MIB.add(MBBI->getOperand(OpndIdx));

assert(MBBI->getOperand(OpndIdx).getImm() == 0 &&
       "Unexpected immediate offset in first/last callee-save save/restore "
       "instruction!");
assert(MBBI->getOperand(OpndIdx - 1).getReg() == AArch64::SP &&
       "Unexpected base register in callee-save save/restore instruction!");
assert(CSStackSizeInc % Scale == 0);
MIB.addImm(CSStackSizeInc / (int)Scale);
// ...
AFL.insertSEH(*MIB, *TII, FrameFlag);
// ...
return std::prev(MBB.erase(MBBI));
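// Note (worked example, assuming the usual AArch64 load/store encodings): for
// STPXpre the immediate is a signed 7-bit multiple of 8, so getMemOpInfo
// yields MinOffset = -64 and MaxOffset = 63, and the fold above is legal for
// CSStackSizeInc in [-512, 504]. A 16-byte callee-save bump thus becomes
// "stp x29, x30, [sp, #-16]!", with addImm(-16 / 8) = -2 as the operand.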
static void fixupSEHOpcode(MachineBasicBlock::iterator MBBI,
                           unsigned LocalStackSize) {
  MachineOperand *ImmOpnd = nullptr;
  unsigned ImmIdx = MBBI->getNumOperands() - 1;
  switch (MBBI->getOpcode()) {
  // ...
  case AArch64::SEH_SaveFPLR:
  case AArch64::SEH_SaveRegP:
  case AArch64::SEH_SaveReg:
  case AArch64::SEH_SaveFRegP:
  case AArch64::SEH_SaveFReg:
  case AArch64::SEH_SaveAnyRegQP:
  case AArch64::SEH_SaveAnyRegQPX:
    ImmOpnd = &MBBI->getOperand(ImmIdx);
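// Note (annotation): when the local-area bump is merged into the callee-save
// stores, the Windows SEH unwind pseudos describing those stores still carry
// the old SP-relative offsets, so each affected SEH_Save* opcode presumably
// has LocalStackSize added to its immediate here.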
// In fixupCalleeSaveRestoreStackOffset():
if (AArch64InstrInfo::isSEHInstruction(MI))
// ...
unsigned Opc = MI.getOpcode();
// ...
case AArch64::STRXui:
// ...
case AArch64::STRDui:
// ...
case AArch64::LDRXui:
// ...
case AArch64::LDRDui:
// ...
case AArch64::STRQui:
// ...
case AArch64::LDRQui:
// ...
unsigned OffsetIdx = MI.getNumExplicitOperands() - 1;
assert(MI.getOperand(OffsetIdx - 1).getReg() == AArch64::SP &&
       "Unexpected base register in callee-save save/restore instruction!");
// ...
assert(LocalStackSize % Scale == 0);
OffsetOpnd.setImm(OffsetOpnd.getImm() + LocalStackSize / Scale);
// ...
assert(MBBI != MI.getParent()->end() && "Expecting a valid instruction");
assert(AArch64InstrInfo::isSEHInstruction(*MBBI) &&
       "Expecting a SEH instruction");
// In AArch64PrologueEpilogueCommon::shouldCombineCSRLocalStackBump():
if (AFL.homogeneousPrologEpilog(MF))
  return false;

if (AFI->getLocalStackSize() == 0)
  return false;

if (AFL.needsWinCFI(MF) && AFI->getCalleeSavedStackSize() > 0 &&
    MF.getFunction().hasOptSize())
  return false;

if (StackBumpBytes >= 512 ||
    AFL.windowsRequiresStackProbe(MF, StackBumpBytes))
  return false;

if (MFI.hasVarSizedObjects())
  return false;
// ...
if (AFL.canUseRedZone(MF))
  return false;

if (AFI->hasSVEStackSize())
  return false;

return true;
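// Note (annotation): combining the callee-save bump with the local-area bump
// saves one SP adjustment, but only when a single stp/ldp immediate can still
// reach everything; 512 bytes is the ceiling implied by the scaled signed
// 7-bit offset (64 * 8). Red-zone functions, variable-sized objects, probed
// stacks, and SVE areas all force the adjustments to stay separate.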
// In getSVEStackFrameSizes():
return {{PPRCalleeSavesSize, PPRLocalsSize},
        {ZPRCalleeSavesSize, ZPRLocalsSize}};
// ...
        {ZPRCalleeSavesSize, PPRLocalsSize + ZPRLocalsSize}};
// In partitionSVECS():
// ...
    IsEpilogue ? MBB.begin() : MBB.getFirstTerminator();
auto AdjustI = [&](auto MBBI) { return IsEpilogue ? std::prev(MBBI) : MBBI; };
// ...
if (PPRCalleeSavesSize) {
  PPRsI = AdjustI(PPRsI);
  // ...
  IsEpilogue ? (--PPRsI) : (++PPRsI);
}
if (ZPRCalleeSavesSize) {
  ZPRsI = AdjustI(ZPRsI);
  // ...
  IsEpilogue ? (--ZPRsI) : (++ZPRsI);
}
if (IsEpilogue)
  return {{PPRsI, MBBI}, {ZPRsI, PPRsI}};
return {{MBBI, PPRsI}, {PPRsI, ZPRsI}};
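// Note (annotation): the returned SVEPartitions bracket the PPR and ZPR
// callee-save sequences as half-open iterator ranges. In a prologue the saves
// run forward from MBBI (PPR block first, then ZPR); in an epilogue the
// restores are scanned backwards from the terminator (ZPR first), which is
// why the two return statements swap the range endpoints.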
EmitAsyncCFI = AFI->needsAsyncDwarfUnwindInfo(MF);
// ...
collectBlockLiveins();
void AArch64PrologueEmitter::collectBlockLiveins() {
  // ...
  PrologueEndI = MBB.begin();
  while (PrologueEndI != MBB.end() &&
  // ...
  if (PrologueEndI != MBB.end()) {
void AArch64PrologueEmitter::verifyPrologueClobbers() const {
  if (PrologueEndI == MBB.end())
    return;
  // ...
  for (MachineInstr &MI :
       make_range(MBB.instr_begin(), PrologueEndI->getIterator())) {
    for (auto &Op : MI.operands())
      if (Op.isReg() && Op.isDef())
        assert(!LiveRegs.contains(Op.getReg()) &&
               "live register clobbered by inserted prologue instructions");
  }
}
void AArch64PrologueEmitter::determineLocalsStackSize(
    uint64_t StackSize, uint64_t PrologueSaveSize) {
  AFI->setLocalStackSize(StackSize - PrologueSaveSize);
  // ...
}

static int64_t upperBound(StackOffset Size) {
  static const int64_t MAX_BYTES_PER_SCALABLE_BYTE = 16;
  return Size.getScalable() * MAX_BYTES_PER_SCALABLE_BYTE + Size.getFixed();
}
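// Note (worked example): upperBound() conservatively assumes the largest
// legal SVE vector, 2048 bits, i.e. vscale = 16, so each scalable byte can
// occupy at most 16 real bytes. A StackOffset of 32 fixed plus 16 scalable
// bytes is therefore bounded by 16 * 16 + 32 = 288 bytes, which is what the
// stack-probing logic below compares against the probe interval.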
void AArch64PrologueEmitter::allocateStackSpace(
    MachineBasicBlock::iterator MBBI, int64_t RealignmentPadding,
    StackOffset AllocSize, bool EmitCFI, StackOffset InitialOffset,
    bool FollowupAllocs) {
  // ...
  const uint64_t AndMask = ~(MaxAlign - 1);
  // ...
  Register TargetReg = RealignmentPadding
                           ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                           : AArch64::SP;
  // ...
  if (RealignmentPadding) {
    // ...
  }

  if (AllocSize.getScalable() == 0 && RealignmentPadding == 0) {
    // ...
    assert(ScratchReg != AArch64::NoRegister);
    // ...
        .addImm(AllocSize.getFixed())
        .addImm(InitialOffset.getFixed())
        .addImm(InitialOffset.getScalable());
    // ...
    if (FollowupAllocs) {
      // ...
    }
  }
  // ...
  int64_t ProbeSize = AFI->getStackProbeSize();
  if (upperBound(AllocSize) + RealignmentPadding <= ProbeSize) {
    Register ScratchReg = RealignmentPadding
                              ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                              : AArch64::SP;
    assert(ScratchReg != AArch64::NoRegister);
    // ...
    if (RealignmentPadding) {
      // ...
      AFI->setStackRealigned(true);
    }
    if (FollowupAllocs || upperBound(AllocSize) + RealignmentPadding >
    // ...
  }
  // ...
  assert(TargetReg != AArch64::NoRegister);
  // ...
  if (RealignmentPadding) {
    // ...
  }
  // ...
      .buildDefCFARegister(AArch64::SP);

  if (RealignmentPadding)
    AFI->setStackRealigned(true);
}
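// Note (annotation): the paths above form a sliding scale of stack-clash
// handling. A plain SUB (plus optional realignment) suffices when no probing
// is needed; a single allocation with at most one trailing probe is used when
// the worst-case size fits inside one probe interval; otherwise a probing
// loop touches every page. hasInlineStackProbe() and getStackProbeSize()
// decide which path applies.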
// In AArch64PrologueEmitter::emitPrologue():
AFI->setHasRedZone(false);
// ...
if (AFI->getArgumentStackToRestore())
// ...
if (AFI->shouldSignReturnAddress(MF)) {
  // ...
  if (!AFL.shouldSignReturnAddressEverywhere(MF)) {
    // ...
  }
}

if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
  emitShadowCallStackPrologue(PrologueBeginI, DL);
  // ...
}

if (HasFP && AFI->hasSwiftAsyncContext())
  emitSwiftAsyncContextFramePointer(PrologueBeginI, DL);
// ...
if (std::optional<int> TBPI = AFI->getTaggedBasePointerIndex())
  AFI->setTaggedBasePointerOffset(-MFI.getObjectOffset(*TBPI));
else
  AFI->setTaggedBasePointerOffset(MFI.getStackSize());
// ...
if (!AFI->hasStackFrame() && !AFL.windowsRequiresStackProbe(MF, NumBytes))
  return emitEmptyStackFramePrologue(NumBytes, PrologueBeginI, DL);
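// Note (annotation): the tagged-base-pointer offset records where SP will end
// up relative to the base used for memory-tagging (MTE) address
// materialization. With an explicit tagged-base frame index, the offset is
// taken from that object; otherwise the full static stack size is used.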
bool IsWin64 = Subtarget.isCallingConvWin64(F.getCallingConv(), F.isVarArg());
// ...
auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
// ...
determineLocalsStackSize(NumBytes, PrologueSaveSize);
// ...
if (SVELayout == SVEStackLayout::CalleeSavesAboveFrameRecord) {
  // ...
  allocateStackSpace(PrologueBeginI, 0, SaveSize, false, StackOffset{},
  // ...
  NumBytes -= FixedObject;
  // ...
      MBBI, DL, -AFI->getCalleeSavedStackSize(), EmitAsyncCFI);
  NumBytes -= AFI->getCalleeSavedStackSize();
} else if (CombineSPBump) {
  assert(!AFL.getSVEStackSize(MF) && "Cannot combine SP bump with SVE");
  // ...
  NumBytes -= PrologueSaveSize;
} else if (PrologueSaveSize != 0) {
  // ...
      PrologueBeginI, DL, -PrologueSaveSize, EmitAsyncCFI);
  NumBytes -= PrologueSaveSize;
}
assert(NumBytes >= 0 && "Negative stack allocation size!?");
auto &TLI = *Subtarget.getTargetLowering();
// ...
while (AfterGPRSavesI != EndI &&
// ...
        AFI->getLocalStackSize());
// ...
emitFramePointerSetup(AfterGPRSavesI, DL, FixedObject);
// ...
emitCalleeSavedGPRLocations(AfterGPRSavesI);

const bool NeedsRealignment =
// ...
const int64_t RealignmentPadding =
    (NeedsRealignment && MFI.getMaxAlign() > Align(16))
        ? MFI.getMaxAlign().value() - 16
        : 0;

if (AFL.windowsRequiresStackProbe(MF, NumBytes + RealignmentPadding))
  emitWindowsStackProbe(AfterGPRSavesI, DL, NumBytes, RealignmentPadding);
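// Note (worked example): to realign SP to MaxAlign > 16, the prologue
// over-allocates MaxAlign - 16 bytes and then clears the low bits with an
// AND. For MaxAlign = 64 that is 48 bytes of padding and a mask of
// ~(64 - 1); any 16-byte-aligned SP within the padded range can be rounded
// down to a 64-byte boundary without underrunning the allocation.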
StackOffset SVECalleeSavesSize = ZPR.CalleeSavesSize + PPR.CalleeSavesSize;
// ...
auto [PPRRange, ZPRRange] = partitionSVECS(
    MBB, AfterGPRSavesI, PPR.CalleeSavesSize, ZPR.CalleeSavesSize,
    /*IsEpilogue=*/false);
AfterSVESavesI = ZPRRange.End;
// ...
emitCalleeSavedSVELocations(AfterSVESavesI);

StackOffset AllocateBeforePPRs = SVECalleeSavesSize;
StackOffset AllocateAfterPPRs = {};
if (SVELayout == SVEStackLayout::Split) {
  AllocateBeforePPRs = PPR.CalleeSavesSize;
  AllocateAfterPPRs = PPR.LocalsSize + ZPR.CalleeSavesSize;
}
allocateStackSpace(PPRRange.Begin, 0, AllocateBeforePPRs,
                   EmitAsyncCFI && !HasFP, CFAOffset,
                   MFI.hasVarSizedObjects() || AllocateAfterPPRs ||
                       ZPR.LocalsSize || NonSVELocalsSize);
CFAOffset += AllocateBeforePPRs;
assert(PPRRange.End == ZPRRange.Begin &&
       "Expected ZPR callee saves after PPR locals");
allocateStackSpace(PPRRange.End, RealignmentPadding, AllocateAfterPPRs,
                   EmitAsyncCFI && !HasFP, CFAOffset,
                   MFI.hasVarSizedObjects() || ZPR.LocalsSize ||
                       NonSVELocalsSize);
CFAOffset += AllocateAfterPPRs;
// ...
assert(!PPR.LocalsSize && "Unexpected PPR locals!");
CFAOffset += SVECalleeSavesSize;
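// Note (annotation): with the Split layout the stack is carved out in two
// steps around the PPR saves (predicate callee saves first, then PPR locals
// plus ZPR callee saves) so each register class lands in its own region. In
// the default layout everything between the GPR saves and the locals is one
// SVE callee-save block, and the CFA offset advances once.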
assert(!(AFL.canUseRedZone(MF) && NeedsRealignment) &&
       "Cannot use redzone with stack realignment");
if (!AFL.canUseRedZone(MF)) {
  // ...
  allocateStackSpace(
      AfterSVESavesI, RealignmentPadding, ZPR.LocalsSize + NonSVELocalsSize,
      EmitAsyncCFI && !HasFP, CFAOffset, MFI.hasVarSizedObjects());
}
// ...
MBB.addLiveIn(AArch64::X1);
// ...
if (EmitCFI && !EmitAsyncCFI) {
  // ...
  emitDefineCFAWithFP(AfterSVESavesI, FixedObject);
  // ...
      AFL.getSVEStackSize(MF) +
  // ...
  emitCalleeSavedGPRLocations(AfterSVESavesI);
  emitCalleeSavedSVELocations(AfterSVESavesI);
}
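// Note (annotation): when only synchronous unwind info is required, all CFI
// is emitted once here at the end of the prologue (a single def-CFA plus a
// batch of register locations). The async path instead interleaved its CFI
// with the individual SP adjustments above, so the frame is describable at
// every instruction boundary.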
void AArch64PrologueEmitter::emitShadowCallStackPrologue(
// ...
  MBB.addLiveIn(AArch64::X18);
// ...
  static const char CFIInst[] = {
      dwarf::DW_CFA_val_expression,
      18, // register
      2,  // length
      static_cast<char>(unsigned(dwarf::DW_OP_breg18)),
      static_cast<char>(-8) & 0x7f, // addend (sleb128)
  };
// ...
      .buildEscape(StringRef(CFIInst, sizeof(CFIInst)));
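// Note (worked example, assuming standard DWARF encodings): the escape bytes
// decode as DW_CFA_val_expression for register 18 with a two-byte expression
// { DW_OP_breg18, 0x78 }, where 0x78 is the SLEB128 encoding of -8, i.e.
// (-8) & 0x7f. After "str x30, [x18], #8" pushes LR onto the shadow call
// stack, the caller's x18 is recoverable as the current x18 minus 8, which is
// exactly what this expression tells the unwinder.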
void AArch64PrologueEmitter::emitSwiftAsyncContextFramePointer(
// ...
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
  case SwiftAsyncFramePointerMode::DeploymentBased:
    if (Subtarget.swiftAsyncContextIsDynamicallySet()) {
void AArch64PrologueEmitter::emitEmptyStackFramePrologue(
    int64_t NumBytes, MachineBasicBlock::iterator PrologueBeginI,
    const DebugLoc &DL) const {
  assert(!HasFP && "unexpected function without stack frame but with FP");
  assert(!AFL.getSVEStackSize(MF) &&
         "unexpected function without stack frame but with SVE objects");
  // ...
  AFI->setLocalStackSize(NumBytes);
  // ...
  if (AFL.canUseRedZone(MF)) {
    AFI->setHasRedZone(true);
    ++NumRedZoneFunctions;
  }
  // ...
  MCSymbol *FrameLabel = MF.getContext().createTempSymbol();
  // ...
      .buildDefCFAOffset(NumBytes, FrameLabel);
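// Note (annotation): the AArch64 red zone lets a leaf function keep up to 128
// bytes of locals below SP without moving SP at all, so a function that
// qualifies here emits no stack adjustment; only the statistic is bumped.
// The labelled def_cfa_offset is produced for the non-red-zone case, where SP
// does move.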
void AArch64PrologueEmitter::emitFramePointerSetup(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL,
    unsigned FixedObject) {
  int64_t FPOffset = AFI->getCalleeSaveBaseToFrameRecordOffset();
  // ...
  FPOffset += AFI->getLocalStackSize();

  if (AFI->hasSwiftAsyncContext()) {
    // ...
    const auto &Attrs = MF.getFunction().getAttributes();
    bool HaveInitialContext = Attrs.hasAttrSomewhere(Attribute::SwiftAsync);
    if (HaveInitialContext)
      MBB.addLiveIn(AArch64::X22);
    Register Reg = HaveInitialContext ? AArch64::X22 : AArch64::XZR;
    // ...
  }
  // ...
  emitDefineCFAWithFP(MBBI, FixedObject);
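// Note (annotation): for Swift async functions the frame record is extended
// to hold the async context alongside FP and LR. x22 carries the incoming
// context when the function is marked swiftasync (XZR is stored otherwise),
// and it is written into the context slot before FP itself is established.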
void AArch64PrologueEmitter::emitDefineCFAWithFP(
    MachineBasicBlock::iterator MBBI, unsigned FixedObject) const {
  const int OffsetToFirstCalleeSaveFromFP =
      AFI->getCalleeSaveBaseToFrameRecordOffset() -
      AFI->getCalleeSavedStackSize();
  // ...
      .buildDefCFA(FramePtr, FixedObject - OffsetToFirstCalleeSaveFromFP);
}
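// Note (worked example): for a frame whose callee-save area is just the
// 16-byte FP/LR pair with the frame record at its base,
// OffsetToFirstCalleeSaveFromFP = 0 - 16 = -16, so the CFA is defined as
// FP + FixedObject + 16. With no Win64 varargs area (FixedObject = 0) that
// is the classic "CFA = FP + 16" rule for an AArch64 frame record.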
void AArch64PrologueEmitter::emitWindowsStackProbe(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL, int64_t NumBytes,
    int64_t RealignmentPadding) const {
  if (AFI->getSVECalleeSavedStackSize())
    // ...

  unsigned X15Scratch = AArch64::NoRegister;
  if (any_of(MBB.liveins(),
             [this](const MachineBasicBlock::RegisterMaskPair &LiveIn) {
               return RegInfo.isSuperOrSubRegisterEq(AArch64::X15,
                                                     LiveIn.PhysReg);
             })) {
    X15Scratch = AFL.findScratchNonCalleeSaveRegister(&MBB, true);
    assert(X15Scratch != AArch64::NoRegister &&
           (X15Scratch < AArch64::X15 || X15Scratch > AArch64::X17));
    // ...
    LiveRegs.removeReg(AArch64::X15);
  }

  uint64_t NumWords = (NumBytes + RealignmentPadding) >> 4;
  // ...
  if (NumBytes >= (1 << 28))
    report_fatal_error("Stack size cannot exceed 256MB for stack "
                       "unwinding purposes");

  uint32_t LowNumWords = NumWords & 0xFFFF;
  // ...
  if ((NumWords & 0xFFFF0000) != 0) {
    // ...
        .addImm((NumWords & 0xFFFF0000) >> 16)
    // ...
  }
  // ...
  const char *ChkStk = Subtarget.getChkStkName();
  switch (MF.getTarget().getCodeModel()) {
  // ...
  }
  // ...
  if (RealignmentPadding > 0) {
    if (RealignmentPadding >= 4096) {
      // ...
          .addImm(RealignmentPadding)
      // ...
    }
    // ...
        .addImm(RealignmentPadding)
    // ...
    uint64_t AndMask = ~(MFI.getMaxAlign().value() - 1);
    // ...
    AFI->setStackRealigned(true);
  }
  // ...
  if (X15Scratch != AArch64::NoRegister) {
    // ...
  }
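// Note (annotation, based on the documented Windows AArch64 __chkstk
// contract): the probe amount is passed in x15 in units of 16 bytes, hence
// NumWords = (NumBytes + RealignmentPadding) >> 4, materialized with a MOVZ
// for the low 16 bits and a MOVK for the high bits when needed. The 1 << 28
// (256MB) ceiling exists because the SEH alloc unwind codes cannot describe
// larger frames, and x15 is temporarily vacated into a scratch register if
// it was live-in.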
void AArch64PrologueEmitter::emitCalleeSavedGPRLocations(
    MachineBasicBlock::iterator MBBI) const {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...
  for (const auto &Info : CSI) {
    unsigned FrameIdx = Info.getFrameIdx();
    if (MFI.hasScalableStackID(FrameIdx))
      continue;

    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    int64_t Offset =
        MFI.getObjectOffset(FrameIdx) - AFL.getOffsetOfLocalArea();
    CFIBuilder.buildOffset(Info.getReg(), Offset);
  }
}
void AArch64PrologueEmitter::emitCalleeSavedSVELocations(
    MachineBasicBlock::iterator MBBI) const {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...
  std::optional<int64_t> IncomingVGOffsetFromDefCFA;
  if (AFL.requiresSaveVG(MF)) {
    auto &IncomingVG = *find_if(
        reverse(CSI), [](auto &Info) { return Info.getReg() == AArch64::VG; });
    IncomingVGOffsetFromDefCFA = MFI.getObjectOffset(IncomingVG.getFrameIdx()) -
                                 AFL.getOffsetOfLocalArea();
  }

  StackOffset PPRStackSize = AFL.getPPRStackSize(MF);
  for (const auto &Info : CSI) {
    int FI = Info.getFrameIdx();
    if (!MFI.hasScalableStackID(FI))
      continue;

    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    MCRegister Reg = Info.getReg();
    // ...
    CFIBuilder.insertCFIInst(
    // ...
  }
}
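// Note (annotation): scalable callee-save locations cannot be described with
// a plain fixed offset, so each Z/P register's rule is presumably built via
// createCFAOffset() as "fixed bytes plus a VG-scaled term". When VG itself
// was saved (functions with streaming-mode changes), the incoming-VG slot
// offset computed above lets the unwinder read the right multiplier out of
// the frame.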
static bool isFuncletReturnInstr(const MachineInstr &MI) {
  switch (MI.getOpcode()) {
  default:
    return false;
  case AArch64::CATCHRET:
  case AArch64::CLEANUPRET:
    return true;
  }
}

SEHEpilogueStartI = MBB.end();

// In AArch64EpilogueEmitter::emitEpilogue():
if (MBB.end() != EpilogueEndI) {
  DL = EpilogueEndI->getDebugLoc();
int64_t ArgumentStackToRestore = AFL.getArgumentStackToRestore(MF, MBB);
bool IsWin64 = Subtarget.isCallingConvWin64(MF.getFunction().getCallingConv(),
                                            MF.getFunction().isVarArg());
// ...
int64_t AfterCSRPopSize = ArgumentStackToRestore;
auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
// ...
if (MF.hasEHFunclets())
  AFI->setLocalStackSize(NumBytes - PrologueSaveSize);
// ...
auto FirstHomogenousEpilogI = MBB.getFirstTerminator();
if (FirstHomogenousEpilogI != MBB.begin()) {
  auto HomogeneousEpilog = std::prev(FirstHomogenousEpilogI);
  if (HomogeneousEpilog->getOpcode() == AArch64::HOM_Epilog)
    FirstHomogenousEpilogI = HomogeneousEpilog;
}
// ...
assert(AfterCSRPopSize == 0);
bool CombineSPBump = shouldCombineCSRLocalStackBump(NumBytes);
// ...
bool CombineAfterCSRBump = false;
// ...
  AfterCSRPopSize += FixedObject;
} else if (!CombineSPBump && PrologueSaveSize != 0) {
  // ...
  while (Pop->getOpcode() == TargetOpcode::CFI_INSTRUCTION ||
         AArch64InstrInfo::isSEHInstruction(*Pop))
    Pop = std::prev(Pop);
  // ...
  if (OffsetOp.getImm() == 0 && AfterCSRPopSize >= 0) {
    // ...
  } else {
    // ...
    AfterCSRPopSize += PrologueSaveSize;
    CombineAfterCSRBump = true;
  }
}
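// Note (annotation): the scan walks backwards over trailing CFI/SEH
// instructions to find the final callee-save pop. If that pop reloads from
// [sp, #0] it can be rewritten as a post-indexed load that also frees the
// callee-save area; otherwise the whole PrologueSaveSize is deferred and
// folded into the final SP bump after the pops (CombineAfterCSRBump).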
while (FirstGPRRestoreI != Begin) {
  // ...
  } else if (CombineSPBump)
    // ...
        AFI->getLocalStackSize());
// ...
BuildMI(MBB, FirstGPRRestoreI, DL, TII->get(AArch64::SEH_EpilogStart))
// ...
SEHEpilogueStartI = FirstGPRRestoreI;
--SEHEpilogueStartI;

if (HasFP && AFI->hasSwiftAsyncContext())
  emitSwiftAsyncContextFramePointer(EpilogueEndI, DL);
if (CombineSPBump) {
  assert(!AFI->hasSVEStackSize() && "Cannot combine SP bump with SVE");
  // ...
}

NumBytes -= PrologueSaveSize;
assert(NumBytes >= 0 && "Negative stack allocation size!?");
// ...
auto [PPRRange, ZPRRange] = partitionSVECS(
    MBB,
    // ...
        ? MBB.getFirstTerminator()
    // ...
    PPR.CalleeSavesSize, ZPR.CalleeSavesSize, /*IsEpilogue=*/true);

StackOffset SVECalleeSavesSize = ZPR.CalleeSavesSize + PPR.CalleeSavesSize;
// ...
    SVECalleeSavesSize + PPR.LocalsSize + ZPR.LocalsSize;
StackOffset SVELocalsSize = ZPR.LocalsSize + PPR.LocalsSize;
// ...
if (!AFI->isStackRealigned() && !MFI.hasVarSizedObjects()) {
  // ...
} else if (AFI->hasSVEStackSize()) {
  // ...
  Register BaseForSVEDealloc =
      (AFI->isStackRealigned() || MFI.hasVarSizedObjects()) ? AArch64::FP
                                                            : AArch64::SP;
  if (SVECalleeSavesSize && BaseForSVEDealloc == AArch64::FP) {
    // ...
           "unexpected stack realignment or variable sized objects with split "
           "SVE stack objects");
    // ...
    Register CalleeSaveBase = AArch64::FP;
    if (int64_t CalleeSaveBaseOffset =
            AFI->getCalleeSaveBaseToFrameRecordOffset()) {
      // ...
      CalleeSaveBase = MBB.getParent()->getRegInfo().createVirtualRegister(
          &AArch64::GPR64RegClass);
      // ...
    }
  } else if (BaseForSVEDealloc == AArch64::SP) {
    // ...
    if (SVECalleeSavesSize) {
      // ...
    }
    // ...
    CFAOffset -= NonSVELocals;
    // ...
    if (ZPR.LocalsSize) {
      // ...
      CFAOffset -= ZPR.LocalsSize;
    }
    // ...
    StackOffset SVECalleeSavesToDealloc = SVECalleeSavesSize;
    if (SVELayout == SVEStackLayout::Split &&
        (PPR.LocalsSize || ZPR.CalleeSavesSize)) {
      assert(PPRRange.Begin == ZPRRange.End &&
             "Expected PPR restores after ZPR");
      // ...
          PPR.LocalsSize + ZPR.CalleeSavesSize, TII,
      // ...
      CFAOffset -= PPR.LocalsSize + ZPR.CalleeSavesSize;
      SVECalleeSavesToDealloc -= ZPR.CalleeSavesSize;
    }
    // ...
    if (SVECalleeSavesToDealloc)
      // ...
          SVECalleeSavesToDealloc, TII,
      // ...
  }
// ...
emitCalleeSavedSVERestores(
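// Note (annotation): SP-based SVE deallocation undoes the prologue in
// reverse: non-SVE locals first, then ZPR locals, then (for the Split
// layout) PPR locals together with the ZPR callee-save area, and finally
// whatever callee-save space remains, updating CFAOffset at each step so the
// intermediate CFI stays accurate when async unwind info is enabled.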
bool RedZone = AFL.canUseRedZone(MF);
// ...
if (RedZone && AfterCSRPopSize == 0)
  return;
// ...
bool NoCalleeSaveRestore = PrologueSaveSize == 0;
int64_t StackRestoreBytes = RedZone ? 0 : NumBytes;
if (NoCalleeSaveRestore)
  StackRestoreBytes += AfterCSRPopSize;
// ...
    MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::SP,
// ...
if (NoCalleeSaveRestore || AfterCSRPopSize == 0)
  return;
// ...
if (!IsFunclet && (MFI.hasVarSizedObjects() || AFI->isStackRealigned())) {
  // ...
      MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::FP,
  // ...
} else if (NumBytes)
  // ...

if (AfterCSRPopSize) {
  assert(AfterCSRPopSize > 0 && "attempting to reallocate arg stack that an "
                                "interrupt may have clobbered");
  // ...
      MBB, MBB.getFirstTerminator(), DL, AArch64::SP, AArch64::SP,
  // ...
}
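// Note (annotation): there are three ways out of the frame. Red-zone
// functions need no SP restore at all; frames with variable-sized objects or
// realignment recover SP from FP, since SP may have been clobbered; otherwise
// a plain SP += NumBytes suffices. Any argument stack the caller expects us
// to pop (AfterCSRPopSize) is released last, after the callee-save reloads.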
bool AArch64EpilogueEmitter::shouldCombineCSRLocalStackBump(
    uint64_t StackBumpBytes) const {
  // ...
  while (LastI != Begin) {
    // ...
    if (LastI->isTransient())
    // ...
  }
  switch (LastI->getOpcode()) {
  case AArch64::STGloop:
  case AArch64::STZGloop:
  case AArch64::STGi:
  case AArch64::STZGi:
  case AArch64::ST2Gi:
  case AArch64::STZ2Gi:
  // ...
  }
}
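// Note (annotation): if the epilogue ends in an MTE tag store (STG/STZG/ST2G
// or their loop forms), the combined SP bump is declined; those instructions
// can usually absorb the SP adjustment themselves, which is cheaper than a
// separate add.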
void AArch64EpilogueEmitter::emitSwiftAsyncContextFramePointer(
// ...
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
void AArch64EpilogueEmitter::emitShadowCallStackEpilogue(
// ...
  if (AFI->needsAsyncDwarfUnwindInfo(MF))
    // ...
        .buildRestore(AArch64::X18);
void AArch64EpilogueEmitter::emitCalleeSavedRestores(
    MachineBasicBlock::iterator MBBI, bool SVE) const {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...
  for (const auto &Info : CSI) {
    if (SVE != MFI.hasScalableStackID(Info.getFrameIdx()))
      continue;

    MCRegister Reg = Info.getReg();
    // ...
    CFIBuilder.buildRestore(Info.getReg());
  }
}
void AArch64EpilogueEmitter::finalizeEpilogue() const {
  if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
    emitShadowCallStackEpilogue(MBB.getFirstTerminator(), DL);
    // ...
  }
  // ...
  emitCalleeSavedGPRRestores(MBB.getFirstTerminator());
  if (AFI->shouldSignReturnAddress(MF)) {
    // ...
    if (!AFL.shouldSignReturnAddressEverywhere(MF)) {
      // ...
              TII->get(AArch64::PAUTH_EPILOGUE))
      // ...
    }
  }
  // ...
  BuildMI(MBB, MBB.getFirstTerminator(), DL, TII->get(AArch64::SEH_EpilogEnd))
  // ...
  if (!MF.hasWinCFI())
    MF.setHasWinCFI(true);
  // ...
  MBB.erase(SEHEpilogueStartI);
}
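// Note (annotation): finalizeEpilogue() tears the frame down in roughly the
// reverse order of the prologue: the shadow-call-stack pop, the callee-save
// GPR reloads, then a PAUTH_EPILOGUE pseudo (later expanded to an
// authenticating instruction such as AUTIASP) when return-address signing is
// enabled, and finally the SEH_EpilogEnd marker that closes the Windows
// unwind region opened by SEH_EpilogStart.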