#ifndef LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H
#define LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H
  /// Number of bytes of arguments this function has on the stack.
  unsigned BytesInStackArgArea = 0;

  /// Number of bytes of incoming argument stack area to pop on return
  /// (callee-pops conventions and guaranteed tail calls).
  unsigned ArgumentStackToRestore = 0;

  /// Stack space reserved for outgoing arguments of tail calls.
  unsigned TailCallReservedStack = 0;

  /// True if this function is known to need a stack frame.
  bool HasStackFrame = false;

  /// Size of the callee-saved register area, plus its SVE (ZPR/PPR) parts,
  /// and whether each size has been computed yet.
  unsigned CalleeSavedStackSize = 0;
  unsigned ZPRCalleeSavedStackSize = 0;
  unsigned PPRCalleeSavedStackSize = 0;
  bool HasCalleeSavedStackSize = false;
  bool HasSVECalleeSavedStackSize = false;

  /// Number of TLS accesses using the local-dynamic model.
  unsigned NumLocalDynamicTLSAccesses = 0;

  /// Frame indices, offsets and sizes used to lower va_start.
  int VarArgsStackIndex = 0;
  unsigned VarArgsStackOffset = 0;
  int VarArgsGPRIndex = 0;
  unsigned VarArgsGPRSize = 0;
  int VarArgsFPRIndex = 0;
  unsigned VarArgsFPRSize = 0;

  /// Frame indices of the stack-hazard padding slots; INT_MAX means "unset".
  int StackHazardSlotIndex = std::numeric_limits<int>::max();
  int StackHazardCSRSlotIndex = std::numeric_limits<int>::max();

  /// True if a subset of CSRs is handled explicitly via copies.
  bool IsSplitCSR = false;

  /// True if the stack had to be realigned for this function.
  bool StackRealigned = false;

  /// True if the callee-save stack area has unused space that other stack
  /// allocations may reuse.
  bool CalleeSaveStackHasFreeSpace = false;

  /// True if SVE stack objects are split into separate ZPR and PPR regions.
  bool SplitSVEObjects = false;

  bool HasCalculatedStackSizeSVE = false;

  /// Whether the red zone may be used; unset until the decision is made.
  std::optional<bool> HasRedZone;

  /// Frame index and offset of the base pointer used for MTE stack tagging.
  std::optional<int> TaggedBasePointerIndex;
  unsigned TaggedBasePointerOffset;

  /// How this function was outlined, if it is an outlined function.
  std::optional<std::string> OutliningStyle;

  /// Offset from the callee-save base to the frame record (FP, LR pair).
  int CalleeSaveBaseToFrameRecordOffset = 0;

  /// Return-address signing and branch-protection configuration.
  bool SignReturnAddress = false;
  bool SignReturnAddressAll = false;
  bool SignWithBKey = false;
  bool HasELFSignedGOT = false;
  bool BranchTargetEnforcement = false;
  bool BranchProtectionPAuthLR = false;

  /// Swift async context support; an INT_MAX frame index means "unset".
  bool HasSwiftAsyncContext = false;
  int SwiftAsyncContextFrameIdx = std::numeric_limits<int>::max();

  /// True if the function uses MTE stack tagging.
  bool IsMTETagged = false;

  /// True if this function uses the SVE vector calling convention.
  bool IsSVECC = false;

  /// True if calls in this function change the SME streaming mode.
  bool HasStreamingModeChanges = false;

  /// Lazily computed (hence mutable) DWARF unwind-info requirements.
  mutable std::optional<bool> NeedsDwarfUnwindInfo;
  mutable std::optional<bool> NeedsAsyncDwarfUnwindInfo;

  /// Stack-probe size in bytes; 0 means stack probing is disabled.
  int64_t StackProbeSize = 0;

  /// Predicate register used for SVE fill/spill sequences.
  unsigned PredicateRegForFillSpill = 0;

  /// SME lazy-save buffer state: register holding an early-allocated save
  /// buffer, the ZT0 spill slot frame index (INT_MAX means "unset"), and
  /// whether the buffer was actually used.
  Register EarlyAllocSMESaveBuffer = AArch64::NoRegister;
  int ZT0SpillSlotIndex = std::numeric_limits<int>::max();
  bool SMESaveBufferUsed = false;
  void setEarlyAllocSMESaveBuffer(Register Ptr) { EarlyAllocSMESaveBuffer = Ptr; }
  Register getEarlyAllocSMESaveBuffer() const { return EarlyAllocSMESaveBuffer; }

  int getZT0SpillSlotIndex() const { return ZT0SpillSlotIndex; }
  bool hasZT0SpillSlotIndex() const {
    return ZT0SpillSlotIndex != std::numeric_limits<int>::max();
  }

  void setPredicateRegForFillSpill(unsigned Reg) { PredicateRegForFillSpill = Reg; }
  unsigned getPredicateRegForFillSpill() const { return PredicateRegForFillSpill; }
  void setArgumentStackToRestore(unsigned bytes) { ArgumentStackToRestore = bytes; }
  void setTailCallReservedStack(unsigned bytes) { TailCallReservedStack = bytes; }

  void setStackSizeSVE(uint64_t ZPR, uint64_t PPR) {
    StackSizeZPR = ZPR;
    StackSizePPR = PPR;
    HasCalculatedStackSizeSVE = true;
  }

  bool hasCalleeSaveStackFreeSpace() const { return CalleeSaveStackHasFreeSpace; }
  void setCalleeSaveStackHasFreeSpace(bool s) { CalleeSaveStackHasFreeSpace = s; }

  std::optional<std::string> getOutliningStyle() const { return OutliningStyle; }

  void setCalleeSavedStackSize(unsigned Size) {
    CalleeSavedStackSize = Size;
    HasCalleeSavedStackSize = true;
  }
  // Recompute the callee-save area size from the CalleeSavedInfo when the
  // cached value has not been set (e.g. when a MIR pass runs in isolation).
  // This is only possible after PEI has assigned offsets to the objects.
  unsigned getCalleeSavedStackSize(const MachineFrameInfo &MFI) const {
    bool ValidateCalleeSavedStackSize = false;

#ifndef NDEBUG
    // In debug builds, re-derive the size even when it is cached so the two
    // values can be cross-checked below.
    ValidateCalleeSavedStackSize = HasCalleeSavedStackSize;
#endif

    if (!HasCalleeSavedStackSize || ValidateCalleeSavedStackSize) {
      assert(MFI.isCalleeSavedInfoValid() && "CalleeSavedInfo not calculated");
      if (MFI.getCalleeSavedInfo().empty())
        return 0;

      int64_t MinOffset = std::numeric_limits<int64_t>::max();
      int64_t MaxOffset = std::numeric_limits<int64_t>::min();
      for (const auto &Info : MFI.getCalleeSavedInfo()) {
        int FrameIdx = Info.getFrameIdx();
        if (MFI.getStackID(FrameIdx) != TargetStackID::Default)
          continue;
        int64_t Offset = MFI.getObjectOffset(FrameIdx);
        int64_t ObjSize = MFI.getObjectSize(FrameIdx);
        MinOffset = std::min<int64_t>(Offset, MinOffset);
        MaxOffset = std::max<int64_t>(Offset + ObjSize, MaxOffset);
      }

      if (SwiftAsyncContextFrameIdx != std::numeric_limits<int>::max()) {
        int64_t Offset = MFI.getObjectOffset(SwiftAsyncContextFrameIdx);
        int64_t ObjSize = MFI.getObjectSize(SwiftAsyncContextFrameIdx);
        MinOffset = std::min<int64_t>(Offset, MinOffset);
        MaxOffset = std::max<int64_t>(Offset + ObjSize, MaxOffset);
      }

      if (StackHazardCSRSlotIndex != std::numeric_limits<int>::max()) {
        int64_t Offset = MFI.getObjectOffset(StackHazardCSRSlotIndex);
        int64_t ObjSize = MFI.getObjectSize(StackHazardCSRSlotIndex);
        MinOffset = std::min<int64_t>(Offset, MinOffset);
        MaxOffset = std::max<int64_t>(Offset + ObjSize, MaxOffset);
      }

      unsigned Size = alignTo(MaxOffset - MinOffset, 16);
      assert((!HasCalleeSavedStackSize || getCalleeSavedStackSize() == Size) &&
             "Invalid size calculated for callee saves");
      return Size;
    }

    return getCalleeSavedStackSize();
  }
  unsigned getCalleeSavedStackSize() const {
    assert(HasCalleeSavedStackSize &&
           "CalleeSavedStackSize has not been calculated");
    return CalleeSavedStackSize;
  }
  void setSVECalleeSavedStackSize(unsigned ZPR, unsigned PPR) {
    ZPRCalleeSavedStackSize = ZPR;
    PPRCalleeSavedStackSize = PPR;
    HasSVECalleeSavedStackSize = true;
  }
  unsigned getZPRCalleeSavedStackSize() const {
    assert(HasSVECalleeSavedStackSize &&
           "ZPRCalleeSavedStackSize has not been calculated");
    return ZPRCalleeSavedStackSize;
  }
  unsigned getPPRCalleeSavedStackSize() const {
    assert(HasSVECalleeSavedStackSize &&
           "PPRCalleeSavedStackSize has not been calculated");
    return PPRCalleeSavedStackSize;
  }
  unsigned getSVECalleeSavedStackSize() const {
    assert(!SplitSVEObjects &&
           "ZPRs and PPRs are split. Use get[ZPR|PPR]CalleeSavedStackSize()");
    return getZPRCalleeSavedStackSize() + getPPRCalleeSavedStackSize();
  }
  unsigned getNumLocalDynamicTLSAccesses() const {
    return NumLocalDynamicTLSAccesses;
  }

  std::optional<bool> hasRedZone() const { return HasRedZone; }
  bool hasStackHazardSlotIndex() const {
    return StackHazardSlotIndex != std::numeric_limits<int>::max();
  }
  void setStackHazardSlotIndex(int Index) {
    assert(StackHazardSlotIndex == std::numeric_limits<int>::max());
    StackHazardSlotIndex = Index;
  }
  void setStackHazardCSRSlotIndex(int Index) {
    assert(StackHazardCSRSlotIndex == std::numeric_limits<int>::max());
    StackHazardCSRSlotIndex = Index;
  }
  unsigned getJumpTableEntrySize(int Idx) const {
    return JumpTableEntryInfo[Idx].first;
  }
  MCSymbol *getJumpTableEntryPCRelSymbol(int Idx) const {
    return JumpTableEntryInfo[Idx].second;
  }
  void setJumpTableEntryInfo(int Idx, unsigned Size, MCSymbol *PCRelSym) {
    if ((unsigned)Idx >= JumpTableEntryInfo.size())
      JumpTableEntryInfo.resize(Idx + 1);
    JumpTableEntryInfo[Idx] = std::make_pair(Size, PCRelSym);
  }
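  // Usage sketch (hypothetical values): jump-table compression records the
  // per-table entry size and, for byte/half-sized entries, the symbol the
  // entries are encoded relative to; the AsmPrinter later reads both back.
  //   AFI->setJumpTableEntryInfo(JTI, /*Size=*/1, /*PCRelSym=*/TableLabel);
  //   unsigned EntrySize = AFI->getJumpTableEntrySize(JTI);
  //   MCSymbol *Base = AFI->getJumpTableEntryPCRelSymbol(JTI);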
  MILOHDirective(MCLOHType Kind, LOHArgs Args)
      : Kind(Kind), Args(Args.begin(), Args.end()) {
    assert(isValidMCLOHType(Kind) && "Invalid LOH directive type!");
  }

  /// Add a LOH directive of this Kind with these Args.
  void addLOHDirective(MCLOHType Kind, MILOHArgs Args) {
    LOHContainerSet.push_back(MILOHDirective(Kind, Args));
    LOHRelated.insert_range(Args);
  }

  /// Drop every LOH directive that references an instruction in MIs and
  /// return how many directives were removed.
  size_t clearLinkerOptimizationHints(const SmallPtrSetImpl<MachineInstr *> &MIs) {
    size_t InitialSize = LOHContainerSet.size();
    erase_if(LOHContainerSet, [&](const auto &D) {
      return any_of(D.getArgs(), [&](auto *Arg) { return MIs.contains(Arg); });
    });
    LOHRelated.remove_if([&](auto *MI) { return MIs.contains(MI); });
    return InitialSize - LOHContainerSet.size();
  }
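  // Usage sketch (hypothetical caller): a pass that deletes instructions
  // participating in linker optimization hints collects them in a set and
  // then drops the affected directives, e.g.
  //   SmallPtrSet<MachineInstr *, 4> Removed = {DeadMI};
  //   AFI->clearLinkerOptimizationHints(Removed);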
  SmallVectorImpl<ForwardedRegister> &getForwardedMustTailRegParms() {
    return ForwardedMustTailRegParms;
  }

  std::optional<int> getTaggedBasePointerIndex() const {
    return TaggedBasePointerIndex;
  }

  unsigned getTaggedBasePointerOffset() const { return TaggedBasePointerOffset; }
  void setTaggedBasePointerOffset(unsigned Offset) { TaggedBasePointerOffset = Offset; }

  int getCalleeSaveBaseToFrameRecordOffset() const {
    return CalleeSaveBaseToFrameRecordOffset;
  }
  void setCalleeSaveBaseToFrameRecordOffset(int Offset) {
    CalleeSaveBaseToFrameRecordOffset = Offset;
  }

  void setHasSwiftAsyncContext(bool HasContext) { HasSwiftAsyncContext = HasContext; }
  void setSwiftAsyncContextFrameIdx(int FI) { SwiftAsyncContextFrameIdx = FI; }

  void setHasStreamingModeChanges(bool HasChanges) { HasStreamingModeChanges = HasChanges; }
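  // Minimal usage sketch (illustrative, not part of this header): target code
  // reaches this info through MachineFunction::getInfo<>() and queries it
  // during frame or call lowering; MF is assumed to be an in-scope
  // MachineFunction reference.
  //   auto *AFI = MF.getInfo<AArch64FunctionInfo>();
  //   unsigned ArgArea = AFI->getBytesInStackArgArea();
  //   bool NeedsFrame = AFI->hasStackFrame();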