#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"

#define DEBUG_TYPE "attributor"
    cl::desc("Manifest Attributor internal string attributes."),

    cl::desc("Maximum number of potential values to be "
             "tracked for each position."),

    "attributor-max-potential-values-iterations", cl::Hidden,
    cl::desc(
        "Maximum number of iterations we keep dismantling potential values."),

STATISTIC(NumAAs, "Number of abstract attributes created");
STATISTIC(NumIndirectCallsPromoted, "Number of indirect calls promoted");

// Helper macros for statistics tracking. Each STATS_DECLTRACK_*_ATTR(NAME)
// declares a STATISTIC counter for the given IR position kind (argument, call
// site argument, function, call site, function return, call site return, or
// floating value) and increments it, so every deduced attribute is counted
// once per position kind.
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
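/// Defines a raw_ostream operator<< for the given abstract attribute class by
/// forwarding to the generic AbstractAttribute printer.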
#define PIPE_OPERATOR(CLASS)                                                   \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) {                  \
    return OS << static_cast<const AbstractAttribute &>(AA);                   \
  }
                         bool HeaderOnly, Cycle **CPtr = nullptr) {
  auto *BB = I->getParent();
  return !HeaderOnly || BB == C->getHeader();

  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))

    StartPos += DL.getTypeAllocSizeInBits(ElTy);
                                      bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())

    return LI->getPointerOperand();

    return SI->getPointerOperand();

    return CXI->getPointerOperand();

    return RMWI->getPointerOperand();
                                bool GetMinOffset, bool AllowNonInbounds,
                                bool UseAssumed = false) {

  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
    if (!ValueConstantRangeAA)
    if (Range.isFullSet())
      ROffset = Range.getSignedMin();
      ROffset = Range.getSignedMax();

                                       const Value *Ptr, int64_t &BytesOffset,
  APInt OffsetAPInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
                                  true, AllowNonInbounds);
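/// Clamp the information known for all returned values of a function
/// (identified by \p QueryingAA) into \p S.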
template <typename AAType, typename StateType = typename AAType::StateType,
          bool RecurseForSelectAndPHI = true>
                                     Attributor &A, const AAType &QueryingAA,
                                     StateType &S,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
          QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  std::optional<StateType> T;

  auto CheckReturnValue = [&](Value &RV) -> bool {
                      << " AA: " << AA->getAsStr(&A) << " @ " << RVPos << "\n");
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
    return T->isValidState();

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
                                   RecurseForSelectAndPHI))
    S.indicatePessimisticFixpoint();
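/// Helper class for generic deduction: return value -> returned position.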
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false,
          bool RecurseForSelectAndPHI = true>
struct AAReturnedFromReturnedValues : public BaseType {
  AAReturnedFromReturnedValues(const IRPosition &IRP, Attributor &A)

    StateType S(StateType::getBestState(this->getState()));
                             RecurseForSelectAndPHI>(
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
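/// Clamp the information known at all call sites for a given argument
/// (identified by \p QueryingAA) into \p S.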
template <typename AAType, typename StateType = typename AAType::StateType,
static void clampCallSiteArgumentStates(Attributor &A,
                                        const AAType &QueryingAA,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp call site argument states for an argument position!");

  std::optional<StateType> T;

  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();

    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA->getAsStr(&A) << " @" << ACSArgPos
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
    return T->isValidState();

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
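/// This function is the bridge between argument position and the call base
/// context.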
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
bool getArgumentStateFromCallBaseContext(Attributor &A,
         "Expected an 'argument' position !");

  assert(ArgNo >= 0 && "Invalid Arg No!");

  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA->getState());

  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument"
                    << "Position:" << Pos << "CB Arg state:" << CBArgumentState

  State ^= CBArgumentState;

template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false,
struct AAArgumentFromCallSiteArguments : public BaseType {
  AAArgumentFromCallSiteArguments(const IRPosition &IRP, Attributor &A)

    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
          getArgumentStateFromCallBaseContext<AAType, BaseType, StateType,
              A, *this, this->getIRPosition(), S);

    clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(A, *this,
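/// Helper class for generic replication: function returned -> call site
/// returned. The state known for the callee is copied to the call site
/// position, optionally introducing the call base as context.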
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false,
struct AACalleeToCallSite : public BaseType {
  AACalleeToCallSite(const IRPosition &IRP, Attributor &A) : BaseType(IRP, A) {}

    auto IRPKind = this->getIRPosition().getPositionKind();
           "Can only wrap function returned positions for call site "
           "returned positions!");
    auto &S = this->getState();

    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:" << CB

      for (const Function *Callee : Callees) {
                                     IntroduceCallBaseContext ? &CB : nullptr)
                  *Callee, IntroduceCallBaseContext ? &CB : nullptr);
        if (Attribute::isEnumAttrKind(IRAttributeKind)) {
                  A, this, FnPos, DepClassTy::REQUIRED, IsKnown))
              A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
        if (S.isAtFixpoint())
          return S.isValidState();

    if (!A.checkForAllCallees(CalleePred, *this, CB))
      return S.indicatePessimisticFixpoint();
template <class AAType, typename StateType = typename AAType::StateType>
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
      if (Found && AA.followUseInMBEC(A, U, UserI, State))

template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
  const Value &Val = AA.getIRPosition().getAssociatedValue();
      A.getInfoCache().getMustBeExecutedContextExplorer();

  for (const Use &U : Val.uses())

  followUsesInContext<AAType>(AA, A, *Explorer, &CtxI, Uses, S);

  if (S.isAtFixpoint())

  if (Br->isConditional())

  StateType ParentState;

  ParentState.indicateOptimisticFixpoint();

  for (const BasicBlock *BB : Br->successors()) {
    StateType ChildState;

    size_t BeforeSize = Uses.size();
    followUsesInContext(AA, A, *Explorer, &BB->front(), Uses, ChildState);

    for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)

    ParentState &= ChildState;
    R.indicatePessimisticFixpoint();

    BS.indicateOptimisticFixpoint();

    BS.indicatePessimisticFixpoint();

  template <typename F>
      if (!Range.mayOverlap(ItRange))
      bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
      for (auto Index : It.getSecond()) {

  template <typename F>
    for (unsigned Index : LocalList->getSecond()) {
      if (Range.offsetAndSizeAreUnknown())

    RemoteI = RemoteI ? RemoteI : &I;

    bool AccExists = false;
      for (auto Index : LocalList) {
        if (A.getLocalInst() == &I) {

                 << "[AAPointerInfo] Inserting access in new offset bins\n";);
      for (auto Key : ToAdd) {

      AccessList.emplace_back(&I, RemoteI, Ranges, Content, Kind, Ty);
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);

    auto Before = Current;
    if (Current == Before)

    auto &ExistingRanges = Before.getRanges();
    auto &NewRanges = Current.getRanges();

               << "[AAPointerInfo] Removing access from old offset bins\n";);
           "Expected bin to actually contain the Access.");
      Bin.erase(AccIndex);
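/// Implementation of the AAPointerInfo interface: per underlying pointer it
/// tracks the byte offset/size ranges ("offset bins") that are accessed, the
/// accesses themselves, and whether the pointer (with which offsets) reaches a
/// return, and it answers queries about interfering accesses.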
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {

  const std::string getAsStr(Attributor *A) const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")
                        [](int64_t O) { return std::to_string(O); }),

    return AAPointerInfo::manifest(A);

  virtual const_bin_iterator begin() const override { return State::begin(); }
  virtual const_bin_iterator end() const override { return State::end(); }
  virtual int64_t numOffsetBins() const override {
    return State::numOffsetBins();

  virtual bool reachesReturn() const override {
    return !ReturnedOffsets.isUnassigned();

  virtual void addReturnedOffsetsTo(OffsetInfo &OI) const override {
    if (ReturnedOffsets.isUnknown()) {

    OffsetInfo MergedOI;
    for (auto Offset : ReturnedOffsets) {
      OffsetInfo TmpOI = OI;
      MergedOI.merge(TmpOI);
    OI = std::move(MergedOI);

  ChangeStatus setReachesReturn(const OffsetInfo &ReachedReturnedOffsets) {
    if (ReturnedOffsets.isUnknown())
      return ChangeStatus::UNCHANGED;

    if (ReachedReturnedOffsets.isUnknown()) {
      ReturnedOffsets.setUnknown();
      return ChangeStatus::CHANGED;

    if (ReturnedOffsets.merge(ReachedReturnedOffsets))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
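  /// Call \p CB on all accesses that might interfere with \p Range and return
  /// true if all such accesses were known and the callback returned true for
  /// all of them, false otherwise. The overload taking an Attributor
  /// additionally filters accesses via execution-domain, threading, and
  /// dominance reasoning before handing them to \p UserCB.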
  bool forallInterferingAccesses(
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB)
    return State::forallInterferingAccesses(Range, CB);

  bool forallInterferingAccesses(
      Attributor &A, const AbstractAttribute &QueryingAA, Instruction &I,
      bool FindInterferingWrites, bool FindInterferingReads,
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
      function_ref<bool(const Access &)> SkipCB) const override {
    HasBeenWrittenTo = false;

    SmallPtrSet<const Access *, 8> DominatingWrites;

    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
    bool AllInSameNoSyncFn = IsAssumedNoSync;
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);

    bool InstIsExecutedInAlignedRegion =
        FindInterferingReads && ExecDomainAA &&
        ExecDomainAA->isExecutedInAlignedRegion(A, I);

    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    InformationCache &InfoCache = A.getInfoCache();
    bool IsThreadLocalObj =

    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
              : A.lookupAAFor<AAExecutionDomain>(
      if (!FnExecDomainAA)
      if (InstIsExecutedInAlignedRegion ||
          (FindInterferingWrites &&
           FnExecDomainAA->isExecutedInAlignedRegion(A, I))) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));

    bool IsKnownNoRecurse;
    bool InstInKernel = A.getInfoCache().isKernel(Scope);
    bool ObjHasKernelLifetime = false;
    const bool UseDominanceReasoning =
        FindInterferingWrites && IsKnownNoRecurse;
    const DominatorTree *DT =

      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:

    std::function<bool(const Function &)> IsLiveInCalleeCB;

      const Function *AIFn = AI->getFunction();
      ObjHasKernelLifetime = A.getInfoCache().isKernel(*AIFn);
      bool IsKnownNoRecurse;
              IsKnownNoRecurse)) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };
      ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
      if (ObjHasKernelLifetime)
        IsLiveInCalleeCB = [&A](const Function &Fn) {
          return !A.getInfoCache().isKernel(Fn);

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      Function *AccScope = Acc.getRemoteInst()->getFunction();
      bool AccInSameScope = AccScope == &Scope;

      if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
          A.getInfoCache().isKernel(*AccScope))

      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());

      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))

      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() && AccInSameScope &&
        DominatingWrites.insert(&Acc);

      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});

    if (!State::forallInterferingAccesses(I, AccessCB, Range))

    HasBeenWrittenTo = !DominatingWrites.empty();

    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();

    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (SkipCB && SkipCB(Acc))
      if (!CanIgnoreThreading(Acc))

      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;
                                    &ExclusionSet, IsLiveInCalleeCB))
      if (!WriteChecked) {
                                      &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;

      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {

        const auto *FnReachabilityAA = A.getAAFor<AAInterFnReachability>(
        if (FnReachabilityAA) {
          if (!FnReachabilityAA->instructionCanReach(
                  A, *LeastDominatingWriteInst,
                  *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
            WriteChecked = true;

      if (ReadChecked && WriteChecked)

      if (!DT || !UseDominanceReasoning)
      if (!DominatingWrites.count(&Acc))
      return LeastDominatingWriteInst != Acc.getRemoteInst();

    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
                                             const AAPointerInfo &OtherAA,
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
    Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
        bool UsedAssumedInformation = false;
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));

        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());
  ChangeStatus translateAndAddState(Attributor &A, const AAPointerInfo &OtherAA,
                                    const OffsetInfo &Offsets, CallBase &CB,
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (!IsMustAcc && RAcc.isAssumption())
        for (auto Offset : Offsets) {
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
          Changed |= addAccess(A, NewRanges, CB, RAcc.getContent(), AK,
                               RAcc.getType(), RAcc.getRemoteInst());

  void trackPointerInfoStatistics(const IRPosition &IRP) const {}

  void dumpState(raw_ostream &O) {
    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << " - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << " --> " << *Acc.getRemoteInst()
        if (!Acc.isWrittenValueYetUndetermined()) {
            O << " - c: func " << Acc.getWrittenValue()->getName()
          else if (Acc.getWrittenValue())
            O << " - c: " << *Acc.getWrittenValue() << "\n";
            O << " - c: <unknown>\n";
struct AAPointerInfoFloating : public AAPointerInfoImpl {
  AAPointerInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  bool handleAccess(Attributor &A, Instruction &I,
                    std::optional<Value *> Content, AccessKind Kind,
    using namespace AA::PointerInfo;
    const DataLayout &DL = A.getDataLayout();
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);

    if (!VT || VT->getElementCount().isScalable() ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {

      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();

      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
            ConstContent, ConstantInt::get(Int32Ty, i));

        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;

  bool collectConstantsForGEP(Attributor &A, const DataLayout &DL,
                              OffsetInfo &UsrOI, const OffsetInfo &PtrOI,
                              const GEPOperator *GEP);

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
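/// If the indices of \p GEP can be traced to constants, incorporate all of
/// them into \p UsrOI; otherwise the user offset stays/becomes unknown.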
bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const DataLayout &DL,
                                                   const OffsetInfo &PtrOI,
                                                   const GEPOperator *GEP) {
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());
  SmallMapVector<Value *, APInt, 4> VariableOffsets;

  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {

                    << (VariableOffsets.empty() ? "" : "not") << " constant "

  Union.addToAll(ConstantOffset.getSExtValue());

  for (const auto &VI : VariableOffsets) {
    auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
    if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {

    if (PotentialConstantsAA->undefIsContained())

    auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
    if (AssumedSet.empty())

    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);

  UsrOI = std::move(Union);
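// The update walks all uses of the associated pointer (via
// Attributor::checkForAllUses), carrying an OffsetInfo per visited value:
// pass-through users keep the offsets, GEPs add constant offsets, PHIs are
// checked for invariance against a cycle analysis, loads/stores/atomics record
// accesses, and call sites translate the callee argument's pointer info back
// to this position.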
ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &A) {
  using namespace AA::PointerInfo;
  const DataLayout &DL = A.getDataLayout();
  Value &AssociatedValue = getAssociatedValue();

  DenseMap<Value *, OffsetInfo> OffsetInfoMap;
  OffsetInfoMap[&AssociatedValue].insert(0);

  auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
           "CurPtr does not exist in the map!");

    auto &UsrOI = OffsetInfoMap[Usr];
    auto &PtrOI = OffsetInfoMap[CurPtr];
    assert(!PtrOI.isUnassigned() &&
           "Cannot pass through if the input Ptr was not visited!");

  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    User *Usr = U.getUser();
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in " << *Usr
           "The current pointer offset should have been seeded!");
    assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
           "Current pointer should be assigned");

      return HandlePassthroughUser(Usr, CurPtr, Follow);
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE

      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (UsrOI.isUnknown())

      if (PtrOI.isUnknown()) {

      Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);

      return HandlePassthroughUser(Usr, CurPtr, Follow);

      if (RI->getFunction() == getAssociatedFunction()) {
        auto &PtrOI = OffsetInfoMap[CurPtr];
        Changed |= setReachesReturn(PtrOI);

      auto &UsrOI = PhiIt->second;
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (PtrOI.isUnknown()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                          << *CurPtr << " in " << *PHI << "\n");
        Follow = !UsrOI.isUnknown();

      if (UsrOI == PtrOI) {
        assert(!PtrOI.isUnassigned() &&
               "Cannot assign if the current Ptr was not visited!");
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");

        auto It = OffsetInfoMap.find(CurPtrBase);
        if (It == OffsetInfoMap.end()) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                            << *CurPtr << " in " << *PHI
                            << " (base: " << *CurPtrBase << ")\n");

            A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
                *PHI->getFunction());

        auto BaseOI = It->getSecond();
        BaseOI.addToAll(Offset.getZExtValue());
        if (IsFirstPHIUser || BaseOI == UsrOI) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                            << " in " << *Usr << "\n");
          return HandlePassthroughUser(Usr, CurPtr, Follow);

            dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                   << *CurPtr << " in " << *PHI << "\n");
      if (!handleAccess(A, *LoadI, nullptr, AK,
                        OffsetInfoMap[CurPtr].Offsets, Changed,

        return II->isAssumeLikeIntrinsic();
      } while (FromI && FromI != ToI);

      auto IsValidAssume = [&](IntrinsicInst &IntrI) {
        if (IntrI.getIntrinsicID() != Intrinsic::assume)
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
          if ((*PredIt) != BB)
          if (SuccBB == IntrBB)
        if (IsImpactedInRange(LoadI->getNextNode(), BB->getTerminator()))
        if (IsImpactedInRange(&IntrBB->front(), &IntrI))

      std::pair<Value *, IntrinsicInst *> Assumption;
      for (const Use &LoadU : LoadI->uses()) {
          if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
          for (const Use &CmpU : CmpI->uses()) {
              if (!IsValidAssume(*IntrI))
              int Idx = CmpI->getOperandUse(0) == LoadU;
              Assumption = {CmpI->getOperand(Idx), IntrI};
        if (Assumption.first)

      if (!Assumption.first || !Assumption.second)
                        << *Assumption.second << ": " << *LoadI
                        << " == " << *Assumption.first << "\n");
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (Assumption.first)
            A.getAssumedSimplified(*Assumption.first, *this,
      return handleAccess(
          A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
          OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());

      for (auto *OtherOp : OtherOps) {
        if (OtherOp == CurPtr) {
              << "[AAPointerInfo] Escaping use in store like instruction " << I

      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
        Content = A.getAssumedSimplified(
      return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,

      return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                             *StoreI->getValueOperand()->getType(),
                             {StoreI->getValueOperand()}, AccessKind::AK_W);
      return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                             {RMWI->getValOperand()}, AccessKind::AK_RW);
      return HandleStoreLike(
          *CXI, nullptr, *CXI->getNewValOperand()->getType(),
          {CXI->getCompareOperand(), CXI->getNewValOperand()},

          A.getInfoCache().getTargetLibraryInfoForFunction(*CB->getFunction());

        const auto *CSArgPI = A.getAAFor<AAPointerInfo>(
        Changed = translateAndAddState(A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
        if (!CSArgPI->reachesReturn())
          return isValidState();

        if (!Callee || Callee->arg_size() <= ArgNo)
        bool UsedAssumedInformation = false;
        auto ReturnedValue = A.getAssumedSimplified(
        auto *Arg = Callee->getArg(ArgNo);
        if (ReturnedArg && Arg != ReturnedArg)
        bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
        const auto *CSRetPI = A.getAAFor<AAPointerInfo>(
        OffsetInfo OI = OffsetInfoMap[CurPtr];
        CSArgPI->addReturnedOffsetsTo(OI);
            translateAndAddState(A, *CSRetPI, OI, *CB, IsRetMustAcc) | Changed;
        return isValidState();

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB

    LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");

  auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
    assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
    assert(!OffsetInfoMap[OldU].isUnassigned() &&
           "Old use should be assigned");
    if (OffsetInfoMap.count(NewU)) {
      if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
        dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
               << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
      return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
    return HandlePassthroughUser(NewU.get(), OldU.get(), Unused);

  if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                         true, EquivalentUseCB)) {
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
    return indicatePessimisticFixpoint();

    dbgs() << "Accesses by bin after update:\n";
struct AAPointerInfoReturned final : AAPointerInfoImpl {
  AAPointerInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

    return indicatePessimisticFixpoint();

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoArgument final : AAPointerInfoFloating {
  AAPointerInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
  AAPointerInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

    using namespace AA::PointerInfo;
      if (auto Length = MI->getLengthInBytes())
        LengthVal = Length->getSExtValue();
      unsigned ArgNo = getIRPosition().getCallSiteArgNo();
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
        return indicatePessimisticFixpoint();
            ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
        Changed =
            Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);

      dbgs() << "Accesses by bin after update:\n";

    Argument *Arg = getAssociatedArgument();
          A.getAAFor<AAPointerInfo>(*this, ArgPos, DepClassTy::REQUIRED);
      if (ArgAA && ArgAA->getState().isValidState())
        return translateAndAddStateFromCallee(A, *ArgAA,
      return indicatePessimisticFixpoint();

    bool IsKnownNoCapture;
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
      return ChangeStatus::UNCHANGED;
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
  AAPointerInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
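// Deduction of the `nounwind` attribute: a function is nounwind if none of its
// instructions may throw (calls are checked via their callee's nounwind
// state), and a call site inherits the attribute from its callee.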
struct AANoUnwindImpl : AANoUnwind {
  AANoUnwindImpl(const IRPosition &IRP, Attributor &A) : AANoUnwind(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nounwind" : "may-unwind";

        (unsigned)Instruction::Invoke,      (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call,        (unsigned)Instruction::CleanupRet,
        (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

      if (!I.mayThrow(true))

      bool IsKnownNoUnwind;

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANoUnwindFunction final : public AANoUnwindImpl {
  AANoUnwindFunction(const IRPosition &IRP, Attributor &A)
      : AANoUnwindImpl(IRP, A) {}

struct AANoUnwindCallSite final
    : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
  AANoUnwindCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP, A) {}
    case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
    case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
    case Intrinsic::nvvm_barrier0_and:
    case Intrinsic::nvvm_barrier0_or:
    case Intrinsic::nvvm_barrier0_popc:
    case Intrinsic::amdgcn_s_barrier:
      if (ExecutedAligned)

  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
  case Instruction::Store:
  case Instruction::Load:
                     "New atomic operations need to be known in the attributor.");

    return !MI->isVolatile();

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nosync" : "may-sync";

      if (I.mayReadOrWriteMemory())

    bool UsedAssumedInformation = false;
    if (!A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *this,
                                            UsedAssumedInformation) ||
        !A.checkForAllCallLikeInstructions(CheckForNoSync, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

struct AANoSyncFunction final : public AANoSyncImpl {
  AANoSyncFunction(const IRPosition &IRP, Attributor &A)
      : AANoSyncImpl(IRP, A) {}

struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
  AANoSyncCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP, A) {}
struct AANoFreeImpl : public AANoFree {
  AANoFreeImpl(const IRPosition &IRP, Attributor &A) : AANoFree(IRP, A) {}

                                                DepClassTy::NONE, IsKnown));

                                            DepClassTy::REQUIRED, IsKnown);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nofree" : "may-free";

struct AANoFreeFunction final : public AANoFreeImpl {
  AANoFreeFunction(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
  AANoFreeCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP, A) {}

struct AANoFreeFloating : AANoFreeImpl {
  AANoFreeFloating(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

    const IRPosition &IRP = getIRPosition();
                                       DepClassTy::OPTIONAL, IsKnown))
      return ChangeStatus::UNCHANGED;

    Value &AssociatedValue = getIRPosition().getAssociatedValue();
    auto Pred = [&](const Use &U, bool &Follow) -> bool {
                                              DepClassTy::REQUIRED, IsKnown);

    if (!A.checkForAllUses(Pred, *this, AssociatedValue))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANoFreeArgument final : AANoFreeFloating {
  AANoFreeArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

struct AANoFreeCallSiteArgument final : AANoFreeFloating {
  AANoFreeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

    Argument *Arg = getAssociatedArgument();
      return indicatePessimisticFixpoint();
                                       DepClassTy::REQUIRED, IsKnown))
      return ChangeStatus::UNCHANGED;
    return indicatePessimisticFixpoint();

struct AANoFreeReturned final : AANoFreeFloating {
  AANoFreeReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {

  void trackStatistics() const override {}

struct AANoFreeCallSiteReturned final : AANoFreeFloating {
  AANoFreeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

    return ChangeStatus::UNCHANGED;
                                      bool IgnoreSubsumingPositions) {
    AttrKinds.push_back(Attribute::NonNull);
    AttrKinds.push_back(Attribute::Dereferenceable);
  if (A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))

  if (!Fn->isDeclaration()) {

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
              Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
            UsedAssumedInformation, false, true))

                       Attribute::NonNull)});

static int64_t getKnownNonNullAndDerefBytesForUse(
    Attributor &A, const AbstractAttribute &QueryingAA, Value &AssociatedValue,
    const Use *U, const Instruction *I, bool &IsNonNull, bool &TrackUse) {

  const Value *UseV = U->get();

  const DataLayout &DL = A.getInfoCache().getDL();

          U, {Attribute::NonNull, Attribute::Dereferenceable})) {

    bool IsKnownNonNull;
    IsNonNull |= IsKnownNonNull;
    return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;

  if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
      Loc->Size.isScalable() || I->isVolatile())

  if (Base && Base == &AssociatedValue) {
    int64_t DerefBytes = Loc->Size.getValue() + Offset;
    return std::max(int64_t(0), DerefBytes);

    int64_t DerefBytes = Loc->Size.getValue();
    return std::max(int64_t(0), DerefBytes);
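// Deduction of the `nonnull` attribute: value positions are analyzed through
// their uses in the must-be-executed context (dereferencing accesses imply
// non-null), arguments aggregate their call site states, and call site
// returns inherit from the callee.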
struct AANonNullImpl : AANonNull {
  AANonNullImpl(const IRPosition &IRP, Attributor &A) : AANonNull(IRP, A) {}

    Value &V = *getAssociatedValue().stripPointerCasts();

      indicatePessimisticFixpoint();

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);

  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANonNull::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    getKnownNonNullAndDerefBytesForUse(A, *this, getAssociatedValue(), U, I,
                                       IsNonNull, TrackUse);
    State.setKnown(IsNonNull);

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";

struct AANonNullFloating : public AANonNullImpl {
  AANonNullFloating(const IRPosition &IRP, Attributor &A)
      : AANonNullImpl(IRP, A) {}

    auto CheckIRP = [&](const IRPosition &IRP) {
      bool IsKnownNonNull;
          A, *this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);

    bool UsedAssumedInformation = false;
    Value *AssociatedValue = &getAssociatedValue();

    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,

        Values.size() != 1 || Values.front().getValue() != AssociatedValue;

          return AA::hasAssumedIRAttr<Attribute::NonNull>(
              A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
        return ChangeStatus::UNCHANGED;

                                     DepClassTy::OPTIONAL, IsKnown) &&
                                     DepClassTy::OPTIONAL, IsKnown))
        return ChangeStatus::UNCHANGED;

      if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
        return indicatePessimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    for (const auto &VAC : Values)
        return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANonNullReturned final
    : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                   false, AANonNull::IRAttributeKind, false> {
  AANonNullReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";

struct AANonNullArgument final
    : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
  AANonNullArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP, A) {}

struct AANonNullCallSiteArgument final : AANonNullFloating {
  AANonNullCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANonNullFloating(IRP, A) {}

struct AANonNullCallSiteReturned final
    : AACalleeToCallSite<AANonNull, AANonNullImpl> {
  AANonNullCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP, A) {}
struct AAMustProgressImpl : public AAMustProgress {
  AAMustProgressImpl(const IRPosition &IRP, Attributor &A)
      : AAMustProgress(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "mustprogress" : "may-not-progress";

struct AAMustProgressFunction final : AAMustProgressImpl {
  AAMustProgressFunction(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
        return indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    auto CheckForMustProgress = [&](AbstractCallSite ACS) {
      bool IsKnownMustProgress;
          A, this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckForMustProgress, *this,
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {

struct AAMustProgressCallSite final : AAMustProgressImpl {
  AAMustProgressCallSite(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

    bool IsKnownMustProgress;
            A, this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {
struct AANoRecurseImpl : public AANoRecurse {
  AANoRecurseImpl(const IRPosition &IRP, Attributor &A) : AANoRecurse(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "norecurse" : "may-recurse";

struct AANoRecurseFunction final : AANoRecurseImpl {
  AANoRecurseFunction(const IRPosition &IRP, Attributor &A)
      : AANoRecurseImpl(IRP, A) {}

    auto CallSitePred = [&](AbstractCallSite ACS) {
      bool IsKnownNoRecurse;
              DepClassTy::NONE, IsKnownNoRecurse))
      return IsKnownNoRecurse;

    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(CallSitePred, *this, true,
                               UsedAssumedInformation)) {
      if (!UsedAssumedInformation)
        indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    const AAInterFnReachability *EdgeReachability =
        A.getAAFor<AAInterFnReachability>(*this, getIRPosition(),
                                          DepClassTy::REQUIRED);
    if (EdgeReachability && EdgeReachability->canReach(A, *getAnchorScope()))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

struct AANoRecurseCallSite final
    : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
  AANoRecurseCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP, A) {}
struct AANonConvergentImpl : public AANonConvergent {
  AANonConvergentImpl(const IRPosition &IRP, Attributor &A)
      : AANonConvergent(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "non-convergent" : "may-be-convergent";

struct AANonConvergentFunction final : AANonConvergentImpl {
  AANonConvergentFunction(const IRPosition &IRP, Attributor &A)
      : AANonConvergentImpl(IRP, A) {}

    auto CalleeIsNotConvergent = [&](Instruction &Inst) {
      if (!Callee || Callee->isIntrinsic()) {
      if (Callee->isDeclaration()) {
        return !Callee->hasFnAttribute(Attribute::Convergent);
      const auto *ConvergentAA = A.getAAFor<AANonConvergent>(
      return ConvergentAA && ConvergentAA->isAssumedNotConvergent();

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *this,
                                           UsedAssumedInformation)) {
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

    if (isKnownNotConvergent() &&
        A.hasAttr(getIRPosition(), Attribute::Convergent)) {
      A.removeAttrs(getIRPosition(), {Attribute::Convergent});
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
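// Deduction of undefined behavior: instructions that provably trigger UB
// (e.g., dereferencing a null/undef pointer, branching on undef, passing
// null or undef to noundef/nonnull arguments) are collected in KnownUBInsts
// and can later be replaced by `unreachable` during manifest.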
struct AAUndefinedBehaviorImpl : public AAUndefinedBehavior {
  AAUndefinedBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehavior(IRP, A) {}

    const size_t UBPrevSize = KnownUBInsts.size();
    const size_t NoUBPrevSize = AssumedNoUBInsts.size();

      if (I.isVolatile() && I.mayWriteToMemory())

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))

             "Expected pointer operand of memory accessing instruction");

      std::optional<Value *> SimplifiedPtrOp =
          stopOnUndefOrAssumed(A, PtrOp, &I);
      if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
      const Value *PtrOpVal = *SimplifiedPtrOp;

        AssumedNoUBInsts.insert(&I);

        AssumedNoUBInsts.insert(&I);

        KnownUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))

      if (BrInst->isUnconditional())

      std::optional<Value *> SimplifiedCond =
          stopOnUndefOrAssumed(A, BrInst->getCondition(), BrInst);
      if (!SimplifiedCond || !*SimplifiedCond)
      AssumedNoUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))

      for (unsigned idx = 0; idx < CB.arg_size(); idx++) {
        if (idx >= Callee->arg_size())

        bool IsKnownNoUndef;
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
        if (!IsKnownNoUndef)
        bool UsedAssumedInformation = false;
        std::optional<Value *> SimplifiedVal =
        if (UsedAssumedInformation)
        if (SimplifiedVal && !*SimplifiedVal)
          KnownUBInsts.insert(&I);

        bool IsKnownNonNull;
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
          KnownUBInsts.insert(&I);

      std::optional<Value *> SimplifiedRetValue =
          stopOnUndefOrAssumed(A, RI.getReturnValue(), &I);
      if (!SimplifiedRetValue || !*SimplifiedRetValue)

        bool IsKnownNonNull;
          KnownUBInsts.insert(&I);

    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(InspectMemAccessInstForUB, *this,
                              {Instruction::Load, Instruction::Store,
                               Instruction::AtomicCmpXchg,
                               Instruction::AtomicRMW},
                              UsedAssumedInformation,
    A.checkForAllInstructions(InspectBrInstForUB, *this, {Instruction::Br},
                              UsedAssumedInformation,
    A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *this,
                                      UsedAssumedInformation);

    if (!getAnchorScope()->getReturnType()->isVoidTy()) {
      if (!A.isAssumedDead(ReturnIRP, this, nullptr, UsedAssumedInformation)) {
        bool IsKnownNoUndef;
            A, this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
          A.checkForAllInstructions(InspectReturnInstForUB, *this,
                                    {Instruction::Ret}, UsedAssumedInformation,

    if (NoUBPrevSize != AssumedNoUBInsts.size() ||
        UBPrevSize != KnownUBInsts.size())
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;

  bool isKnownToCauseUB(Instruction *I) const override {
    return KnownUBInsts.count(I);

  bool isAssumedToCauseUB(Instruction *I) const override {
    switch (I->getOpcode()) {
    case Instruction::Load:
    case Instruction::Store:
    case Instruction::AtomicCmpXchg:
    case Instruction::AtomicRMW:
      return !AssumedNoUBInsts.count(I);
    case Instruction::Br: {
      if (BrInst->isUnconditional())
      return !AssumedNoUBInsts.count(I);

    if (KnownUBInsts.empty())
      return ChangeStatus::UNCHANGED;
    for (Instruction *I : KnownUBInsts)
      A.changeToUnreachableAfterManifest(I);
    return ChangeStatus::CHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "undefined-behavior" : "no-ub";

  SmallPtrSet<Instruction *, 8> KnownUBInsts;

  SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;

  std::optional<Value *> stopOnUndefOrAssumed(Attributor &A, Value *V,
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimplifiedV =
    if (!UsedAssumedInformation) {
        KnownUBInsts.insert(I);
        return std::nullopt;
      KnownUBInsts.insert(I);
      return std::nullopt;

struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
  AAUndefinedBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehaviorImpl(IRP, A) {}

  void trackStatistics() const override {
    STATS_DECL(UndefinedBehaviorInstruction, Instruction,
               "Number of instructions known to have UB");
        KnownUBInsts.size();
static bool mayContainUnboundedCycle(Function &F, Attributor &A) {
  ScalarEvolution *SE =
      A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(F);
  LoopInfo *LI = A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(F);

  for (scc_iterator<Function *> SCCI = scc_begin(&F); !SCCI.isAtEnd(); ++SCCI)
    if (SCCI.hasCycle())

  for (auto *L : LI->getLoopsInPreorder()) {
struct AAWillReturnImpl : public AAWillReturn {
  AAWillReturnImpl(const IRPosition &IRP, Attributor &A)
      : AAWillReturn(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  bool isImpliedByMustprogressAndReadonly(Attributor &A, bool KnownOnly) {
    if (!A.hasAttr(getIRPosition(), {Attribute::MustProgress}))

    return IsKnown || !KnownOnly;

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

              A, this, IPos, DepClassTy::REQUIRED, IsKnown)) {

      bool IsKnownNoRecurse;
          A, this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "willreturn" : "may-noreturn";

struct AAWillReturnFunction final : AAWillReturnImpl {
  AAWillReturnFunction(const IRPosition &IRP, Attributor &A)
      : AAWillReturnImpl(IRP, A) {}

    AAWillReturnImpl::initialize(A);

    assert(F && "Did expect an anchor function");
    if (F->isDeclaration() || mayContainUnboundedCycle(*F, A))
      indicatePessimisticFixpoint();

struct AAWillReturnCallSite final
    : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
  AAWillReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP, A) {}

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

    return AACalleeToCallSite::updateImpl(A);
  const ToTy *To = nullptr;

    if (!ES || ES->empty()) {
      ExclusionSet = nullptr;
    } else if (MakeUnique) {
      ExclusionSet = A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);

    if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
    return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);

#define DefineKeys(ToTy)                                                       \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey =                  \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getEmptyKey(),                \
              DenseMapInfo<const ToTy *>::getEmptyKey());                      \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey =              \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getTombstoneKey(),            \
              DenseMapInfo<const ToTy *>::getTombstoneKey());
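/// Base class for abstract attributes that answer reachability queries.
/// Results are cached per (From, To, ExclusionSet) query so that repeated
/// queries from other abstract attributes stay cheap across fixpoint
/// iterations.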
template <typename BaseTy, typename ToTy>
struct CachedReachabilityAA : public BaseTy {
  using RQITy = ReachabilityQueryInfo<ToTy>;

  CachedReachabilityAA(const IRPosition &IRP, Attributor &A) : BaseTy(IRP, A) {}

  bool isQueryAA() const override { return true; }

    for (unsigned u = 0, e = QueryVector.size(); u < e; ++u) {
      RQITy *RQI = QueryVector[u];
      if (RQI->Result == RQITy::Reachable::No &&
        Changed = ChangeStatus::CHANGED;

                               bool IsTemporaryRQI) = 0;

  bool rememberResult(Attributor &A, typename RQITy::Reachable Result,
                      RQITy &RQI, bool UsedExclusionSet, bool IsTemporaryRQI) {
      QueryCache.erase(&RQI);

    if (Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
      RQITy PlainRQI(RQI.From, RQI.To);
      if (!QueryCache.count(&PlainRQI)) {
        RQITy *RQIPtr = new (A.Allocator) RQITy(RQI.From, RQI.To);
        QueryVector.push_back(RQIPtr);
        QueryCache.insert(RQIPtr);

    if (IsTemporaryRQI && Result != RQITy::Reachable::Yes && UsedExclusionSet) {
      assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
             "Did not expect empty set!");
      RQITy *RQIPtr = new (A.Allocator)
          RQITy(A, *RQI.From, *RQI.To, RQI.ExclusionSet, true);
      assert(RQIPtr->Result == RQITy::Reachable::No && "Already reachable?");
      assert(!QueryCache.count(RQIPtr));
      QueryVector.push_back(RQIPtr);
      QueryCache.insert(RQIPtr);

    if (Result == RQITy::Reachable::No && IsTemporaryRQI)
      A.registerForUpdate(*this);
    return Result == RQITy::Reachable::Yes;

  const std::string getAsStr(Attributor *A) const override {
    return "#queries(" + std::to_string(QueryVector.size()) + ")";

  bool checkQueryCache(Attributor &A, RQITy &StackRQI,
                       typename RQITy::Reachable &Result) {
    if (!this->getState().isValidState()) {
      Result = RQITy::Reachable::Yes;

    if (StackRQI.ExclusionSet) {
      RQITy PlainRQI(StackRQI.From, StackRQI.To);
      auto It = QueryCache.find(&PlainRQI);
      if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
        Result = RQITy::Reachable::No;

    auto It = QueryCache.find(&StackRQI);
    if (It != QueryCache.end()) {

    QueryCache.insert(&StackRQI);

  DenseSet<RQITy *> QueryCache;
struct AAIntraFnReachabilityFunction final
    : public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
  using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;

  AAIntraFnReachabilityFunction(const IRPosition &IRP, Attributor &A)
    DT = A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(

  bool isAssumedReachable(
      Attributor &A, const Instruction &From, const Instruction &To,
    auto *NonConstThis = const_cast<AAIntraFnReachabilityFunction *>(this);

    RQITy StackRQI(A, From, To, ExclusionSet, false);
    typename RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
    return Result == RQITy::Reachable::Yes;

        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
            [&](const auto &DeadEdge) {
              return LivenessAA->isEdgeDead(DeadEdge.first,
              return LivenessAA->isAssumedDead(BB);
      return ChangeStatus::UNCHANGED;

    return Base::updateImpl(A);

                       bool IsTemporaryRQI) override {

    bool UsedExclusionSet = false;
      while (IP && IP != &To) {
        if (ExclusionSet && IP != Origin && ExclusionSet->count(IP)) {
          UsedExclusionSet = true;

    const BasicBlock *FromBB = RQI.From->getParent();
    const BasicBlock *ToBB = RQI.To->getParent();
           "Not an intra-procedural query!");

    if (FromBB == ToBB &&
        WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,

    if (!WillReachInBlock(ToBB->front(), *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

    SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
    if (RQI.ExclusionSet)
      for (auto *I : *RQI.ExclusionSet)
        if (I->getFunction() == Fn)
          ExclusionBlocks.insert(I->getParent());

    if (ExclusionBlocks.count(FromBB) &&
      return rememberResult(A, RQITy::Reachable::No, RQI, true, IsTemporaryRQI);

        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
      DeadBlocks.insert(ToBB);
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

    SmallPtrSet<const BasicBlock *, 16> Visited;

    DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
    while (!Worklist.empty()) {
      if (!Visited.insert(BB).second)
      for (const BasicBlock *SuccBB : successors(BB)) {
        if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
          LocalDeadEdges.insert({BB, SuccBB});
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
        if (ExclusionBlocks.count(SuccBB)) {
          UsedExclusionSet = true;

    DeadEdges.insert_range(LocalDeadEdges);
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

  void trackStatistics() const override {}

  DenseSet<const BasicBlock *> DeadBlocks;

  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;

  const DominatorTree *DT = nullptr;
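// Deduction of the `noalias` attribute: an argument is noalias if the passed
// value is noalias at its definition, is not captured, and cannot alias any
// other argument of the same call; a returned value is noalias if all
// returned values are.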
3765 bool IgnoreSubsumingPositions) {
3766 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3767 "Unexpected attribute kind");
3773 IgnoreSubsumingPositions =
true;
3784 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3785 IgnoreSubsumingPositions, Attribute::NoAlias))
3795 "Noalias is a pointer attribute");
3798 const std::string getAsStr(
Attributor *
A)
const override {
3799 return getAssumed() ?
"noalias" :
"may-alias";
3804struct AANoAliasFloating final : AANoAliasImpl {
3805 AANoAliasFloating(
const IRPosition &IRP, Attributor &
A)
3806 : AANoAliasImpl(IRP,
A) {}
3811 return indicatePessimisticFixpoint();
3815 void trackStatistics()
const override {
/// NoAlias attribute for an argument.
struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
  AANoAliasArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // We have to make sure no-alias on the argument does not break
    // synchronization when this is a callback argument.

    // If the function is no-sync, no-alias cannot break synchronization.
    bool IsKnownNoSycn;
    if (AA::hasAssumedIRAttr<Attribute::NoSync>(
            A, this, IRPosition::function_scope(getIRPosition()),
            DepClassTy::OPTIONAL, IsKnownNoSycn))
      return Base::updateImpl(A);

    // If the argument is read-only, no-alias cannot break synchronization.
    bool IsKnown;
    if (AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown))
      return Base::updateImpl(A);

    // If the argument is never passed through callbacks, no-alias cannot break
    // synchronization.
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *this,
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    // TODO: add no-alias but make sure it does not break synchronization by
    //       introducing fake uses.
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(noalias) }
};
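// A short IR-level sketch of what the argument deduction above enables; the
// function names are invented for illustration only:
//
//   define internal void @callee(ptr %p) { ... }
//   define void @caller() {
//     %a = alloca i32
//     call void @callee(ptr %a)   ; %a does not alias anything else here
//     ret void
//   }
//
// If every call site passes a value that is noalias at the call site and the
// checks above rule out synchronization hazards, AANoAliasArgument can mark
// %p of @callee as `noalias`.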
/// NoAlias attribute for a call site argument.
struct AANoAliasCallSiteArgument final : AANoAliasImpl {
  AANoAliasCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// Determine whether the underlying value may alias with the call site
  /// argument \p OtherArgNo of the underlying call base \p CB.
  bool mayAliasWithArgument(Attributor &A, AAResults *&AAR,
                            const AAMemoryBehavior &MemBehaviorAA,
                            const CallBase &CB, unsigned OtherArgNo) {
    // We do not need to worry about aliasing with the underlying IRP.
    if (this->getCalleeArgNo() == (int)OtherArgNo)
      return false;

    // If it is not a pointer or pointer vector we do not alias.
    const Value *ArgOp = CB.getArgOperand(OtherArgNo);
    if (!ArgOp->getType()->isPtrOrPtrVectorTy())
      return false;

    auto *CBArgMemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
        *this, IRPosition::callsite_argument(CB, OtherArgNo), DepClassTy::NONE);

    // If the argument is readnone, there is no read-write aliasing.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // If both the argument and the underlying value are only read, there is no
    // read-write aliasing either.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
        MemBehaviorAA.isAssumedReadOnly()) {
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // We have to utilize actual alias analysis queries so we need the object.
    if (!AAR)
      AAR = A.getInfoCache().getAnalysisResultForFunction<AAManager>(
          *getAnchorScope());

    // Try to rule it out at the call site.
    bool IsAliasing = !AAR || !AAR->isNoAlias(&getAssociatedValue(), ArgOp);
    LLVM_DEBUG(dbgs() << "[NoAliasCSArg] Check alias between "
                         "callsite arguments: "
                      << getAssociatedValue() << " " << *ArgOp << " => "
                      << (IsAliasing ? "" : "no-") << "alias \n");

    return IsAliasing;
  }
  bool isKnownNoAliasDueToNoAliasPreservation(
      Attributor &A, AAResults *&AAR, const AAMemoryBehavior &MemBehaviorAA) {
    // We deduce "noalias" for the call site argument if the value is noalias
    // at its definition, is not captured before the call, and cannot alias any
    // other pointer argument of the call.
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    const Function *ScopeFn = VIRP.getAnchorScope();

    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());
      // ...
      if (ScopeFn) {
        if (auto *CB = dyn_cast<CallBase>(UserI)) {
          if (CB->isArgOperand(&U)) {
            bool IsKnownNoCapture;
            if (AA::hasAssumedIRAttr<Attribute::Captures>(
                    A, this,
                    IRPosition::callsite_argument(*CB, CB->getArgOperandNo(&U)),
                    DepClassTy::OPTIONAL, IsKnownNoCapture))
              return true;
          }
        }

        if (!AA::isPotentiallyReachable(
                A, *UserI, *getCtxI(), *this, /* ExclusionSet */ nullptr,
                [ScopeFn](const Function &Fn) { return &Fn != ScopeFn; }))
          return true;
      }
      // ...
      LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Unknown user: " << *UserI << "\n");
      return false;
    };

    bool IsKnownNoCapture;
    const AANoCapture *NoCaptureAA = nullptr;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
        A, this, VIRP, DepClassTy::NONE, IsKnownNoCapture, false, &NoCaptureAA);
    if (!IsAssumedNoCapture &&
        (!NoCaptureAA || !NoCaptureAA->isAssumedNoCaptureMaybeReturned())) {
      if (!A.checkForAllUses(UsePred, *this, getAssociatedValue())) {
        LLVM_DEBUG(
            dbgs() << "[AANoAliasCSArg] " << getAssociatedValue()
                   << " cannot be noalias as it is potentially captured\n");
        return false;
      }
    }
    if (NoCaptureAA)
      A.recordDependence(*NoCaptureAA, *this, DepClassTy::OPTIONAL);

    // Check there is no other pointer argument which could alias with the
    // value passed at this call site.
    const auto &CB = cast<CallBase>(getAnchorValue());
    for (unsigned OtherArgNo = 0; OtherArgNo < CB.arg_size(); OtherArgNo++)
      if (mayAliasWithArgument(A, AAR, MemBehaviorAA, CB, OtherArgNo))
        return false;

    return true;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If the argument is readnone we are done as there are no accesses via the
    // argument.
    auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    bool IsKnownNoAlias;
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
            A, this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AANoAlias] " << getAssociatedValue()
                        << " is not no-alias at the definition\n");
      return indicatePessimisticFixpoint();
    }

    AAResults *AAR = nullptr;
    if (MemBehaviorAA &&
        isKnownNoAliasDueToNoAliasPreservation(A, AAR, *MemBehaviorAA)) {
      LLVM_DEBUG(
          dbgs() << "[AANoAlias] No-Alias deduced via no-alias preservation\n");
      return ChangeStatus::UNCHANGED;
    }

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(noalias) }
};
/// NoAlias attribute for function return value.
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckReturnValue = [&](Value &RV) -> bool {
      // ...
      const IRPosition &RVPos = IRPosition::value(RV);
      bool IsKnownNoAlias;
      if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
        return false;

      bool IsKnownNoCapture;
      const AANoCapture *NoCaptureAA = nullptr;
      bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
          A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          &NoCaptureAA);
      return IsAssumedNoCapture ||
             (NoCaptureAA && NoCaptureAA->isAssumedNoCaptureMaybeReturned());
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(noalias) }
};

/// NoAlias attribute deduction for a call site return value.
struct AANoAliasCallSiteReturned final
    : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias) }
};
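// A sketch of the returned-pointer case handled above, on invented IR: every
// returned value must itself be noalias and not captured before the return.
//
//   define ptr @mk() {
//     %m = call noalias ptr @malloc(i64 8)
//     ret ptr %m            ; @mk's return can then be marked `noalias`
//   }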
struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

  /// Check if all uses are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type, void has no uses.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If we replace a value with a constant there are no uses left afterwards.
    if (!isa<Constant>(V)) {
      if (auto *I = dyn_cast<Instruction>(&V))
        if (!A.isRunOn(*I->getFunction()))
          return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> C =
          A.getAssumedConstant(V, *this, UsedAssumedInformation);
      if (!C || *C)
        return true;
    }

    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    // Explicitly set the dependence class to required so that a long chain of
    // dependent instructions is considered live as soon as one of them is,
    // without going through many update cycles.
    return A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ false);
  }

  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    // ...
    const IRPosition &CallIRP = IRPosition::callsite_function(*cast<CallBase>(I));

    bool IsKnownNoUnwind;
    if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      return false;

    bool IsKnown;
    return AA::isAssumedReadOnly(A, CallIRP, *this, IsKnown);
  }
};
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    // ...
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (!isa_and_nonnull<StoreInst>(I) && !isa_and_nonnull<FenceInst>(I)) {
      if (!isAssumedSideEffectFree(A, I)) {
        if (!isa_and_nonnull<InvokeInst>(I))
          indicatePessimisticFixpoint();
        else
          removeAssumedBits(HAS_NO_EFFECT);
      }
    }
  }

  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(*FI.getFunction()), *this, DepClassTy::NONE);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      return false;
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    return true;
  }

  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // Lang ref now states volatile stores are not UB, so we skip them.
    if (SI.isVolatile())
      return false;

    // If we are collecting assumes to be deleted we are in the manifest stage,
    // so we use the cached potential copies.
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            }))
          return true;
      }
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I) && isValidState())
      return "assumed-dead-store";
    if (isa_and_nonnull<FenceInst>(I) && isValidState())
      return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else if (auto *FI = dyn_cast_or_null<FenceInst>(I)) {
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }

  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        A.deleteAfterManifest(*I);
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (auto *FI = dyn_cast<FenceInst>(I)) {
        A.deleteAfterManifest(*FI);
        return ChangeStatus::CHANGED;
      }
      if (isAssumedSideEffectFree(A, I) && !isa<InvokeInst>(I)) {
        A.deleteAfterManifest(*I);
        return ChangeStatus::CHANGED;
      }
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(IsDead)
  }

private:
  // The potential copies of a dead store, kept for deletion during manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
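// Sketch of the dead-store reasoning implemented above, on invented IR:
//
//   %p = alloca i32
//   store i32 42, ptr %p          ; candidate dead store
//   %v = load i32, ptr %p         ; the only potential copy of the stored value
//   ; %v has no further uses (or only feeds llvm.assume)
//
// isDeadStore() collects the potential copies (%v here); if each copy is
// assumed dead, the store, the copies, and any assume-only users can all be
// deleted during manifest.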
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {}, {}, {}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(IsDead) }
};

struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    Use &U = CB.getArgOperandUse(getCallSiteArgNo());
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(IsDead) }
};
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    // We track this separately as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // TODO: Rewrite the signature to return void?
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    auto RetInstPred = [&](Instruction &I) {
      ReturnInst &RI = cast<ReturnInst>(I);
      if (!isa<UndefValue>(RI.getReturnValue()))
        AnyChange |= A.changeUseAfterManifest(RI.getOperandUse(0), UV);
      return true;
    };
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(IsDead) }
};
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; }, *this,
                                  true, UsedAssumedInformation);
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows to catch asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      auto *CB = dyn_cast<CallBase>(DeadEndI);
      if (!CB)
        continue;
      bool IsKnownNoReturn;
      // ...
      if (auto *II = dyn_cast<InvokeInst>(DeadEndI)) {
        if (Invoke2CallAllowed)
          A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      } else if (DeadEndI->getNextNode()) {
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      }
      HasChanged = ChangeStatus::CHANGED;
    }

    STATS_DECL(AAIsDead, BasicBlock, "Number of dead basic blocks deleted.");
    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        ++BUILD_STAT_NAME(AAIsDead, BasicBlock);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AAIsDead::isEdgeDead(...).
  bool isEdgeDead(const BasicBlock *From, const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           To->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

  /// Returns true if the whole function is assumed dead (never the case here).
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    // If the block is not assumed live, every instruction in it is dead.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;
    // Otherwise, the instruction is dead if it comes after a known dead end or
    // an exploration point earlier in the same block.
    const Instruction *PrevI = I->getPrevNode();
    while (PrevI) {
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
      PrevI = PrevI->getPrevNode();
    }
    return false;
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  /// Assume \p BB is (partially) live now and indicate to the Attributor that
  /// internal functions called from \p BB are live as well.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // We assume that all called functions in a live block are live too.
    for (const Instruction &I : BB)
      if (const auto *CB = dyn_cast<CallBase>(&I))
        if (const Function *F = CB->getCalledFunction())
          if (F->hasLocalLinkage())
            A.markLiveInternalFunction(*F);
    return true;
  }

  /// Collection of instructions that need to be explored again, e.g., we
  /// did assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live BasicBlocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
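// The exploration state above is driven by a worklist fixpoint: starting at
// the entry block, only successors reached through edges not proven dead are
// added to AssumedLiveBlocks, and instructions whose successor set depends on
// assumed information (e.g., a call that may be noreturn) are recorded in
// ToBeExploredFrom so they are revisited in later iterations. The helpers
// below compute the "alive successors" of each terminator or call under the
// current assumptions; the names and structure are those of this file, while
// any further details are sketched rather than exact.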
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  const IRPosition &IPos = IRPosition::callsite_function(CB);

  bool IsKnownNoReturn;
  if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoReturn))
    return !IsKnownNoReturn;
  if (CB.isTerminator())
    AliveSuccessors.push_back(&CB.getSuccessor(0)->front());
  else
    AliveSuccessors.push_back(CB.getNextNode());
  return false;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);

  // First, determine if we can change an invoke to a call assuming the callee
  // is nounwind. This is not possible if the personality of the function
  // allows to catch asynchronous exceptions.
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    const IRPosition &IPos = IRPosition::callsite_function(II);

    bool IsKnownNoUnwind;
    if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoUnwind)) {
      UsedAssumedInformation |= !IsKnownNoUnwind;
    } else {
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
    }
  }
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  if (BI.getNumSuccessors() == 1) {
    AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
  } else {
    std::optional<Constant *> C =
        A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
    if (!C || isa_and_nonnull<UndefValue>(*C)) {
      // No value yet, assume both edges are dead.
    } else if (isa_and_nonnull<ConstantInt>(*C)) {
      const BasicBlock *SuccBB =
          BI.getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
      AliveSuccessors.push_back(&SuccBB->front());
    } else {
      AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
      AliveSuccessors.push_back(&BI.getSuccessor(1)->front());
      UsedAssumedInformation = false;
    }
  }
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  SmallVector<AA::ValueAndContext> Values;
  if (!A.getAssumedSimplifiedValues(IRPosition::value(*SI.getCondition()), &AA,
                                    Values, AA::AnyScope,
                                    UsedAssumedInformation)) {
    // Something went wrong, assume all successors are live.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return false;
  }

  if (Values.empty() ||
      (Values.size() == 1 &&
       isa_and_nonnull<UndefValue>(Values.front().getValue()))) {
    // No valid value yet, assume all edges are dead.
    return UsedAssumedInformation;
  }

  Type &Ty = *SI.getCondition()->getType();
  SmallPtrSet<ConstantInt *, 8> Constants;
  auto CheckForConstantInt = [&](Value *V) {
    if (auto *CI = dyn_cast_if_present<ConstantInt>(AA::getWithType(*V, Ty))) {
      Constants.insert(CI);
      return true;
    }
    return false;
  };

  if (!all_of(Values, [&](AA::ValueAndContext &VAC) {
        return CheckForConstantInt(VAC.getValue());
      })) {
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return UsedAssumedInformation;
  }

  unsigned MatchedCases = 0;
  for (const auto &CaseIt : SI.cases()) {
    if (Constants.count(CaseIt.getCaseValue())) {
      ++MatchedCases;
      AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
    }
  }

  // If not all potential values were matched, the default successor is alive.
  if (MatchedCases < Constants.size())
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
  return UsedAssumedInformation;
}
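// Example of the switch handling above, on invented IR: if the condition is
// known (or assumed) to be the constant 1, only the matching case successor
// is appended to AliveSuccessors.
//
//   switch i32 1, label %default [ i32 0, label %bb0
//                                  i32 1, label %bb1 ]
//
// Only %bb1 becomes alive; %bb0 and %default stay dead unless another path
// reaches them or the assumed condition value later changes.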
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");

  // Copy and clear the list of instructions we need to explore from. It is
  // refilled with instructions the next update has to look at.
  SmallVector<const Instruction *, 8> Worklist(ToBeExploredFrom.begin(),
                                               ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();
    LLVM_DEBUG(dbgs() << "[AAIsDead] Exploration inst: " << *I << "\n");

    // Fast forward for better readability of the output.
    while (!I->isTerminator() && !isa<CallBase>(I))
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    // TODO: look for (assumed) UB to backwards propagate "deadness".
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Switch:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<SwitchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        Change = ChangeStatus::CHANGED;
    }

    LLVM_DEBUG(dbgs() << "[AAIsDead] #AliveSuccessors: "
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");

    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        Worklist.push_back(AliveSuccessor);
      } else {
        // Record the alive edge, it might have been a known one before.
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          Change = ChangeStatus::CHANGED;
        if (assumeLive(A, *AliveSuccessor->getParent()))
          Worklist.push_back(AliveSuccessor);
      }
    }
  }

  // Check if the content of ToBeExploredFrom changed, ignore the order.
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      })) {
    Change = ChangeStatus::CHANGED;
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  }

  // If we know everything is live there is no need to query for liveness.
  // Instead, indicating a pessimistic fixpoint will cause the state to be
  // "invalid" and all queries to be answered conservatively without lookups.
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  return Change;
}
/// Liveness information for a call sites.
struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    llvm_unreachable("Abstract attributes for liveness are not "
                     "supported for call sites yet!");
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};
struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(),
               {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
               Attrs, /* IgnoreSubsumingPositions */ false);
    for (const Attribute &Attr : Attrs)
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());

    // Ensure we initialize the non-null AA (if any) before we use it below.
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);

    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::getState().
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }

  /// Helper function for collecting accessed bytes in must-be-executed-context.
  void addAccessedBytesForUse(Attributor &A, const Use *U, const Instruction *I,
                              DerefState &State) {
    const Value *UseV = U->get();
    if (!UseV->getType()->isPointerTy())
      return;

    std::optional<MemoryLocation> Loc = MemoryLocation::getOrNone(I);
    if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
      return;

    int64_t Offset;
    const Value *Base = GetPointerBaseWithConstantOffset(
        Loc->Ptr, Offset, A.getDataLayout(), /* AllowNonInbounds */ true);
    if (Base && Base == &getAssociatedValue())
      State.addAccessedBytes(Offset, Loc->Size.getValue());
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AADereferenceable::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
        A, *this, getAssociatedValue(), U, I, IsNonNull, TrackUse);
    LLVM_DEBUG(dbgs() << "[AADereferenceable] Deref bytes: " << DerefBytes
                      << " for instruction " << *I << "\n");

    addAccessedBytesForUse(A, U, I, State);
    State.takeKnownDerefBytesMaximum(DerefBytes);
    return TrackUse;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    bool IsKnownNonNull;
    bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    if (IsAssumedNonNull &&
        A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
      A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
      return ChangeStatus::CHANGED;
    }
    return AADereferenceable::manifest(A);
  }

  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    // TODO: Add *_globally support
    bool IsKnownNonNull;
    bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    if (IsAssumedNonNull)
      Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
          Ctx, getAssumedDereferenceableBytes()));
    else
      Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
          Ctx, getAssumedDereferenceableBytes()));
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (!getAssumedDereferenceableBytes())
      return "unknown-dereferenceable";
    bool IsKnownNonNull;
    bool IsAssumedNonNull = false;
    if (A)
      IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
          *A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    return std::string("dereferenceable") +
           (IsAssumedNonNull ? "" : "_or_null") +
           (isAssumedGlobal() ? "_globally" : "") + "<" +
           std::to_string(getKnownDereferenceableBytes()) + "-" +
           std::to_string(getAssumedDereferenceableBytes()) + ">" +
           (!A ? " [non-null is unknown]" : "");
  }
};
/// Dereferenceable attribute for a floating value.
struct AADereferenceableFloating : AADereferenceableImpl {
  AADereferenceableFloating(const IRPosition &IRP, Attributor &A)
      : AADereferenceableImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool Stripped;
    bool UsedAssumedInformation = false;
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      AA::AnyScope, UsedAssumedInformation)) {
      Values.push_back({getAssociatedValue(), getCtxI()});
      Stripped = false;
    } else {
      Stripped = Values.size() != 1 ||
                 Values.front().getValue() != &getAssociatedValue();
    }

    const DataLayout &DL = A.getDataLayout();
    DerefState T;

    auto VisitValueCB = [&](const Value &V) -> bool {
      unsigned IdxWidth =
          DL.getIndexSizeInBits(V.getType()->getPointerAddressSpace());
      APInt Offset(IdxWidth, 0);
      const Value *Base = stripAndAccumulateOffsets(
          A, *this, &V, DL, Offset, /* GetMinOffset */ false,
          /* AllowNonInbounds */ true);

      const auto *AA = A.getAAFor<AADereferenceable>(
          *this, IRPosition::value(*Base), DepClassTy::REQUIRED);
      int64_t DerefBytes = 0;
      if (!AA || (!Stripped && this == AA)) {
        // Use IR information if we did not strip anything.
        bool CanBeNull, CanBeFreed;
        DerefBytes =
            Base->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
        T.GlobalState.indicatePessimisticFixpoint();
      } else {
        const DerefState &DS = AA->getState();
        DerefBytes = DS.DerefBytesState.getAssumed();
        T.GlobalState &= DS.GlobalState;
      }

      // For now we do not try to "increase" dereferenceability due to negative
      // indices; clamp the offset at zero.
      int64_t OffsetSExt = Offset.getSExtValue();
      if (OffsetSExt < 0)
        OffsetSExt = 0;

      T.takeAssumedDerefBytesMinimum(
          std::max(int64_t(0), DerefBytes - OffsetSExt));

      if (this == AA) {
        if (!Stripped) {
          // If nothing was stripped IR information is all we got.
          T.takeKnownDerefBytesMaximum(
              std::max(int64_t(0), DerefBytes - OffsetSExt));
          T.indicatePessimisticFixpoint();
        } else if (OffsetSExt > 0) {
          // If something was stripped but there is circular reasoning we look
          // for the offset. If it is positive we basically decrease the
          // dereferenceable bytes in a circular loop now, which will simply
          // drive them down to the known value in a very slow way which we
          // can accelerate.
          T.indicatePessimisticFixpoint();
        }
      }

      return T.isValidState();
    };

    for (const auto &VAC : Values)
      if (!VisitValueCB(*VAC.getValue()))
        return indicatePessimisticFixpoint();

    return clampStateAndIndicateChange(getState(), T);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(dereferenceable)
  }
};
/// Dereferenceable attribute for a return value.
struct AADereferenceableReturned final
    : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
  using Base =
      AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(dereferenceable)
  }
};

/// Dereferenceable attribute for an argument.
struct AADereferenceableArgument final
    : AAArgumentFromCallSiteArguments<AADereferenceable,
                                      AADereferenceableImpl> {
  using Base =
      AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableArgument(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(dereferenceable)
  }
};

/// Dereferenceable attribute for a call site argument.
struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
  AADereferenceableCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AADereferenceableFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(dereferenceable)
  }
};

/// Dereferenceable attribute deduction for a call site return value.
struct AADereferenceableCallSiteReturned final
    : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
  using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(dereferenceable)
  }
};
static unsigned getKnownAlignForUse(Attributor &A, AAAlign &QueryingAA,
                                    Value &AssociatedValue, const Use *U,
                                    const Instruction *I, bool &TrackUse) {
  // Follow common pointer manipulations to the accesses they feed.
  if (isa<CastInst>(I)) {
    TrackUse = true;
    return 0;
  }
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    if (GEP->hasAllConstantIndices())
      TrackUse = true;
    return 0;
  }

  MaybeAlign MA;
  if (const auto *CB = dyn_cast<CallBase>(I)) {
    if (CB->isBundleOperand(U) || CB->isCallee(U))
      return 0;
    unsigned ArgNo = CB->getArgOperandNo(U);
    IRPosition IRP = IRPosition::callsite_argument(*CB, ArgNo);
    // Only known information is used, so no dependence is recorded.
    if (auto *AlignAA = A.getAAFor<AAAlign>(QueryingAA, IRP, DepClassTy::NONE))
      MA = MaybeAlign(AlignAA->getKnownAlign());
  }

  const DataLayout &DL = A.getDataLayout();
  const Value *UseV = U->get();
  if (auto *SI = dyn_cast<StoreInst>(I)) {
    if (SI->getPointerOperand() == UseV)
      MA = SI->getAlign();
  } else if (auto *LI = dyn_cast<LoadInst>(I)) {
    if (LI->getPointerOperand() == UseV)
      MA = LI->getAlign();
  } else if (auto *AI = dyn_cast<AtomicRMWInst>(I)) {
    if (AI->getPointerOperand() == UseV)
      MA = AI->getAlign();
  } else if (auto *AI = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (AI->getPointerOperand() == UseV)
      MA = AI->getAlign();
  }

  if (!MA || *MA <= QueryingAA.getKnownAlign())
    return 0;

  unsigned Alignment = MA->value();
  int64_t Offset;
  if (const Value *Base = GetPointerBaseWithConstantOffset(UseV, Offset, DL)) {
    if (Base == &AssociatedValue) {
      // BasePointerAddr + Offset = Alignment * Q for some integer Q, so the
      // largest power of two dividing gcd(Offset, Alignment) is an alignment.
      uint32_t gcd = std::gcd(uint32_t(abs((int32_t)Offset)), Alignment);
      Alignment = llvm::bit_floor(gcd);
    }
  }
  return Alignment;
}
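// Why the gcd above: if a use such as `store i32 0, ptr %q, align 16` sits at
// a constant byte offset from the associated value, only an alignment
// compatible with both the use alignment and that offset can be transferred
// back. For example, with Alignment = 16 and Offset = 4,
// gcd(|4|, 16) = 4, so the associated pointer is only known to be 4-aligned.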
struct AAAlignImpl : AAAlign {
  AAAlignImpl(const IRPosition &IRP, Attributor &A) : AAAlign(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(), {Attribute::Alignment}, Attrs);
    for (const Attribute &Attr : Attrs)
      takeKnownMaximum(Attr.getValueAsInt());

    Value &V = *getAssociatedValue().stripPointerCasts();
    takeKnownMaximum(V.getPointerAlignment(A.getDataLayout()).value());

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus InstrChanged = ChangeStatus::UNCHANGED;

    // Check for users that allow alignment annotations.
    Value &AssociatedValue = getAssociatedValue();
    if (isa<ConstantData>(AssociatedValue))
      return ChangeStatus::UNCHANGED;

    for (const Use &U : AssociatedValue.uses()) {
      if (auto *SI = dyn_cast<StoreInst>(U.getUser())) {
        if (SI->getPointerOperand() == &AssociatedValue)
          if (SI->getAlign() < getAssumedAlign()) {
            STATS_DECLTRACK(AAAlign, Store,
                            "Number of times alignment added to a store");
            SI->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
      } else if (auto *LI = dyn_cast<LoadInst>(U.getUser())) {
        if (LI->getPointerOperand() == &AssociatedValue)
          if (LI->getAlign() < getAssumedAlign()) {
            LI->setAlignment(getAssumedAlign());
            STATS_DECLTRACK(AAAlign, Load,
                            "Number of times alignment added to a load");
            InstrChanged = ChangeStatus::CHANGED;
          }
      } else if (auto *RMW = dyn_cast<AtomicRMWInst>(U.getUser())) {
        if (RMW->getPointerOperand() == &AssociatedValue) {
          if (RMW->getAlign() < getAssumedAlign()) {
            STATS_DECLTRACK(AAAlign, AtomicRMW,
                            "Number of times alignment added to atomicrmw");
            RMW->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
        }
      } else if (auto *CAS = dyn_cast<AtomicCmpXchgInst>(U.getUser())) {
        if (CAS->getPointerOperand() == &AssociatedValue) {
          if (CAS->getAlign() < getAssumedAlign()) {
            STATS_DECLTRACK(AAAlign, AtomicCmpXchg,
                            "Number of times alignment added to cmpxchg");
            CAS->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
        }
      }
    }

    ChangeStatus Changed = AAAlign::manifest(A);

    Align InheritAlign =
        getAssociatedValue().getPointerAlignment(A.getDataLayout());
    if (InheritAlign >= getAssumedAlign())
      return InstrChanged;
    return Changed | InstrChanged;
  }

  // TODO: Provide a helper to determine the implied ABI alignment and check in
  //       the existing manifest method and a new one for AAAlignImpl that value
  //       to avoid making the alignment explicit if it did not improve.

  /// See AbstractAttribute::getDeducedAttributes
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (getAssumedAlign() > 1)
      Attrs.emplace_back(
          Attribute::getWithAlignment(Ctx, Align(getAssumedAlign())));
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AAAlign::StateType &State) {
    bool TrackUse = false;

    unsigned int KnownAlign =
        getKnownAlignForUse(A, *this, getAssociatedValue(), U, I, TrackUse);
    State.takeKnownMaximum(KnownAlign);

    return TrackUse;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "align<" + std::to_string(getKnownAlign().value()) + "-" +
           std::to_string(getAssumedAlign().value()) + ">";
  }
};
/// Align attribute for a floating value.
struct AAAlignFloating : AAAlignImpl {
  AAAlignFloating(const IRPosition &IRP, Attributor &A) : AAAlignImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const DataLayout &DL = A.getDataLayout();

    bool Stripped;
    bool UsedAssumedInformation = false;
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      AA::AnyScope, UsedAssumedInformation)) {
      Values.push_back({getAssociatedValue(), getCtxI()});
      Stripped = false;
    } else {
      Stripped = Values.size() != 1 ||
                 Values.front().getValue() != &getAssociatedValue();
    }

    StateType T;
    auto VisitValueCB = [&](Value &V) -> bool {
      if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
        return true;
      const auto *AA = A.getAAFor<AAAlign>(*this, IRPosition::value(V),
                                           DepClassTy::REQUIRED);
      if (!AA || (!Stripped && this == AA)) {
        // Use only IR information if we did not strip anything.
        unsigned Alignment = 1;
        Alignment = V.getPointerAlignment(DL).value();
        T.takeKnownMaximum(Alignment);
        T.indicatePessimisticFixpoint();
      } else {
        // Use abstract attribute information.
        const AAAlign::StateType &DS = AA->getState();
        T ^= DS;
      }
      return T.isValidState();
    };

    for (const auto &VAC : Values) {
      if (!VisitValueCB(*VAC.getValue()))
        return indicatePessimisticFixpoint();
    }

    // TODO: If we know we visited all incoming values, thus none are assumed
    //       dead, we can take the known information from the state T.
    return clampStateAndIndicateChange(getState(), T);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FLOATING_ATTR(align) }
};
/// Align attribute for function return value.
struct AAAlignReturned final
    : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
  using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
  AAAlignReturned(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(aligned) }
};

/// Align attribute for function argument.
struct AAAlignArgument final
    : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
  using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
  AAAlignArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // If the associated argument is involved in a must-tail call we give up
    // because we would need to keep the argument alignments of caller and
    // callee in-sync.
    if (A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
      return ChangeStatus::UNCHANGED;
    return Base::manifest(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(aligned) }
};

struct AAAlignCallSiteArgument final : AAAlignFloating {
  AAAlignCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAlignFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // If the associated argument is involved in a must-tail call we give up
    // because we would need to keep the argument alignments of caller and
    // callee in-sync.
    if (Argument *Arg = getAssociatedArgument())
      if (A.getInfoCache().isInvolvedInMustTailCall(*Arg))
        return ChangeStatus::UNCHANGED;
    ChangeStatus Changed = AAAlignImpl::manifest(A);
    Align InheritAlign =
        getAssociatedValue().getPointerAlignment(A.getDataLayout());
    if (InheritAlign >= getAssumedAlign())
      Changed = ChangeStatus::UNCHANGED;
    return Changed;
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = AAAlignFloating::updateImpl(A);
    if (Argument *Arg = getAssociatedArgument()) {
      // We only take known information from the argument because we do not
      // want to add a dependence here.
      const auto *ArgAlignAA = A.getAAFor<AAAlign>(
          *this, IRPosition::argument(*Arg), DepClassTy::NONE);
      if (ArgAlignAA)
        takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
    }
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(aligned) }
};

/// Align attribute deduction for a call site return value.
struct AAAlignCallSiteReturned final
    : AACalleeToCallSite<AAAlign, AAAlignImpl> {
  using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
  AAAlignCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(align) }
};
struct AANoReturnImpl : public AANoReturn {
  AANoReturnImpl(const IRPosition &IRP, Attributor &A) : AANoReturn(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noreturn" : "may-return";
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckForNoReturn = [](Instruction &) { return false; };
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoReturn, *this,
                                   {(unsigned)Instruction::Ret},
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }
};

struct AANoReturnFunction final : AANoReturnImpl {
  AANoReturnFunction(const IRPosition &IRP, Attributor &A)
      : AANoReturnImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(noreturn) }
};

/// NoReturn attribute deduction for a call sites.
struct AANoReturnCallSite final
    : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
  AANoReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(noreturn) }
};
struct AAInstanceInfoImpl : public AAInstanceInfo {
  AAInstanceInfoImpl(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfo(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *C = dyn_cast<Constant>(&V)) {
      if (C->isThreadDependent())
        indicatePessimisticFixpoint();
      else
        indicateOptimisticFixpoint();
      return;
    }
    // ...
    if (auto *I = dyn_cast<Instruction>(&V)) {
      const auto *CI =
          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *I->getFunction());
      if (mayBeInCycle(CI, I, /* HeaderOnly */ false)) {
        indicatePessimisticFixpoint();
        return;
      }
    }
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;

    Value &V = getAssociatedValue();
    const Function *Scope = nullptr;
    if (auto *I = dyn_cast<Instruction>(&V))
      Scope = I->getFunction();
    if (auto *Arg = dyn_cast<Argument>(&V)) {
      Scope = Arg->getParent();
      if (!Scope->hasLocalLinkage())
        return Changed;
    }
    if (!Scope)
      return indicateOptimisticFixpoint();

    bool IsKnownNoRecurse;
    if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
            A, this, IRPosition::function(*Scope), DepClassTy::OPTIONAL,
            IsKnownNoRecurse))
      return Changed;

    auto UsePred = [&](const Use &U, bool &Follow) {
      const Instruction *UserI = dyn_cast<Instruction>(U.getUser());
      // ...
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        // This check does not guarantee uniqueness, but it avoids ending up
        // with two versions of the value thinking it was one.
        auto *Callee = CB->getCalledFunction();
        if (!Callee || !Callee->hasLocalLinkage())
          return true;
        if (!CB->isArgOperand(&U))
          return false;
        const auto *ArgInstanceInfoAA = A.getAAFor<AAInstanceInfo>(
            *this, IRPosition::callsite_argument(*CB, CB->getArgOperandNo(&U)),
            DepClassTy::OPTIONAL);
        if (!ArgInstanceInfoAA ||
            !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
          return false;
        // If this call base might reach the scope again we might forward the
        // argument back here. This is very conservative.
        if (AA::isPotentiallyReachable(
                A, *CB, *Scope, *this, /* ExclusionSet */ nullptr,
                [Scope](const Function &Fn) { return &Fn != Scope; }))
          return false;
        return true;
      }
      return false;
    };

    auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
      if (auto *SI = dyn_cast<StoreInst>(OldU.getUser())) {
        auto *Ptr = SI->getPointerOperand()->stripPointerCasts();
        if ((isa<AllocaInst>(Ptr) || isNoAliasCall(Ptr)) &&
            AA::isDynamicallyUnique(A, *this, *Ptr))
          return true;
      }
      return false;
    };

    if (!A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ true,
                           DepClassTy::OPTIONAL,
                           /* IgnoreDroppableUses */ true, EquivalentUseCB))
      return indicatePessimisticFixpoint();

    return Changed;
  }

  /// See AbstractState::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedUniqueForAnalysis() ? "<unique [fAa]>" : "<unknown>";
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};

/// InstanceInfo attribute for floating values.
struct AAInstanceInfoFloating : AAInstanceInfoImpl {
  AAInstanceInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {}
};

/// InstanceInfo attribute for function arguments.
struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
  AAInstanceInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoFloating(IRP, A) {}
};

/// InstanceInfo attribute for call site arguments.
struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
  AAInstanceInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA =
        A.getAAFor<AAInstanceInfo>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }
};

/// InstanceInfo attribute for returned values.
struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
  AAInstanceInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {
    llvm_unreachable("InstanceInfo is not supported for returned values!");
  }
};

/// InstanceInfo attribute deduction for a call site return value.
struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
  AAInstanceInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoFloating(IRP, A) {}
};
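// "Unique for analysis" above means the value denotes a single runtime object
// from this function's point of view, e.g. an alloca in a function that is
// not (assumed) recursive. A recursive function, in contrast, may have many
// live instances of the same alloca at once, so per-instance reasoning such
// as store forwarding or heap-to-stack conversion has to be suppressed there.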
bool AANoCapture::isImpliedByIR(Attributor &A, const IRPosition &IRP,
                                Attribute::AttrKind ImpliedAttributeKind,
                                bool IgnoreSubsumingPositions) {
  assert(ImpliedAttributeKind == Attribute::Captures &&
         "Unexpected attribute kind");
  Value &V = IRP.getAssociatedValue();
  // You cannot "capture" null in the default address space.
  if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
                             V.getType()->getPointerAddressSpace() == 0))
    return true;

  SmallVector<Attribute, 1> Attrs;
  A.getAttrs(IRP, {Attribute::Captures}, Attrs,
             /* IgnoreSubsumingPositions */ true);
  // ...
  if (Argument *Arg = IRP.getAssociatedArgument()) {
    A.getAttrs(IRPosition::argument(*Arg),
               {Attribute::Captures, Attribute::ByVal}, Attrs,
               /* IgnoreSubsumingPositions */ true);
    // ...
  }
  return false;
}

/// Set the NOT_CAPTURED_IN_MEM and NOT_CAPTURED_IN_RET bits in \p State
/// depending on the ability of the function associated with \p IRP to capture
/// state in memory and through "returning/throwing", respectively.
static void determineFunctionCaptureCapabilities(const IRPosition &IRP,
                                                 const Function &F,
                                                 BitIntegerState &State) {
  // If we know we cannot communicate or write to memory, we do not care about
  // ptr2int anymore.
  bool ReadOnly = F.onlyReadsMemory();
  bool NoThrow = F.doesNotThrow();
  bool IsVoidReturn = F.getReturnType()->isVoidTy();
  if (ReadOnly && NoThrow && IsVoidReturn) {
    State.addKnownBits(AANoCapture::NO_CAPTURE);
    return;
  }
  // ...
  if (NoThrow && IsVoidReturn)
    State.addKnownBits(AANoCapture::NOT_CAPTURED_IN_RET);

  // Check existing "returned" attributes.
  int ArgNo = IRP.getCalleeArgNo();
  if (!NoThrow || ArgNo < 0 ||
      !F.getAttributes().hasAttrSomewhere(Attribute::Returned))
    return;

  for (unsigned U = 0, E = F.arg_size(); U < E; ++U)
    if (F.hasParamAttribute(U, Attribute::Returned)) {
      if (U == unsigned(ArgNo))
        State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_RET);
      else if (ReadOnly)
        State.addKnownBits(AANoCapture::NO_CAPTURE);
      else
        State.addKnownBits(AANoCapture::NOT_CAPTURED_IN_RET);
      break;
    }
}

/// A class to hold the state of for no-capture attributes.
struct AANoCaptureImpl : public AANoCapture {
  AANoCaptureImpl(const IRPosition &IRP, Attributor &A) : AANoCapture(IRP, A) {}

  /// See AbstractAttribute::getDeducedAttributes(...).
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (!isAssumedNoCaptureMaybeReturned())
      return;

    if (isArgumentPosition()) {
      if (isAssumedNoCapture())
        Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
      else if (ManifestInternal)
        Attrs.emplace_back(Attribute::get(Ctx, "no-capture-maybe-returned"));
    }
  }

  /// See AbstractState::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (isKnownNoCapture())
      return "known not-captured";
    if (isAssumedNoCapture())
      return "assumed not-captured";
    if (isKnownNoCaptureMaybeReturned())
      return "known not-captured-maybe-returned";
    if (isAssumedNoCaptureMaybeReturned())
      return "assumed not-captured-maybe-returned";
    return "assumed-captured";
  }
  /// Check the use \p U and update \p State accordingly. Return true if we
  /// should continue to update the state.
  bool checkUse(Attributor &A, AANoCapture::StateType &State, const Use &U,
                bool &Follow) {
    Instruction *UInst = cast<Instruction>(U.getUser());
    LLVM_DEBUG(dbgs() << "[AANoCapture] Check use: " << *U.get() << " in "
                      << *UInst << "\n");

    // Deal with ptr2int by following uses.
    if (isa<PtrToIntInst>(UInst))
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ true);

    // For stores we already checked if we can follow them, if they make it
    // here we give up.
    if (isa<StoreInst>(UInst))
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ true);

    // Explicitly catch return instructions.
    if (isa<ReturnInst>(UInst)) {
      if (UInst->getFunction() == getAnchorScope())
        return isCapturedIn(State, /* Memory */ false, /* Integer */ false,
                            /* Return */ true);
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ true);
    }

    // For now we only use special logic for call sites.
    auto *CB = dyn_cast<CallBase>(UInst);
    if (!CB || !CB->isArgOperand(&U))
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ true);

    unsigned ArgNo = CB->getArgOperandNo(&U);
    const IRPosition &CSArgPos = IRPosition::callsite_argument(*CB, ArgNo);
    // If we have an abstract no-capture attribute for the argument we can use
    // it to justify a non-capture attribute here. This allows recursion!
    bool IsKnownNoCapture;
    const AANoCapture *ArgNoCaptureAA = nullptr;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
        A, this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
        &ArgNoCaptureAA);
    if (IsAssumedNoCapture)
      return isCapturedIn(State, /* Memory */ false, /* Integer */ false,
                          /* Return */ false);
    if (ArgNoCaptureAA && ArgNoCaptureAA->isAssumedNoCaptureMaybeReturned()) {
      Follow = true;
      return isCapturedIn(State, /* Memory */ false, /* Integer */ false,
                          /* Return */ false);
    }

    // Lastly, we could not find a reason no-capture can be assumed so we don't.
    return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                        /* Return */ true);
  }

  /// Update \p State according to \p CapturedInMem, \p CapturedInInt, and
  /// \p CapturedInRet, then return true if we should continue updating.
  static bool isCapturedIn(AANoCapture::StateType &State, bool CapturedInMem,
                           bool CapturedInInt, bool CapturedInRet) {
    LLVM_DEBUG(dbgs() << " - captures [Mem " << CapturedInMem << "|Int "
                      << CapturedInInt << "|Ret " << CapturedInRet << "]\n");
    if (CapturedInMem)
      State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_MEM);
    if (CapturedInInt)
      State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_INT);
    if (CapturedInRet)
      State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_RET);
    return State.isAssumed(AANoCapture::NO_CAPTURE_MAYBE_RETURNED);
  }
};

ChangeStatus AANoCaptureImpl::updateImpl(Attributor &A) {
  const IRPosition &IRP = getIRPosition();
  Value *V = isArgumentPosition() ? IRP.getAssociatedArgument()
                                  : &IRP.getAssociatedValue();
  if (!V)
    return indicatePessimisticFixpoint();

  const Function *F =
      isArgumentPosition() ? IRP.getAssociatedFunction() : IRP.getAnchorScope();
  if (!F)
    return indicatePessimisticFixpoint();

  AANoCapture::StateType T;
  const IRPosition &FnPos = IRPosition::function(*F);

  // Readonly means we cannot capture through memory.
  bool IsKnown;
  if (AA::isAssumedReadOnly(A, FnPos, *this, IsKnown)) {
    T.addKnownBits(NOT_CAPTURED_IN_MEM);
    if (IsKnown)
      addKnownBits(NOT_CAPTURED_IN_MEM);
  }

  // A function cannot communicate state back through the return value if the
  // only returned values are other arguments or constants.
  auto CheckReturnedArgs = [&](bool &UsedAssumedInformation) {
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(IRPosition::returned(*F), this, Values,
                                      AA::ValueScope::Intraprocedural,
                                      UsedAssumedInformation))
      return false;
    bool SeenConstant = false;
    for (const AA::ValueAndContext &VAC : Values) {
      if (isa<Constant>(VAC.getValue())) {
        if (SeenConstant)
          return false;
        SeenConstant = true;
      } else if (!isa<Argument>(VAC.getValue()) ||
                 VAC.getValue() == getAssociatedArgument())
        return false;
    }
    return true;
  };

  bool IsKnownNoUnwind;
  if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
          A, this, FnPos, DepClassTy::OPTIONAL, IsKnownNoUnwind)) {
    bool IsVoidTy = F->getReturnType()->isVoidTy();
    bool UsedAssumedInformation = false;
    if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
      T.addKnownBits(NOT_CAPTURED_IN_RET);
      if (T.isKnown(NOT_CAPTURED_IN_MEM))
        return ChangeStatus::UNCHANGED;
      if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
        addKnownBits(NOT_CAPTURED_IN_RET);
        if (isKnown(NOT_CAPTURED_IN_MEM))
          return indicateOptimisticFixpoint();
      }
    }
  }

  auto UseCheck = [&](const Use &U, bool &Follow) -> bool {
    // ...
    return checkUse(A, T, U, Follow);
  };

  if (!A.checkForAllUses(UseCheck, *this, *V))
    return indicatePessimisticFixpoint();

  AANoCapture::StateType &S = getState();
  auto Assumed = S.getAssumed();
  S.intersectAssumedBits(T.getAssumed());
  if (!isAssumedNoCaptureMaybeReturned())
    return indicatePessimisticFixpoint();
  return Assumed == S.getAssumed() ? ChangeStatus::UNCHANGED
                                   : ChangeStatus::CHANGED;
}
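// Capture deduction sketch for the update above (invented C-level example):
//
//   static int *G;
//   void f(int *p) { G = p; }     // captured in memory: loses NOT_CAPTURED_IN_MEM
//   int  g(int *p) { return *p; } // value only read: %p can become captures(none)
//   int *h(int *p) { return p; }  // escapes via the return: at best
//                                 //   "no-capture-maybe-returned"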
/// NoCapture attribute for function arguments.
struct AANoCaptureArgument final : AANoCaptureImpl {
  AANoCaptureArgument(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nocapture) }
};

/// NoCapture attribute for call site arguments.
struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
  AANoCaptureCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    bool IsKnownNoCapture;
    const AANoCapture *ArgAA = nullptr;
    if (AA::hasAssumedIRAttr<Attribute::Captures>(
            A, this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
            &ArgAA))
      return ChangeStatus::UNCHANGED;
    if (!ArgAA || !ArgAA->isAssumedNoCaptureMaybeReturned())
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(nocapture)
  }
};

/// NoCapture attribute for floating values.
struct AANoCaptureFloating final : AANoCaptureImpl {
  AANoCaptureFloating(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(nocapture)
  }
};

/// NoCapture attribute for function return value.
struct AANoCaptureReturned final : AANoCaptureImpl {
  AANoCaptureReturned(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {
    llvm_unreachable("NoCapture is not applicable to function returns!");
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};

/// NoCapture attribute deduction for a call site return value.
struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
  AANoCaptureCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    const Function *F = getAnchorScope();
    // Check what state the associated function can actually capture.
    determineFunctionCaptureCapabilities(getIRPosition(), *F, *this);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(nocapture)
  }
};
/// ------------------ Value Simplify Attribute ----------------------------

struct AAValueSimplifyImpl : AAValueSimplify {
  AAValueSimplifyImpl(const IRPosition &IRP, Attributor &A)
      : AAValueSimplify(IRP, A) {}

  /// Merge \p Other into the currently assumed simplified value.
  bool unionAssumed(std::optional<Value *> Other) {
    if (!AA::combineOptionalValuesInAAValueLatice(SimplifiedAssociatedValue,
                                                  Other, getAssociatedType()))
      return false;

    LLVM_DEBUG({
      if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
        dbgs() << "[ValueSimplify] is assumed to be "
               << **SimplifiedAssociatedValue << "\n";
      else
        dbgs() << "[ValueSimplify] is assumed to be <none>\n";
    });
    return true;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    if (getAssociatedValue().getType()->isVoidTy())
      indicatePessimisticFixpoint();
    if (A.hasSimplificationCallback(getIRPosition()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    LLVM_DEBUG({
      dbgs() << "SAV: " << (bool)SimplifiedAssociatedValue << " ";
      if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
        dbgs() << "SAV: " << **SimplifiedAssociatedValue << " ";
    });
    return isValidState() ? (isAtFixpoint() ? "simplified" : "maybe-simple")
                          : "not-simple";
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

  /// See AAValueSimplify::getAssumedSimplifiedValue()
  std::optional<Value *>
  getAssumedSimplifiedValue(Attributor &A) const override {
    return SimplifiedAssociatedValue;
  }

  /// Ensure the return value is \p V with type \p Ty, if not possible return
  /// nullptr. If \p Check is true we will only verify such an operation would
  /// succeed and return a non-nullptr value if that is the case.
  static Value *ensureType(Attributor &A, Value &V, Type &Ty, Instruction *CtxI,
                           bool Check) {
    if (auto *TypedV = AA::getWithType(V, Ty))
      return TypedV;
    if (CtxI && V.getType()->canLosslesslyBitCastTo(&Ty))
      return Check ? &V
                   : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
                         &V, &Ty, "", CtxI->getIterator());
    return nullptr;
  }

  /// Reproduce \p I with type \p Ty or return nullptr if that is not possible.
  static Value *reproduceInst(Attributor &A,
                              const AbstractAttribute &QueryingAA,
                              Instruction &I, Type &Ty, Instruction *CtxI,
                              bool Check, ValueToValueMapTy &VMap) {
    assert(CtxI && "Cannot reproduce an instruction without context!");
    if (Check && (I.mayReadFromMemory() ||
                  !isSafeToSpeculativelyExecute(&I, CtxI, /* DT */ nullptr,
                                                /* TLI */ nullptr)))
      return nullptr;
    for (Value *Op : I.operands()) {
      Value *NewOp = reproduceValue(A, QueryingAA, *Op, Ty, CtxI, Check, VMap);
      if (!NewOp) {
        assert(Check && "Manifest of new value unexpectedly failed!");
        return nullptr;
      }
      if (!Check)
        VMap[Op] = NewOp;
    }
    if (Check)
      return &I;
    // ...
    return nullptr;
  }

  /// Reproduce \p V with type \p Ty or return nullptr if that is not possible.
  static Value *reproduceValue(Attributor &A,
                               const AbstractAttribute &QueryingAA, Value &V,
                               Type &Ty, Instruction *CtxI, bool Check,
                               ValueToValueMapTy &VMap) {
    if (const auto &NewV = VMap.lookup(&V))
      return NewV;
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimpleV = A.getAssumedSimplified(
        V, QueryingAA, UsedAssumedInformation, AA::Interprocedural);
    if (!SimpleV.has_value())
      return PoisonValue::get(&Ty);
    Value *EffectiveV = &V;
    if (*SimpleV)
      EffectiveV = *SimpleV;
    if (auto *C = dyn_cast<Constant>(EffectiveV))
      return C;
    if (CtxI && AA::isValidAtPosition(AA::ValueAndContext(*EffectiveV, *CtxI),
                                      A.getInfoCache()))
      return ensureType(A, *EffectiveV, Ty, CtxI, Check);
    if (auto *I = dyn_cast<Instruction>(EffectiveV))
      if (Value *NewV = reproduceInst(A, QueryingAA, *I, Ty, CtxI, Check, VMap))
        return ensureType(A, *NewV, Ty, CtxI, Check);
    return nullptr;
  }

  /// Return a value we can use as replacement for the associated one, or
  /// nullptr if we don't have one that makes sense.
  Value *manifestReplacementValue(Attributor &A, Instruction *CtxI) const {
    Value *NewV = SimplifiedAssociatedValue
                      ? *SimplifiedAssociatedValue
                      : UndefValue::get(getAssociatedType());
    if (NewV && NewV != &getAssociatedValue()) {
      ValueToValueMapTy VMap;
      // First verify we can reproduce the value with the required type at the
      // context location before we actually start modifying the IR.
      if (reproduceValue(A, *this, *NewV, *getAssociatedType(), CtxI,
                         /* CheckOnly */ true, VMap))
        return reproduceValue(A, *this, *NewV, *getAssociatedType(), CtxI,
                              /* CheckOnly */ false, VMap);
    }
    return nullptr;
  }

  /// Helper function for querying AAValueSimplify and updating candidate.
  /// \param IRP The value position we are trying to unify with SimplifiedValue
  bool checkAndUpdate(Attributor &A, const AbstractAttribute &QueryingAA,
                      const IRPosition &IRP, bool Simplify = true) {
    bool UsedAssumedInformation = false;
    std::optional<Value *> QueryingValueSimplified = &IRP.getAssociatedValue();
    if (Simplify)
      QueryingValueSimplified = A.getAssumedSimplified(
          IRP, QueryingAA, UsedAssumedInformation, AA::Interprocedural);
    return unionAssumed(QueryingValueSimplified);
  }

  /// Returns a candidate if one is found, via other abstract attributes.
  template <typename AAType> bool askSimplifiedValueFor(Attributor &A) {
    if (!getAssociatedValue().getType()->isIntegerTy())
      return false;

    // This will also pass the call base context.
    const auto *AA =
        A.getAAFor<AAType>(*this, getIRPosition(), DepClassTy::NONE);
    if (!AA)
      return false;

    std::optional<Constant *> COpt = AA->getAssumedConstant(A);
    if (!COpt) {
      SimplifiedAssociatedValue = std::nullopt;
      A.recordDependence(*AA, *this, DepClassTy::OPTIONAL);
      return true;
    }
    if (auto *C = *COpt) {
      SimplifiedAssociatedValue = C;
      A.recordDependence(*AA, *this, DepClassTy::OPTIONAL);
      return true;
    }
    return false;
  }

  bool askSimplifiedValueForOtherAAs(Attributor &A) {
    if (askSimplifiedValueFor<AAValueConstantRange>(A))
      return true;
    if (askSimplifiedValueFor<AAPotentialConstantValues>(A))
      return true;
    return false;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    for (auto &U : getAssociatedValue().uses()) {
      // Check if we need to adjust the insertion point to make sure the IR is
      // valid.
      Instruction *IP = dyn_cast<Instruction>(U.getUser());
      if (auto *PHI = dyn_cast_or_null<PHINode>(IP))
        IP = PHI->getIncomingBlock(U)->getTerminator();
      if (auto *NewV = manifestReplacementValue(A, IP)) {
        LLVM_DEBUG(dbgs() << "[ValueSimplify] " << getAssociatedValue()
                          << " -> " << *NewV << " :: " << *this << "\n");
        if (A.changeUseAfterManifest(U, *NewV))
          Changed = ChangeStatus::CHANGED;
      }
    }

    return Changed | AAValueSimplify::manifest(A);
  }

  /// See AbstractState::indicatePessimisticFixpoint(...).
  ChangeStatus indicatePessimisticFixpoint() override {
    SimplifiedAssociatedValue = &getAssociatedValue();
    return AAValueSimplify::indicatePessimisticFixpoint();
  }
};
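// What a successful simplification looks like from the IR perspective
// (illustrative only): if every call site passes the constant 7 for %x,
// AAValueSimplifyArgument below records 7 as the simplified value and the
// manifest above rewrites the uses.
//
//   define internal i32 @f(i32 %x) { %r = add i32 %x, 1 ... }
//   call i32 @f(i32 7)   ; only call site
//   ; => uses of %x inside @f are replaced by the constant 7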
struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
  AAValueSimplifyArgument(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AAValueSimplifyImpl::initialize(A);
    if (A.hasAttr(getIRPosition(),
                  {Attribute::InAlloca, Attribute::Preallocated,
                   Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
                  /* IgnoreSubsumingPositions */ true))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // Byval is only replacable if it is readonly otherwise we would write into
    // the replaced value and not the copy that byval creates implicitly.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();

    auto Before = SimplifiedAssociatedValue;

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      const IRPosition &ACSArgPos =
          IRPosition::callsite_argument(ACS, Arg->getArgNo());
      // Check if a coresponding argument was found or if it is on not
      // associated (which can happen for callback calls).
      if (ACSArgPos.getPositionKind() == IRPosition::IRP_INVALID)
        return false;

      // Simplify the argument operand explicitly and check if the result is
      // valid in the current scope.
      bool UsedAssumedInformation = false;
      std::optional<Constant *> SimpleArgOp =
          A.getAssumedConstant(ACSArgPos, *this, UsedAssumedInformation);
      if (!SimpleArgOp)
        return true;
      if (!*SimpleArgOp)
        return false;
      if (!AA::isDynamicallyUnique(A, *this, **SimpleArgOp))
        return false;
      return unionAssumed(*SimpleArgOp);
    };

    // Generate a answer specific to a call site context.
    bool Success;
    bool UsedAssumedInformation = false;
    if (hasCallBaseContext() &&
        getCallBaseContext()->getCalledOperand() == Arg->getParent())
      Success = PredForCallSite(
          AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
    else
      Success = A.checkForAllCallSites(PredForCallSite, *this, true,
                                       UsedAssumedInformation);

    if (!Success)
      if (!askSimplifiedValueForOtherAAs(A))
        return indicatePessimisticFixpoint();

    // If a candidate was found in this update, return CHANGED.
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(value_simplify)
  }
};

struct AAValueSimplifyReturned : AAValueSimplifyImpl {
  AAValueSimplifyReturned(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  /// See AAValueSimplify::getAssumedSimplifiedValue()
  std::optional<Value *>
  getAssumedSimplifiedValue(Attributor &A) const override {
    if (!isValidState())
      return nullptr;
    return SimplifiedAssociatedValue;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto Before = SimplifiedAssociatedValue;

    auto ReturnInstCB = [&](Instruction &I) {
      auto &RI = cast<ReturnInst>(I);
      return checkAndUpdate(
          A, *this,
          IRPosition::value(*RI.getReturnValue(), getCallBaseContext()));
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(ReturnInstCB, *this, {Instruction::Ret},
                                   UsedAssumedInformation))
      if (!askSimplifiedValueForOtherAAs(A))
        return indicatePessimisticFixpoint();

    // If a candidate was found in this update, return CHANGED.
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    // We queried AAValueSimplify for the returned values so they will be
    // replaced if a simplified form was found. Nothing to do here.
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(value_simplify)
  }
};

struct AAValueSimplifyFloating : AAValueSimplifyImpl {
  AAValueSimplifyFloating(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAValueSimplifyImpl::initialize(A);
    Value &V = getAnchorValue();

    // TODO: add other stuffs
    if (isa<Constant>(V))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto Before = SimplifiedAssociatedValue;
    if (!askSimplifiedValueForOtherAAs(A))
      return indicatePessimisticFixpoint();

    // If a candidate was found in this update, return CHANGED.
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(value_simplify)
  }
};

struct AAValueSimplifyFunction : AAValueSimplifyImpl {
  AAValueSimplifyFunction(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    SimplifiedAssociatedValue = nullptr;
    indicateOptimisticFixpoint();
  }
  /// See AbstractAttribute::initialize(...).
  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable(
        "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
  }
  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(value_simplify)
  }
};

struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
  AAValueSimplifyCallSite(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyFunction(IRP, A) {}
  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CS_ATTR(value_simplify)
  }
};

struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
  AAValueSimplifyCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AAValueSimplifyImpl::initialize(A);
    Function *Fn = getAssociatedFunction();
    assert(Fn && "Did expect an associted function");
    for (Argument &Arg : Fn->args()) {
      if (Arg.hasReturnedAttr()) {
        auto IRP = IRPosition::callsite_argument(*cast<CallBase>(getCtxI()),
                                                 Arg.getArgNo());
        if (IRP.getPositionKind() == IRPosition::IRP_CALL_SITE_ARGUMENT &&
            checkAndUpdate(A, *this, IRP))
          indicateOptimisticFixpoint();
        else
          indicatePessimisticFixpoint();
        return;
      }
    }
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(value_simplify)
  }
};

struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
  AAValueSimplifyCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    // TODO: We should avoid simplification duplication to begin with.
    auto *FloatAA = A.lookupAAFor<AAValueSimplify>(
        IRPosition::value(getAssociatedValue()), this, DepClassTy::NONE);
    if (FloatAA && FloatAA->getState().isValidState())
      return Changed;

    if (auto *NewV = manifestReplacementValue(A, getCtxI())) {
      Use &U = cast<CallBase>(&getAnchorValue())
                   ->getArgOperandUse(getCallSiteArgNo());
      if (A.changeUseAfterManifest(U, *NewV))
        Changed = ChangeStatus::CHANGED;
    }

    return Changed | AAValueSimplify::manifest(A);
  }

  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(value_simplify)
  }
};
/// ----------------------- Heap-To-Stack Conversion ---------------------------
struct AAHeapToStackFunction final : public AAHeapToStack {

  struct AllocationInfo {
    /// The call that allocates the memory.
    CallBase *const CB;

    /// The library function id for the allocation.
    LibFunc LibraryFunctionId = NotLibFunc;

    /// The status wrt. a rewrite.
    enum {
      STACK_DUE_TO_USE,
      STACK_DUE_TO_FREE,
      INVALID,
    } Status = STACK_DUE_TO_USE;

    /// Flag to indicate if we encountered a use that might free this
    /// allocation but which is not in the deallocation infos.
    bool HasPotentiallyFreeingUnknownUses = false;

    /// Flag to indicate that we should place the new alloca in the function
    /// entry block rather than where the call site (CB) is.
    bool MoveAllocaIntoEntry = true;

    /// The set of free calls that use this allocation.
    SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
  };

  struct DeallocationInfo {
    /// The call that deallocates the memory.
    CallBase *const CB;
    /// The value freed by the call.
    Value *FreedOp;

    /// Flag to indicate if we don't know all objects this deallocation might
    /// free.
    bool MightFreeUnknownObjects = false;

    /// The set of allocation calls that are potentially freed.
    SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
  };

  AAHeapToStackFunction(const IRPosition &IRP, Attributor &A)
      : AAHeapToStack(IRP, A) {}

  ~AAHeapToStackFunction() {
    // Ensure we call the destructors of the info objects explicitly as they
    // were allocated via the Attributor's bump allocator.
    for (auto &It : AllocationInfos)
      It.second->~AllocationInfo();
    for (auto &It : DeallocationInfos)
      It.second->~DeallocationInfo();
  }

  void initialize(Attributor &A) override {
    AAHeapToStack::initialize(A);

    const Function *F = getAnchorScope();
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);

    auto AllocationIdentifierCB = [&](Instruction &I) {
      CallBase *CB = dyn_cast<CallBase>(&I);
      if (!CB)
        return true;
      if (Value *FreedOp = getFreedOperand(CB, TLI)) {
        DeallocationInfos[CB] = new (A.Allocator) DeallocationInfo{CB, FreedOp};
        return true;
      }
      // To do heap to stack, we need to know that the allocation itself is
      // removable once all uses are rewritten.
      // ...
      auto *I8Ty = Type::getInt8Ty(CB->getParent()->getContext());
      if (getInitialValueOfAllocation(CB, TLI, I8Ty)) {
        AllocationInfo *AI = new (A.Allocator) AllocationInfo{CB};
        AllocationInfos[CB] = AI;
        if (TLI)
          TLI->getLibFunc(*CB, AI->LibraryFunctionId);
      }
      return true;
    };

    bool UsedAssumedInformation = false;
    bool Success = A.checkForAllCallLikeInstructions(
        AllocationIdentifierCB, *this, UsedAssumedInformation,
        /* CheckBBLivenessOnly */ false,
        /* CheckPotentiallyDead */ true);
    (void)Success;
    assert(Success && "Did not expect the call base visit callback to fail!");

    Attributor::SimplifictionCallbackTy SCB =
        [](const IRPosition &, const AbstractAttribute *,
           bool &) -> std::optional<Value *> { return nullptr; };
    for (const auto &It : AllocationInfos)
      A.registerSimplificationCallback(IRPosition::callsite_returned(*It.first),
                                       SCB);
    for (const auto &It : DeallocationInfos)
      A.registerSimplificationCallback(IRPosition::callsite_returned(*It.first),
                                       SCB);
  }

  const std::string getAsStr(Attributor *A) const override {
    unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
    for (const auto &It : AllocationInfos) {
      if (It.second->Status == AllocationInfo::INVALID)
        ++NumInvalidMallocs;
      else
        ++NumH2SMallocs;
    }
    return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) + "/" +
           std::to_string(NumInvalidMallocs);
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECL(
        MallocCalls, Function,
        "Number of malloc/calloc/aligned_alloc calls converted to allocas");
    for (const auto &It : AllocationInfos)
      if (It.second->Status != AllocationInfo::INVALID)
        ++BUILD_STAT_NAME(MallocCalls, Function);
  }

  bool isAssumedHeapToStack(const CallBase &CB) const override {
    if (isValidState())
      if (AllocationInfo *AI =
              AllocationInfos.lookup(const_cast<CallBase *>(&CB)))
        return AI->Status != AllocationInfo::INVALID;
    return false;
  }

  bool isAssumedHeapToStackRemovedFree(CallBase &CB) const override {
    if (!isValidState())
      return false;

    for (const auto &It : AllocationInfos) {
      AllocationInfo &AI = *It.second;
      if (AI.Status == AllocationInfo::INVALID)
        continue;

      if (AI.PotentialFreeCalls.count(&CB))
        return true;
    }

    return false;
  }

  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function *F = getAnchorScope();
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);

    for (auto &It : AllocationInfos) {
      AllocationInfo &AI = *It.second;
      if (AI.Status == AllocationInfo::INVALID)
        continue;

      for (CallBase *FreeCall : AI.PotentialFreeCalls) {
        LLVM_DEBUG(dbgs() << "H2S: Removing free call: " << *FreeCall << "\n");
        A.deleteAfterManifest(*FreeCall);
        HasChanged = ChangeStatus::CHANGED;
      }

      LLVM_DEBUG(dbgs() << "H2S: Removing malloc-like call: " << *AI.CB
                        << "\n");

      auto Remark = [&](OptimizationRemark OR) {
        LibFunc IsAllocShared;
        if (TLI->getLibFunc(*AI.CB, IsAllocShared))
          if (IsAllocShared == LibFunc___kmpc_alloc_shared)
            return OR << "Moving globalized variable to the stack.";
        return OR << "Moving memory allocation from the heap to the stack.";
      };
      if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
        A.emitRemark<OptimizationRemark>(AI.CB, "OMP110", Remark);
      else
        A.emitRemark<OptimizationRemark>(AI.CB, "HeapToStack", Remark);

      const DataLayout &DL = A.getInfoCache().getDL();
      Value *Size;
      std::optional<APInt> SizeAPI = getSize(A, *this, AI);
      if (SizeAPI) {
        Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
      } else {
        LLVMContext &Ctx = AI.CB->getContext();
        ObjectSizeOpts Opts;
        ObjectSizeOffsetEvaluator Eval(DL, TLI, Ctx, Opts);
        SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
        Size = SizeOffsetPair.Size;
      }

      BasicBlock::iterator IP = AI.MoveAllocaIntoEntry
                                    ? F->getEntryBlock().begin()
                                    : AI.CB->getIterator();

      Align Alignment(1);
      if (MaybeAlign RetAlign = AI.CB->getRetAlign())
        Alignment = std::max(Alignment, *RetAlign);
      if (Value *Align = getAllocAlignment(AI.CB, TLI)) {
        std::optional<APInt> AlignmentAPI = getAPInt(A, *this, *Align);
        assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
               "Expected an alignment during manifest!");
        Alignment =
            std::max(Alignment, assumeAligned(AlignmentAPI->getZExtValue()));
      }

      unsigned AS = DL.getAllocaAddrSpace();
      Instruction *Alloca =
          new AllocaInst(Type::getInt8Ty(F->getContext()), AS, Size, Alignment,
                         AI.CB->getName() + ".h2s", IP);

      if (Alloca->getType() != AI.CB->getType())
        Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
            Alloca, AI.CB->getType(), "malloc_cast", AI.CB->getIterator());

      auto *I8Ty = Type::getInt8Ty(F->getContext());
      auto *InitVal = getInitialValueOfAllocation(AI.CB, TLI, I8Ty);
      assert(InitVal &&
             "Must be able to materialize initial memory state of allocation");

      A.changeAfterManifest(IRPosition::inst(*AI.CB), *Alloca);

      if (auto *II = dyn_cast<InvokeInst>(AI.CB)) {
        auto *NBB = II->getNormalDest();
        BranchInst::Create(NBB, AI.CB->getParent());
        A.deleteAfterManifest(*AI.CB);
      } else {
        A.deleteAfterManifest(*AI.CB);
      }

      // Initialize the alloca with the same value as used by the allocation
      // function, if this is not undef.
      if (!isa<UndefValue>(InitVal)) {
        IRBuilder<> Builder(Alloca->getNextNode());
        Builder.CreateMemSet(Alloca, InitVal, Size, std::nullopt);
      }
      HasChanged = ChangeStatus::CHANGED;
    }

    return HasChanged;
  }

  std::optional<APInt> getAPInt(Attributor &A, const AbstractAttribute &AA,
                                Value &V) {
    bool UsedAssumedInformation = false;
    std::optional<Constant *> SimpleV =
        A.getAssumedConstant(V, AA, UsedAssumedInformation);
    if (!SimpleV)
      return APInt(64, 0);
    if (auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
      return CI->getValue();
    return std::nullopt;
  }

  std::optional<APInt> getSize(Attributor &A, const AbstractAttribute &AA,
                               AllocationInfo &AI) {
    auto Mapper = [&](const Value *V) -> const Value * {
      bool UsedAssumedInformation = false;
      if (std::optional<Constant *> SimpleV =
              A.getAssumedConstant(*V, AA, UsedAssumedInformation))
        if (*SimpleV)
          return *SimpleV;
      return V;
    };

    const Function *F = getAnchorScope();
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    return getAllocSize(AI.CB, TLI, Mapper);
  }

  /// Collection of all malloc-like calls in a function with associated
  /// information.
  MapVector<CallBase *, AllocationInfo *> AllocationInfos;

  /// Collection of all free-like calls in a function with associated
  /// information.
  MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;

  ChangeStatus updateImpl(Attributor &A) override;
};
6901ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6904 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6906 const auto *LivenessAA =
6909 MustBeExecutedContextExplorer *Explorer =
6910 A.getInfoCache().getMustBeExecutedContextExplorer();
6912 bool StackIsAccessibleByOtherThreads =
6913 A.getInfoCache().stackIsAccessibleByOtherThreads();
6916 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
6917 std::optional<bool> MayContainIrreducibleControl;
6919 if (&
F->getEntryBlock() == &BB)
6921 if (!MayContainIrreducibleControl.has_value())
6923 if (*MayContainIrreducibleControl)
6932 bool HasUpdatedFrees =
false;
6934 auto UpdateFrees = [&]() {
6935 HasUpdatedFrees =
true;
6937 for (
auto &It : DeallocationInfos) {
6938 DeallocationInfo &DI = *It.second;
6941 if (DI.MightFreeUnknownObjects)
6945 bool UsedAssumedInformation =
false;
6946 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6953 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6954 DI.MightFreeUnknownObjects =
true;
6967 DI.MightFreeUnknownObjects =
true;
6971 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6973 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6975 DI.MightFreeUnknownObjects =
true;
6979 DI.PotentialAllocationCalls.insert(ObjCB);
6983 auto FreeCheck = [&](AllocationInfo &AI) {
6987 if (!StackIsAccessibleByOtherThreads) {
6992 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6993 "other threads and function is not nosync:\n");
6997 if (!HasUpdatedFrees)
7001 if (AI.PotentialFreeCalls.size() != 1) {
7003 << AI.PotentialFreeCalls.size() <<
"\n");
7006 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
7007 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
7010 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
7011 << *UniqueFree <<
"\n");
7014 if (DI->MightFreeUnknownObjects) {
7016 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7019 if (DI->PotentialAllocationCalls.empty())
7021 if (DI->PotentialAllocationCalls.size() > 1) {
7023 << DI->PotentialAllocationCalls.size()
7024 <<
" different allocations\n");
7027 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7030 <<
"[H2S] unique free call not known to free this allocation but "
7031 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7036 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
7038 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7039 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7040 "with the allocation "
7041 << *UniqueFree <<
"\n");
  auto UsesCheck = [&](AllocationInfo &AI) {
    bool ValidUsesOnly = true;

    auto Pred = [&](const Use &U, bool &Follow) -> bool {
        if (SI->getValueOperand() == U.get()) {
                     << "[H2S] escaping store to memory: " << *UserI << "\n");
          ValidUsesOnly = false;
        if (DeallocationInfos.count(CB)) {
          AI.PotentialFreeCalls.insert(CB);
        bool IsKnownNoCapture;
        if (!IsAssumedNoCapture ||
            (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
             !IsAssumedNoFree)) {
          AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;

          auto Remark = [&](OptimizationRemarkMissed ORM) {
                   << "Could not move globalized variable to the stack. "
                      "Variable is potentially captured in call. Mark "
                      "parameter as `__attribute__((noescape))` to override.";

          if (ValidUsesOnly &&
              AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
            A.emitRemark<OptimizationRemarkMissed>(CB, "OMP113", Remark);

          ValidUsesOnly = false;
        ValidUsesOnly = false;

    if (!A.checkForAllUses(Pred, *this, *AI.CB, false,
                           [&](const Use &OldU, const Use &NewU) {
                             auto *SI = dyn_cast<StoreInst>(OldU.getUser());
                             return !SI || StackIsAccessibleByOtherThreads ||
                                    AA::isAssumedThreadLocalObject(
                                        A, *SI->getPointerOperand(), *this);

    return ValidUsesOnly;
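  // Comment added for readability (not part of the original source): the loop
  // over AllocationInfos that follows invalidates allocations whose alignment
  // or size is unknown, not a power of two, or too large, and then uses
  // FreeCheck and UsesCheck to decide whether an allocation may still become
  // an alloca and whether that alloca may be hoisted into the entry block.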
7133 for (
auto &It : AllocationInfos) {
7134 AllocationInfo &AI = *It.second;
7135 if (AI.Status == AllocationInfo::INVALID)
7139 std::optional<APInt> APAlign = getAPInt(
A, *
this, *Align);
7143 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7145 AI.Status = AllocationInfo::INVALID;
7150 !APAlign->isPowerOf2()) {
7151 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7153 AI.Status = AllocationInfo::INVALID;
7160 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7165 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7167 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7171 AI.Status = AllocationInfo::INVALID;
7177 switch (AI.Status) {
7178 case AllocationInfo::STACK_DUE_TO_USE:
7181 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7183 case AllocationInfo::STACK_DUE_TO_FREE:
7186 AI.Status = AllocationInfo::INVALID;
7189 case AllocationInfo::INVALID:
7196 bool IsGlobalizedLocal =
7197 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7198 if (AI.MoveAllocaIntoEntry &&
7199 (!
Size.has_value() ||
7200 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7201 AI.MoveAllocaIntoEntry =
false;
struct AAPrivatizablePtrImpl : public AAPrivatizablePtr {
  AAPrivatizablePtrImpl(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtr(IRP, A), PrivatizableType(std::nullopt) {}

    AAPrivatizablePtr::indicatePessimisticFixpoint();
    PrivatizableType = nullptr;
    return ChangeStatus::CHANGED;

  virtual std::optional<Type *> identifyPrivatizableType(Attributor &A) = 0;

  std::optional<Type *> combineTypes(std::optional<Type *> T0,
                                     std::optional<Type *> T1) {

  std::optional<Type *> getPrivatizableType() const override {
    return PrivatizableType;

  const std::string getAsStr(Attributor *A) const override {
    return isAssumedPrivatizablePtr() ? "[priv]" : "[no-priv]";

  std::optional<Type *> PrivatizableType;
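  // Comment added for readability (not part of the original source):
  // AAPrivatizablePtr asks whether a pointer argument can be "privatized",
  // i.e. replaced by a by-value copy of the pointee so the callee works on a
  // private stack copy. PrivatizableType caches the pointee type all call
  // sites agree on: std::nullopt means "undecided", nullptr means "not
  // privatizable".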
struct AAPrivatizablePtrArgument final : public AAPrivatizablePtrImpl {
  AAPrivatizablePtrArgument(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrImpl(IRP, A) {}
  std::optional<Type *> identifyPrivatizableType(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.getAttrs(getIRPosition(), {Attribute::ByVal}, Attrs,
    if (!Attrs.empty() &&
        A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *this,
                               true, UsedAssumedInformation))
      return Attrs[0].getValueAsType();
7269 std::optional<Type *> Ty;
7270 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7278 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7287 A.getAAFor<AAPrivatizablePtr>(*
this, ACSArgPos, DepClassTy::REQUIRED);
7290 std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();
7293 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7297 dbgs() <<
"<nullptr>";
7302 Ty = combineTypes(Ty, CSTy);
7305 dbgs() <<
" : New Type: ";
7307 (*Ty)->print(
dbgs());
7309 dbgs() <<
"<nullptr>";
7318 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7319 UsedAssumedInformation))
7326 PrivatizableType = identifyPrivatizableType(
A);
7327 if (!PrivatizableType)
7328 return ChangeStatus::UNCHANGED;
7329 if (!*PrivatizableType)
7330 return indicatePessimisticFixpoint();
7335 DepClassTy::OPTIONAL);
7338 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7341 return indicatePessimisticFixpoint();
7347 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7351 Function &Fn = *getIRPosition().getAnchorScope();
7353 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
7355 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7357 return indicatePessimisticFixpoint();
7360 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7367 bool UsedAssumedInformation =
false;
7368 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7369 UsedAssumedInformation)) {
7371 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7373 return indicatePessimisticFixpoint();
7377 Argument *Arg = getAssociatedArgument();
7378 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7380 return indicatePessimisticFixpoint();
    auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
      for (const Use *U : CallbackUses) {
        AbstractCallSite CBACS(U);
        assert(CBACS && CBACS.isCallbackCall());
        for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
          int CBArgNo = CBACS.getCallArgOperandNo(CBArg);

              << "[AAPrivatizablePtr] Argument " << *Arg
              << "check if can be privatized in the context of its parent ("
              << ")\n[AAPrivatizablePtr] because it is an argument in a "
              << CBArgNo << "@" << CBACS.getCalledFunction()->getName()
              << ")\n[AAPrivatizablePtr] " << CBArg << " : "
              << CBACS.getCallArgOperand(CBArg) << " vs "
              << "[AAPrivatizablePtr] " << CBArg << " : "
              << CBACS.getCallArgOperandNo(CBArg) << " vs " << ArgNo << "\n";

          if (CBArgNo != int(ArgNo))
          const auto *CBArgPrivAA = A.getAAFor<AAPrivatizablePtr>(
          if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
            auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
              if (*CBArgPrivTy == PrivatizableType)

            dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
                   << " cannot be privatized in the context of its parent ("
                   << ")\n[AAPrivatizablePtr] because it is an argument in a "
                   << CBArgNo << "@" << CBACS.getCalledFunction()->getName()
                   << ").\n[AAPrivatizablePtr] for which the argument "
                      "privatization is not compatible.\n";
    auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
             "Expected a direct call operand for callback call operand");

        dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
               << " check if be privatized in the context of its parent ("
               << ")\n[AAPrivatizablePtr] because it is an argument in a "
               << DCArgNo << "@" << DCCallee->getName() << ").\n";

      if (unsigned(DCArgNo) < DCCallee->arg_size()) {
        const auto *DCArgPrivAA = A.getAAFor<AAPrivatizablePtr>(
            DepClassTy::REQUIRED);
        if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
          auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
            if (*DCArgPrivTy == PrivatizableType)

          dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
                 << " cannot be privatized in the context of its parent ("
                 << ")\n[AAPrivatizablePtr] because it is an argument in a "
                 << ").\n[AAPrivatizablePtr] for which the argument "
                    "privatization is not compatible.\n";

    auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
        return IsCompatiblePrivArgOfDirectCS(ACS);

    if (!A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
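  // Comment added for readability (not part of the original source): the
  // helpers below compute the scalar replacement types for the privatizable
  // pointee (identifyReplacementTypes), store the passed values into the new
  // private copy inside the callee (createInitialization), and load the
  // replacement values at each call site (createReplacementValues); manifest
  // then registers the actual function signature rewrite with the Attributor.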
  identifyReplacementTypes(Type *PrivType,
                           SmallVectorImpl<Type *> &ReplacementTypes) {
    assert(PrivType && "Expected privatizable type!");

      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
        ReplacementTypes.push_back(PrivStructType->getElementType(u));
      ReplacementTypes.append(PrivArrayType->getNumElements(),
                              PrivArrayType->getElementType());
7526 static void createInitialization(
Type *PrivType,
Value &
Base, Function &
F,
7528 assert(PrivType &&
"Expected privatizable type!");
7531 const DataLayout &
DL =
F.getDataLayout();
7535 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7536 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7539 new StoreInst(
F.getArg(ArgNo + u),
Ptr, IP);
7542 Type *PointeeTy = PrivArrayType->getElementType();
7543 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7544 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7546 new StoreInst(
F.getArg(ArgNo + u),
Ptr, IP);
7549 new StoreInst(
F.getArg(ArgNo), &
Base, IP);
7555 void createReplacementValues(Align Alignment,
Type *PrivType,
7557 SmallVectorImpl<Value *> &ReplacementValues) {
7559 assert(PrivType &&
"Expected privatizable type!");
7567 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7568 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7569 Type *PointeeTy = PrivStructType->getElementType(u);
7573 L->setAlignment(Alignment);
7577 Type *PointeeTy = PrivArrayType->getElementType();
7578 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7579 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7582 L->setAlignment(Alignment);
7587 L->setAlignment(Alignment);
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    assert(*PrivatizableType && "Expected privatizable type!");

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            [&](Instruction &I) {
              CallInst &CI = cast<CallInst>(I);
              if (CI.isTailCall())
                TailCalls.push_back(&CI);
            *this, {Instruction::Call}, UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;

    Argument *Arg = getAssociatedArgument();
    const auto *AlignAA =

        [=](const Attributor::ArgumentReplacementInfo &ARI,
          BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
          const DataLayout &DL = IP->getDataLayout();
          unsigned AS = DL.getAllocaAddrSpace();
          Instruction *AI = new AllocaInst(*PrivatizableType, AS,
                                           Arg->getName() + ".priv", IP);
          createInitialization(*PrivatizableType, *AI, ReplacementFn,
                               ArgIt->getArgNo(), IP);
            AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
          for (CallInst *CI : TailCalls)
            CI->setTailCall(false);

        [=](const Attributor::ArgumentReplacementInfo &ARI,
            AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
          createReplacementValues(
              AlignAA ? AlignAA->getAssumedAlign() : Align(0),
              *PrivatizableType, ACS,

    identifyReplacementTypes(*PrivatizableType, ReplacementTypes);

    if (A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
                                           std::move(FnRepairCB),
                                           std::move(ACSRepairCB)))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {
struct AAPrivatizablePtrFloating : public AAPrivatizablePtrImpl {
  AAPrivatizablePtrFloating(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrImpl(IRP, A) {}

    indicatePessimisticFixpoint();
                     "updateImpl will not be called");

  std::optional<Type *> identifyPrivatizableType(Attributor &A) override {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] No underlying object found!\n");
      return AI->getAllocatedType();

    auto *PrivArgAA = A.getAAFor<AAPrivatizablePtr>(
    if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
      return PrivArgAA->getPrivatizableType();

    LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Underlying object neither valid "
                         "alloca nor privatizable argument: "

  void trackStatistics() const override {
struct AAPrivatizablePtrCallSiteArgument final
    : public AAPrivatizablePtrFloating {
  AAPrivatizablePtrCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrFloating(IRP, A) {}

    if (A.hasAttr(getIRPosition(), Attribute::ByVal))
      indicateOptimisticFixpoint();

    PrivatizableType = identifyPrivatizableType(A);
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    if (!*PrivatizableType)
      return indicatePessimisticFixpoint();

    const IRPosition &IRP = getIRPosition();
    bool IsKnownNoCapture;
        A, this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
    if (!IsAssumedNoCapture) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer might be captured!\n");
      return indicatePessimisticFixpoint();

    bool IsKnownNoAlias;
            A, this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer might alias!\n");
      return indicatePessimisticFixpoint();

      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer is written!\n");
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {
7773struct AAPrivatizablePtrCallSiteReturned final
7774 :
public AAPrivatizablePtrFloating {
7775 AAPrivatizablePtrCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
7776 : AAPrivatizablePtrFloating(IRP,
A) {}
7781 indicatePessimisticFixpoint();
7785 void trackStatistics()
const override {
7790struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7791 AAPrivatizablePtrReturned(
const IRPosition &IRP, Attributor &
A)
7792 : AAPrivatizablePtrFloating(IRP,
A) {}
7797 indicatePessimisticFixpoint();
7801 void trackStatistics()
const override {
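// Comment added for readability (not part of the original source):
// AAMemoryBehavior below deduces readnone / readonly / writeonly for
// functions, arguments, and call sites by intersecting the NO_READS and
// NO_WRITES bits of a BitIntegerState while scanning uses and instructions.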
struct AAMemoryBehaviorImpl : public AAMemoryBehavior {
  AAMemoryBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehavior(IRP, A) {}

    intersectAssumedBits(BEST_STATE);
    getKnownStateFromValue(A, getIRPosition(), getState());
    AAMemoryBehavior::initialize(A);
7823 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
7824 BitIntegerState &State,
7825 bool IgnoreSubsumingPositions =
false) {
7827 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7829 switch (Attr.getKindAsEnum()) {
7830 case Attribute::ReadNone:
7833 case Attribute::ReadOnly:
7836 case Attribute::WriteOnly:
7845 if (!
I->mayReadFromMemory())
7847 if (!
I->mayWriteToMemory())
7853 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
7854 SmallVectorImpl<Attribute> &Attrs)
const override {
7857 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
7859 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
7860 else if (isAssumedWriteOnly())
7861 Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
7867 const IRPosition &IRP = getIRPosition();
7869 if (
A.hasAttr(IRP, Attribute::ReadNone,
7871 return ChangeStatus::UNCHANGED;
7880 return ChangeStatus::UNCHANGED;
7883 A.removeAttrs(IRP, AttrKinds);
7886 A.removeAttrs(IRP, Attribute::Writable);
7893 const std::string getAsStr(Attributor *
A)
const override {
7898 if (isAssumedWriteOnly())
7900 return "may-read/write";
7904 static const Attribute::AttrKind AttrKinds[3];
7908 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
  AAMemoryBehaviorFloating(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorImpl(IRP, A) {}

  void trackStatistics() const override {
    else if (isAssumedWriteOnly())

  bool followUsersOfUseIn(Attributor &A, const Use &U,
                          const Instruction *UserI);

  void analyzeUseIn(Attributor &A, const Use &U, const Instruction *UserI);
7939struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7940 AAMemoryBehaviorArgument(
const IRPosition &IRP, Attributor &
A)
7941 : AAMemoryBehaviorFloating(IRP,
A) {}
7945 intersectAssumedBits(BEST_STATE);
7946 const IRPosition &IRP = getIRPosition();
7950 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7952 getKnownStateFromValue(
A, IRP, getState(),
7959 return ChangeStatus::UNCHANGED;
7963 if (
A.hasAttr(getIRPosition(),
7964 {Attribute::InAlloca, Attribute::Preallocated})) {
7965 removeKnownBits(NO_WRITES);
7966 removeAssumedBits(NO_WRITES);
7968 A.removeAttrs(getIRPosition(), AttrKinds);
7969 return AAMemoryBehaviorFloating::manifest(
A);
7973 void trackStatistics()
const override {
7978 else if (isAssumedWriteOnly())
7983struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7984 AAMemoryBehaviorCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
7985 : AAMemoryBehaviorArgument(IRP,
A) {}
7991 Argument *Arg = getAssociatedArgument();
7993 indicatePessimisticFixpoint();
7997 addKnownBits(NO_WRITES);
7998 removeKnownBits(NO_READS);
7999 removeAssumedBits(NO_READS);
8001 AAMemoryBehaviorArgument::initialize(
A);
8002 if (getAssociatedFunction()->isDeclaration())
8003 indicatePessimisticFixpoint();
8012 Argument *Arg = getAssociatedArgument();
8015 A.getAAFor<AAMemoryBehavior>(*
this, ArgPos, DepClassTy::REQUIRED);
8017 return indicatePessimisticFixpoint();
8022 void trackStatistics()
const override {
8027 else if (isAssumedWriteOnly())
8033struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8034 AAMemoryBehaviorCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
8035 : AAMemoryBehaviorFloating(IRP,
A) {}
8039 AAMemoryBehaviorImpl::initialize(
A);
8044 return ChangeStatus::UNCHANGED;
8048 void trackStatistics()
const override {}
struct AAMemoryBehaviorFunction final : public AAMemoryBehaviorImpl {
  AAMemoryBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorImpl(IRP, A) {}

    else if (isAssumedWriteOnly())
    A.removeAttrs(getIRPosition(), AttrKinds);
    for (Argument &Arg : F.args())
    return A.manifestAttrs(getIRPosition(),
                           Attribute::getWithMemoryEffects(F.getContext(), ME));

  void trackStatistics() const override {
    else if (isAssumedWriteOnly())

struct AAMemoryBehaviorCallSite final
    : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
  AAMemoryBehaviorCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP, A) {}
8108 else if (isAssumedWriteOnly())
8111 A.removeAttrs(getIRPosition(), AttrKinds);
8114 for (Use &U : CB.
args())
8116 Attribute::Writable);
8117 return A.manifestAttrs(
8118 getIRPosition(), Attribute::getWithMemoryEffects(CB.
getContext(), ME));
8122 void trackStatistics()
const override {
8127 else if (isAssumedWriteOnly())
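// Comment added for readability (not part of the original source): the
// function-level update below first tries to reuse the memory behavior of a
// known callee and otherwise inspects every read/write instruction in the
// function through checkForAllReadWriteInstructions.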
ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &A) {
  auto AssumedState = getAssumed();

  const auto *MemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
  if (MemBehaviorAA) {
    intersectAssumedBits(MemBehaviorAA->getAssumed());
    return !isAtFixpoint();

    if (I.mayReadFromMemory())
      removeAssumedBits(NO_READS);
    if (I.mayWriteToMemory())
      removeAssumedBits(NO_WRITES);
    return !isAtFixpoint();

  bool UsedAssumedInformation = false;
  if (!A.checkForAllReadWriteInstructions(CheckRWInst, *this,
                                          UsedAssumedInformation))
    return indicatePessimisticFixpoint();
8167ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &
A) {
8169 const IRPosition &IRP = getIRPosition();
8180 const auto *FnMemAA =
8183 FnMemAssumedState = FnMemAA->getAssumed();
8184 S.addKnownBits(FnMemAA->getKnown());
8185 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8191 auto AssumedState = S.getAssumed();
8197 bool IsKnownNoCapture;
8198 const AANoCapture *ArgNoCaptureAA =
nullptr;
8203 if (!IsAssumedNoCapture &&
8205 S.intersectAssumedBits(FnMemAssumedState);
8211 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8213 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8221 Follow = followUsersOfUseIn(
A, U, UserI);
8225 analyzeUseIn(
A, U, UserI);
8227 return !isAtFixpoint();
8230 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8231 return indicatePessimisticFixpoint();
bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &A, const Use &U,
                                                  const Instruction *UserI) {
  if (U.get()->getType()->isPointerTy()) {
    bool IsKnownNoCapture;

void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &A, const Use &U,
                                            const Instruction *UserI) {
8275 case Instruction::Load:
8277 removeAssumedBits(NO_READS);
8280 case Instruction::Store:
8285 removeAssumedBits(NO_WRITES);
8287 indicatePessimisticFixpoint();
8290 case Instruction::Call:
8291 case Instruction::CallBr:
8292 case Instruction::Invoke: {
8299 indicatePessimisticFixpoint();
8306 removeAssumedBits(NO_READS);
8313 if (
U.get()->getType()->isPointerTy())
8317 const auto *MemBehaviorAA =
8323 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8331 removeAssumedBits(NO_READS);
8333 removeAssumedBits(NO_WRITES);
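// Comment added for readability (not part of the original source):
// AAMemoryLocation below refines *which* memory an IR position may access
// (argument memory, internal/external globals, inaccessible memory, malloc'ed
// memory, or unknown memory), which is what the "memory(...)" attribute
// ultimately encodes.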
    return "all memory";
  std::string S = "memory:";
    S += "internal global,";
    S += "external global,";
    S += "inaccessible,";
8374 AccessKind2Accesses.fill(
nullptr);
8377 ~AAMemoryLocationImpl() {
8380 for (AccessSet *AS : AccessKind2Accesses)
8387 intersectAssumedBits(BEST_STATE);
8388 getKnownStateFromValue(
A, getIRPosition(), getState());
8389 AAMemoryLocation::initialize(
A);
8393 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
8394 BitIntegerState &State,
8395 bool IgnoreSubsumingPositions =
false) {
8404 bool UseArgMemOnly =
true;
8406 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8410 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8419 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8424 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8428 A.manifestAttrs(IRP,
8429 Attribute::getWithMemoryEffects(
8438 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8442 A.manifestAttrs(IRP,
8443 Attribute::getWithMemoryEffects(
8453 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
8454 SmallVectorImpl<Attribute> &Attrs)
const override {
8461 else if (isAssumedInaccessibleMemOnly())
8462 Attrs.push_back(Attribute::getWithMemoryEffects(
8464 else if (isAssumedArgMemOnly())
8467 else if (isAssumedInaccessibleOrArgMemOnly())
8468 Attrs.push_back(Attribute::getWithMemoryEffects(
8478 const IRPosition &IRP = getIRPosition();
8482 if (DeducedAttrs.
size() != 1)
8483 return ChangeStatus::UNCHANGED;
8486 return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(
8491 bool checkForAllAccessesToMemoryKind(
8493 MemoryLocationsKind)>
8495 MemoryLocationsKind RequestedMLK)
const override {
8496 if (!isValidState())
8499 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8500 if (AssumedMLK == NO_LOCATIONS)
8504 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8505 CurMLK *= 2, ++Idx) {
8506 if (CurMLK & RequestedMLK)
8509 if (
const AccessSet *
Accesses = AccessKind2Accesses[Idx])
8510 for (
const AccessInfo &AI : *
Accesses)
8511 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8524 MemoryLocationsKind KnownMLK = getKnown();
8526 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8527 if (!(CurMLK & KnownMLK))
8528 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr,
Changed,
8529 getAccessKindFromInst(
I));
8530 return AAMemoryLocation::indicatePessimisticFixpoint();
    bool operator()(const AccessInfo &LHS, const AccessInfo &RHS) const {
        return LHS.Ptr < RHS.Ptr;
      if (LHS.Kind != RHS.Kind)
        return LHS.Kind < RHS.Kind;

  using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
  std::array<AccessSet *, llvm::ConstantLog2<VALID_STATE>()>
      AccessKind2Accesses;
8570 categorizeArgumentPointerLocations(Attributor &
A, CallBase &CB,
8571 AAMemoryLocation::StateType &AccessedLocs,
8576 categorizeAccessedLocations(Attributor &
A, Instruction &
I,
bool &
Changed);
  AccessKind getAccessKindFromInst(const Instruction *I) {
      AK = I->mayReadFromMemory() ? READ : NONE;

  void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
                                 MemoryLocationsKind MLK, const Instruction *I,
    if (MLK == NO_UNKOWN_MEM)
    State.removeAssumedBits(MLK);

  void categorizePtrValue(Attributor &A, const Instruction &I, const Value &Ptr,
                          AAMemoryLocation::StateType &State, bool &Changed,
                          unsigned AccessAS = 0);
8616void AAMemoryLocationImpl::categorizePtrValue(
8617 Attributor &
A,
const Instruction &
I,
const Value &
Ptr,
8619 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8623 auto Pred = [&](
Value &Obj) {
8626 MemoryLocationsKind MLK = NO_LOCATIONS;
8645 MLK = NO_ARGUMENT_MEM;
8651 if (GVar->isConstant())
8654 if (GV->hasLocalLinkage())
8655 MLK = NO_GLOBAL_INTERNAL_MEM;
8657 MLK = NO_GLOBAL_EXTERNAL_MEM;
8665 bool IsKnownNoAlias;
8669 MLK = NO_MALLOCED_MEM;
8671 MLK = NO_UNKOWN_MEM;
8673 MLK = NO_UNKOWN_MEM;
8676 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8677 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8678 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8680 getAccessKindFromInst(&
I));
8685 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
8689 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8690 updateStateAndAccessesMap(
State, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8691 getAccessKindFromInst(&
I));
8696 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8700void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8703 for (
unsigned ArgNo = 0,
E = CB.
arg_size(); ArgNo <
E; ++ArgNo) {
8712 const auto *ArgOpMemLocationAA =
8715 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8720 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs,
Changed);
AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &A, Instruction &I,
  LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Categorize accessed locations for "

  AccessedLocs.intersectAssumedBits(NO_LOCATIONS);

    const auto *CBMemLocationAA = A.getAAFor<AAMemoryLocation>(
                      << " [" << CBMemLocationAA << "]\n");
    if (!CBMemLocationAA) {
      updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &I, nullptr,
                                Changed, getAccessKindFromInst(&I));
      return NO_UNKOWN_MEM;
8746 if (CBMemLocationAA->isAssumedReadNone())
8747 return NO_LOCATIONS;
8749 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8750 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8751 Changed, getAccessKindFromInst(&
I));
8752 return AccessedLocs.getAssumed();
8755 uint32_t CBAssumedNotAccessedLocs =
8756 CBMemLocationAA->getAssumedNotAccessedLocation();
8759 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8760 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8762 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8763 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8765 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr,
Changed,
8766 getAccessKindFromInst(&
I));
8771 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8772 if (HasGlobalAccesses) {
8775 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr,
Changed,
8776 getAccessKindFromInst(&
I));
8779 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8780 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8781 return AccessedLocs.getWorstState();
8785 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8786 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8789 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8791 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs,
Changed);
8794 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8795 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8797 return AccessedLocs.getAssumed();
8802 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8803 <<
I <<
" [" << *
Ptr <<
"]\n");
8805 Ptr->getType()->getPointerAddressSpace());
8806 return AccessedLocs.getAssumed();
8809 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8811 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8812 getAccessKindFromInst(&
I));
8813 return AccessedLocs.getAssumed();
struct AAMemoryLocationFunction final : public AAMemoryLocationImpl {
  AAMemoryLocationFunction(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocationImpl(IRP, A) {}
8824 const auto *MemBehaviorAA =
8825 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
8828 return indicateOptimisticFixpoint();
8830 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8831 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8832 return ChangeStatus::UNCHANGED;
8836 auto AssumedState = getAssumed();
8840 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I,
Changed);
8841 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8842 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8843 removeAssumedBits(inverseLocation(MLK,
false,
false));
8846 return getAssumedNotAccessedLocation() != VALID_STATE;
8849 bool UsedAssumedInformation =
false;
8850 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8851 UsedAssumedInformation))
8852 return indicatePessimisticFixpoint();
8854 Changed |= AssumedState != getAssumed();
8855 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8859 void trackStatistics()
const override {
8862 else if (isAssumedArgMemOnly())
8864 else if (isAssumedInaccessibleMemOnly())
8866 else if (isAssumedInaccessibleOrArgMemOnly())
struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
  AAMemoryLocationCallSite(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocationImpl(IRP, A) {}
8885 A.getAAFor<AAMemoryLocation>(*
this, FnPos, DepClassTy::REQUIRED);
8887 return indicatePessimisticFixpoint();
8891 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr,
Changed,
8892 getAccessKindFromInst(
I));
8895 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8896 return indicatePessimisticFixpoint();
8897 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8901 void trackStatistics()
const override {
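// Comment added for readability (not part of the original source):
// AADenormalFPMath below merges the "denormal-fp-math" and
// "denormal-fp-math-f32" modes seen at all call sites into the callee, so the
// callee only keeps a denormal mode its callers agree on.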
struct AADenormalFPMathImpl : public AADenormalFPMath {
  AADenormalFPMathImpl(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMath(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    std::string Str("AADenormalFPMath[");
    raw_string_ostream OS(Str);

    DenormalState Known = getKnown();
    if (Known.Mode.isValid())
      OS << "denormal-fp-math=" << Known.Mode;
    if (Known.ModeF32.isValid())
      OS << " denormal-fp-math-f32=" << Known.ModeF32;
struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
  AADenormalFPMathFunction(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMathImpl(IRP, A) {}

    DenormalMode Mode = F->getDenormalModeRaw();
    DenormalMode ModeF32 = F->getDenormalModeF32Raw();
    Known = DenormalState{Mode, ModeF32};
    auto CheckCallSite = [=, &Change, &A](AbstractCallSite CS) {
                        << "->" << getAssociatedFunction()->getName() << '\n');

      const auto *CallerInfo = A.getAAFor<AADenormalFPMath>(
                 CallerInfo->getState());

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckCallSite, *this, true, AllCallSitesKnown))
      return indicatePessimisticFixpoint();

    if (Change == ChangeStatus::CHANGED && isModeFixed())

    LLVMContext &Ctx = getAssociatedFunction()->getContext();

      AttrToRemove.push_back("denormal-fp-math");
          Attribute::get(Ctx, "denormal-fp-math", Known.Mode.str()));

    if (Known.ModeF32 != Known.Mode) {
          Attribute::get(Ctx, "denormal-fp-math-f32", Known.ModeF32.str()));
      AttrToRemove.push_back("denormal-fp-math-f32");

    auto &IRP = getIRPosition();

    return A.removeAttrs(IRP, AttrToRemove) |
           A.manifestAttrs(IRP, AttrToAdd, true);
9004 void trackStatistics()
const override {
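// Comment added for readability (not part of the original source):
// AAValueConstantRange below tracks a ConstantRange for integer values,
// seeding the known range from ScalarEvolution and LazyValueInfo and
// propagating assumed ranges through binary operators, casts, and compares.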
struct AAValueConstantRangeImpl : AAValueConstantRange {
  using StateType = IntegerRangeState;
  AAValueConstantRangeImpl(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRange(IRP, A) {}

    if (A.hasSimplificationCallback(getIRPosition())) {
      indicatePessimisticFixpoint();

    intersectKnown(getConstantRangeFromSCEV(A, getCtxI()));
    intersectKnown(getConstantRangeFromLVI(A, getCtxI()));
9033 const std::string getAsStr(Attributor *
A)
const override {
9035 llvm::raw_string_ostream OS(Str);
9037 getKnown().print(OS);
9039 getAssumed().print(OS);
9046 const SCEV *getSCEV(Attributor &
A,
const Instruction *
I =
nullptr)
const {
9047 if (!getAnchorScope())
9050 ScalarEvolution *SE =
9051 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9054 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
9060 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9069 ConstantRange getConstantRangeFromSCEV(Attributor &
A,
9070 const Instruction *
I =
nullptr)
const {
9071 if (!getAnchorScope())
9074 ScalarEvolution *SE =
9075 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9078 const SCEV *S = getSCEV(
A,
I);
9088 getConstantRangeFromLVI(Attributor &
A,
9089 const Instruction *CtxI =
nullptr)
const {
9090 if (!getAnchorScope())
9093 LazyValueInfo *LVI =
9094 A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
9109 bool isValidCtxInstructionForOutsideAnalysis(Attributor &
A,
9110 const Instruction *CtxI,
9111 bool AllowAACtxI)
const {
9112 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9124 InformationCache &InfoCache =
A.getInfoCache();
9125 const DominatorTree *DT =
9136 getKnownConstantRange(Attributor &
A,
9137 const Instruction *CtxI =
nullptr)
const override {
9138 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9142 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9143 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9144 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9149 getAssumedConstantRange(Attributor &
A,
9150 const Instruction *CtxI =
nullptr)
const override {
9155 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9157 return getAssumed();
9159 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9160 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9161 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
  getMDNodeForConstantRange(Type *Ty, LLVMContext &Ctx,
                            const ConstantRange &AssumedConstantRange) {
            Ty, AssumedConstantRange.getLower())),
            Ty, AssumedConstantRange.getUpper()))};
9176 static bool isBetterRange(
const ConstantRange &Assumed,
9177 const Instruction &
I) {
9181 std::optional<ConstantRange> Known;
9185 }
else if (MDNode *KnownRanges =
I.getMetadata(LLVMContext::MD_range)) {
9191 if (KnownRanges->getNumOperands() > 2)
9194 ConstantInt *
Lower =
9196 ConstantInt *
Upper =
9199 Known.emplace(
Lower->getValue(),
Upper->getValue());
9201 return !Known || (*Known != Assumed && Known->contains(Assumed));
9206 setRangeMetadataIfisBetterRange(Instruction *
I,
9207 const ConstantRange &AssumedConstantRange) {
9208 if (isBetterRange(AssumedConstantRange, *
I)) {
9209 I->setMetadata(LLVMContext::MD_range,
9210 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9211 AssumedConstantRange));
9218 setRangeRetAttrIfisBetterRange(Attributor &
A,
const IRPosition &IRP,
9220 const ConstantRange &AssumedConstantRange) {
9221 if (isBetterRange(AssumedConstantRange, *
I)) {
9222 A.manifestAttrs(IRP,
9223 Attribute::get(
I->getContext(), Attribute::Range,
9224 AssumedConstantRange),
9234 ConstantRange AssumedConstantRange = getAssumedConstantRange(
A);
9237 auto &
V = getAssociatedValue();
9241 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9242 "not the context instruction");
9244 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9245 Changed = ChangeStatus::CHANGED;
9247 if (setRangeRetAttrIfisBetterRange(
A, getIRPosition(),
I,
9248 AssumedConstantRange))
9249 Changed = ChangeStatus::CHANGED;
9257struct AAValueConstantRangeArgument final
9258 : AAArgumentFromCallSiteArguments<
9259 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9261 using Base = AAArgumentFromCallSiteArguments<
9262 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9264 AAValueConstantRangeArgument(
const IRPosition &IRP, Attributor &
A)
9268 void trackStatistics()
const override {
9273struct AAValueConstantRangeReturned
9274 : AAReturnedFromReturnedValues<AAValueConstantRange,
9275 AAValueConstantRangeImpl,
9276 AAValueConstantRangeImpl::StateType,
9279 AAReturnedFromReturnedValues<AAValueConstantRange,
9280 AAValueConstantRangeImpl,
9281 AAValueConstantRangeImpl::StateType,
9283 AAValueConstantRangeReturned(
const IRPosition &IRP, Attributor &
A)
9288 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9289 indicatePessimisticFixpoint();
9293 void trackStatistics()
const override {
struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
  AAValueConstantRangeFloating(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeImpl(IRP, A) {}

    AAValueConstantRangeImpl::initialize(A);

    Value &V = getAssociatedValue();
      unionAssumed(ConstantRange(C->getValue()));
      indicateOptimisticFixpoint();
      unionAssumed(ConstantRange(APInt(getBitWidth(), 0)));
      indicateOptimisticFixpoint();
      if (auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
    indicatePessimisticFixpoint();
                      << getAssociatedValue() << "\n");
9348 bool calculateBinaryOperator(
9349 Attributor &
A, BinaryOperator *BinOp, IntegerRangeState &
T,
9350 const Instruction *CtxI,
9351 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9356 bool UsedAssumedInformation =
false;
9357 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9360 if (!SimplifiedLHS.has_value())
9362 if (!*SimplifiedLHS)
9364 LHS = *SimplifiedLHS;
9366 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9369 if (!SimplifiedRHS.has_value())
9371 if (!*SimplifiedRHS)
9373 RHS = *SimplifiedRHS;
9379 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9381 DepClassTy::REQUIRED);
9385 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9387 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9389 DepClassTy::REQUIRED);
9393 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9395 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9397 T.unionAssumed(AssumedRange);
9401 return T.isValidState();
9404 bool calculateCastInst(
9405 Attributor &
A, CastInst *CastI, IntegerRangeState &
T,
9406 const Instruction *CtxI,
9407 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9413 bool UsedAssumedInformation =
false;
9414 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9417 if (!SimplifiedOpV.has_value())
9419 if (!*SimplifiedOpV)
9421 OpV = *SimplifiedOpV;
9426 auto *OpAA =
A.getAAFor<AAValueConstantRange>(
9428 DepClassTy::REQUIRED);
9432 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9434 return T.isValidState();
9438 calculateCmpInst(Attributor &
A, CmpInst *CmpI, IntegerRangeState &
T,
9439 const Instruction *CtxI,
9440 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9445 bool UsedAssumedInformation =
false;
9446 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9449 if (!SimplifiedLHS.has_value())
9451 if (!*SimplifiedLHS)
9453 LHS = *SimplifiedLHS;
9455 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9458 if (!SimplifiedRHS.has_value())
9460 if (!*SimplifiedRHS)
9462 RHS = *SimplifiedRHS;
9468 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9470 DepClassTy::REQUIRED);
9474 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9476 DepClassTy::REQUIRED);
9480 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9481 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9484 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9487 bool MustTrue =
false, MustFalse =
false;
9489 auto AllowedRegion =
9492 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9498 assert((!MustTrue || !MustFalse) &&
9499 "Either MustTrue or MustFalse should be false!");
9502 T.unionAssumed(ConstantRange(APInt( 1, 1)));
9504 T.unionAssumed(ConstantRange(APInt( 1, 0)));
9506 T.unionAssumed(ConstantRange( 1,
true));
    LLVM_DEBUG(dbgs() << "[AAValueConstantRange] " << *CmpI << " after "
                      << (MustTrue ? "true" : (MustFalse ? "false" : "unknown"))
                      << ": " << T << "\n\t" << *LHSAA << "\t<op>\n\t"

    return T.isValidState();
9526 bool UsedAssumedInformation =
false;
9527 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9530 if (!SimplifiedOpV.has_value())
9532 if (!*SimplifiedOpV)
9534 Value *VPtr = *SimplifiedOpV;
9537 const auto *AA =
A.getAAFor<AAValueConstantRange>(
9539 DepClassTy::REQUIRED);
9543 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9547 return T.isValidState();
9552 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9555 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9558 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9564 T.indicatePessimisticFixpoint();
9571 for (
const AAValueConstantRange *QueriedAA : QuerriedAAs) {
9572 if (QueriedAA !=
this)
9575 if (
T.getAssumed() == getState().getAssumed())
9577 T.indicatePessimisticFixpoint();
9580 return T.isValidState();
9583 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9584 return indicatePessimisticFixpoint();
9589 return ChangeStatus::UNCHANGED;
    if (++NumChanges > MaxNumChanges) {
      LLVM_DEBUG(dbgs() << "[AAValueConstantRange] performed " << NumChanges
                        << " but only " << MaxNumChanges
                        << " are allowed to avoid cyclic reasoning.");
      return indicatePessimisticFixpoint();
9596 return ChangeStatus::CHANGED;
9600 void trackStatistics()
const override {
9609 static constexpr int MaxNumChanges = 5;
9612struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9613 AAValueConstantRangeFunction(
const IRPosition &IRP, Attributor &
A)
9614 : AAValueConstantRangeImpl(IRP,
A) {}
9618 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9626struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9627 AAValueConstantRangeCallSite(
const IRPosition &IRP, Attributor &
A)
9628 : AAValueConstantRangeFunction(IRP,
A) {}
9634struct AAValueConstantRangeCallSiteReturned
9635 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9636 AAValueConstantRangeImpl::StateType,
9638 AAValueConstantRangeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
9639 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9640 AAValueConstantRangeImpl::StateType,
9647 if (std::optional<ConstantRange>
Range = CI->getRange())
9648 intersectKnown(*
Range);
9651 AAValueConstantRangeImpl::initialize(
A);
9655 void trackStatistics()
const override {
9659struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9660 AAValueConstantRangeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
9661 : AAValueConstantRangeFloating(IRP,
A) {}
9665 return ChangeStatus::UNCHANGED;
9669 void trackStatistics()
const override {
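// Comment added for readability (not part of the original source):
// AAPotentialConstantValues below collects a small set of constant integers a
// value may take (plus an "undef is contained" flag) and folds compares,
// selects, casts, and binary operators over all combinations of those sets.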
struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
  AAPotentialConstantValuesImpl(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValues(IRP, A) {}

    if (A.hasSimplificationCallback(getIRPosition()))
      indicatePessimisticFixpoint();

    AAPotentialConstantValues::initialize(A);

  bool fillSetWithConstantValues(Attributor &A, const IRPosition &IRP, SetTy &S,
                                 bool &ContainsUndef, bool ForSelf) {
9695 bool UsedAssumedInformation =
false;
9697 UsedAssumedInformation)) {
9704 auto *PotentialValuesAA =
A.getAAFor<AAPotentialConstantValues>(
9705 *
this, IRP, DepClassTy::REQUIRED);
9706 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9708 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9709 S = PotentialValuesAA->getState().getAssumedSet();
9716 ContainsUndef =
false;
9717 for (
auto &It : Values) {
9719 ContainsUndef =
true;
9725 S.insert(CI->getValue());
9727 ContainsUndef &= S.empty();
9733 const std::string getAsStr(Attributor *
A)
const override {
9735 llvm::raw_string_ostream OS(Str);
9742 return indicatePessimisticFixpoint();
9746struct AAPotentialConstantValuesArgument final
9747 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9748 AAPotentialConstantValuesImpl,
9749 PotentialConstantIntValuesState> {
9750 using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9751 AAPotentialConstantValuesImpl,
9753 AAPotentialConstantValuesArgument(
const IRPosition &IRP, Attributor &
A)
9757 void trackStatistics()
const override {
9762struct AAPotentialConstantValuesReturned
9763 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9764 AAPotentialConstantValuesImpl> {
9765 using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
9766 AAPotentialConstantValuesImpl>;
9767 AAPotentialConstantValuesReturned(
const IRPosition &IRP, Attributor &
A)
9771 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9772 indicatePessimisticFixpoint();
9773 Base::initialize(
A);
9777 void trackStatistics()
const override {
struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
  AAPotentialConstantValuesFloating(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValuesImpl(IRP, A) {}

    AAPotentialConstantValuesImpl::initialize(A);

    Value &V = getAssociatedValue();
      unionAssumed(C->getValue());
      indicateOptimisticFixpoint();
      unionAssumedWithUndef();
      indicateOptimisticFixpoint();
    indicatePessimisticFixpoint();
                      << getAssociatedValue() << "\n");
9818 static bool calculateICmpInst(
const ICmpInst *ICI,
const APInt &
LHS,
9823 static APInt calculateCastInst(
const CastInst *CI,
const APInt &Src,
9824 uint32_t ResultBitWidth) {
9829 case Instruction::Trunc:
9830 return Src.trunc(ResultBitWidth);
9831 case Instruction::SExt:
9832 return Src.sext(ResultBitWidth);
9833 case Instruction::ZExt:
9834 return Src.zext(ResultBitWidth);
9835 case Instruction::BitCast:
9840 static APInt calculateBinaryOperator(
const BinaryOperator *BinOp,
9841 const APInt &
LHS,
const APInt &
RHS,
9842 bool &SkipOperation,
bool &Unsupported) {
9849 switch (BinOpcode) {
9853 case Instruction::Add:
9855 case Instruction::Sub:
9857 case Instruction::Mul:
9859 case Instruction::UDiv:
9861 SkipOperation =
true;
9865 case Instruction::SDiv:
9867 SkipOperation =
true;
9871 case Instruction::URem:
9873 SkipOperation =
true;
9877 case Instruction::SRem:
9879 SkipOperation =
true;
9883 case Instruction::Shl:
9885 case Instruction::LShr:
9887 case Instruction::AShr:
9889 case Instruction::And:
9891 case Instruction::Or:
9893 case Instruction::Xor:
9898 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9899 const APInt &
LHS,
const APInt &
RHS) {
9900 bool SkipOperation =
false;
9903 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9908 unionAssumed(Result);
9909 return isValidState();
  ChangeStatus updateWithICmpInst(Attributor &A, ICmpInst *ICI) {
    auto AssumedBefore = getAssumed();

    bool LHSContainsUndef = false, RHSContainsUndef = false;
    SetTy LHSAAPVS, RHSAAPVS;
                                   LHSContainsUndef, false) ||
                                   RHSContainsUndef, false))
      return indicatePessimisticFixpoint();
9926 bool MaybeTrue =
false, MaybeFalse =
false;
9928 if (LHSContainsUndef && RHSContainsUndef) {
9931 unionAssumedWithUndef();
9932 }
else if (LHSContainsUndef) {
9933 for (
const APInt &R : RHSAAPVS) {
9934 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9935 MaybeTrue |= CmpResult;
9936 MaybeFalse |= !CmpResult;
9937 if (MaybeTrue & MaybeFalse)
9938 return indicatePessimisticFixpoint();
9940 }
else if (RHSContainsUndef) {
9941 for (
const APInt &L : LHSAAPVS) {
9942 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9943 MaybeTrue |= CmpResult;
9944 MaybeFalse |= !CmpResult;
9945 if (MaybeTrue & MaybeFalse)
9946 return indicatePessimisticFixpoint();
9949 for (
const APInt &L : LHSAAPVS) {
9950 for (
const APInt &R : RHSAAPVS) {
9951 bool CmpResult = calculateICmpInst(ICI, L, R);
9952 MaybeTrue |= CmpResult;
9953 MaybeFalse |= !CmpResult;
9954 if (MaybeTrue & MaybeFalse)
9955 return indicatePessimisticFixpoint();
9960 unionAssumed(APInt( 1, 1));
9962 unionAssumed(APInt( 1, 0));
9963 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9964 : ChangeStatus::CHANGED;
9967 ChangeStatus updateWithSelectInst(Attributor &
A, SelectInst *SI) {
9968 auto AssumedBefore = getAssumed();
9972 bool UsedAssumedInformation =
false;
9973 std::optional<Constant *>
C =
A.getAssumedConstant(
9974 *
SI->getCondition(), *
this, UsedAssumedInformation);
9977 bool OnlyLeft =
false, OnlyRight =
false;
9978 if (
C && *
C && (*C)->isOneValue())
9980 else if (
C && *
C && (*C)->isZeroValue())
9983 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9984 SetTy LHSAAPVS, RHSAAPVS;
9987 LHSContainsUndef,
false))
9988 return indicatePessimisticFixpoint();
9992 RHSContainsUndef,
false))
9993 return indicatePessimisticFixpoint();
9995 if (OnlyLeft || OnlyRight) {
9997 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9998 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
10001 unionAssumedWithUndef();
10003 for (
const auto &It : *OpAA)
10007 }
else if (LHSContainsUndef && RHSContainsUndef) {
10009 unionAssumedWithUndef();
10011 for (
const auto &It : LHSAAPVS)
10013 for (
const auto &It : RHSAAPVS)
10016 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10017 : ChangeStatus::CHANGED;
10020 ChangeStatus updateWithCastInst(Attributor &
A, CastInst *CI) {
10021 auto AssumedBefore = getAssumed();
10023 return indicatePessimisticFixpoint();
10028 bool SrcContainsUndef =
false;
10031 SrcContainsUndef,
false))
10032 return indicatePessimisticFixpoint();
10034 if (SrcContainsUndef)
10035 unionAssumedWithUndef();
10037 for (
const APInt &S : SrcPVS) {
10038 APInt
T = calculateCastInst(CI, S, ResultBitWidth);
10042 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10043 : ChangeStatus::CHANGED;
10046 ChangeStatus updateWithBinaryOperator(Attributor &
A, BinaryOperator *BinOp) {
10047 auto AssumedBefore = getAssumed();
10051 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10052 SetTy LHSAAPVS, RHSAAPVS;
10054 LHSContainsUndef,
false) ||
10056 RHSContainsUndef,
false))
10057 return indicatePessimisticFixpoint();
10062 if (LHSContainsUndef && RHSContainsUndef) {
10063 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10064 return indicatePessimisticFixpoint();
10065 }
else if (LHSContainsUndef) {
10066 for (
const APInt &R : RHSAAPVS) {
10067 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10068 return indicatePessimisticFixpoint();
10070 }
else if (RHSContainsUndef) {
10071 for (
const APInt &L : LHSAAPVS) {
10072 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10073 return indicatePessimisticFixpoint();
10076 for (
const APInt &L : LHSAAPVS) {
10077 for (
const APInt &R : RHSAAPVS) {
10078 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10079 return indicatePessimisticFixpoint();
10083 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10084 : ChangeStatus::CHANGED;
10087 ChangeStatus updateWithInstruction(Attributor &
A, Instruction *Inst) {
10088 auto AssumedBefore = getAssumed();
10090 bool ContainsUndef;
10092 ContainsUndef,
true))
10093 return indicatePessimisticFixpoint();
10094 if (ContainsUndef) {
10095 unionAssumedWithUndef();
10097 for (
const auto &It : Incoming)
10100 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10101 : ChangeStatus::CHANGED;
10106 Value &
V = getAssociatedValue();
10110 return updateWithICmpInst(
A, ICI);
10113 return updateWithSelectInst(
A, SI);
10116 return updateWithCastInst(
A, CI);
10119 return updateWithBinaryOperator(
A, BinOp);
10122 return updateWithInstruction(
A,
I);
10124 return indicatePessimisticFixpoint();
10128 void trackStatistics()
const override {
10133struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10134 AAPotentialConstantValuesFunction(
const IRPosition &IRP, Attributor &
A)
10135 : AAPotentialConstantValuesImpl(IRP,
A) {}
10140 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10145 void trackStatistics()
const override {
10150struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10151 AAPotentialConstantValuesCallSite(
const IRPosition &IRP, Attributor &
A)
10152 : AAPotentialConstantValuesFunction(IRP,
A) {}
10155 void trackStatistics()
const override {
10160struct AAPotentialConstantValuesCallSiteReturned
10161 : AACalleeToCallSite<AAPotentialConstantValues,
10162 AAPotentialConstantValuesImpl> {
10163 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10165 : AACalleeToCallSite<AAPotentialConstantValues,
10166 AAPotentialConstantValuesImpl>(IRP,
A) {}
10169 void trackStatistics()
const override {
10174struct AAPotentialConstantValuesCallSiteArgument
10175 : AAPotentialConstantValuesFloating {
10176 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10178 : AAPotentialConstantValuesFloating(IRP,
A) {}
10182 AAPotentialConstantValuesImpl::initialize(
A);
10183 if (isAtFixpoint())
10186 Value &
V = getAssociatedValue();
10189 unionAssumed(
C->getValue());
10190 indicateOptimisticFixpoint();
10195 unionAssumedWithUndef();
10196 indicateOptimisticFixpoint();
10203 Value &
V = getAssociatedValue();
10204 auto AssumedBefore = getAssumed();
10205 auto *AA =
A.getAAFor<AAPotentialConstantValues>(
10208 return indicatePessimisticFixpoint();
10209 const auto &S = AA->getAssumed();
10211 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10212 : ChangeStatus::CHANGED;
10216 void trackStatistics()
const override {
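// Comment added for readability (not part of the original source): AANoUndef
// below deduces the noundef attribute, i.e. that a value is neither undef nor
// poison at the given position; isImpliedByIR short-circuits when the
// attribute is already present in the IR.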
                          bool IgnoreSubsumingPositions) {
  assert(ImpliedAttributeKind == Attribute::NoUndef &&
         "Unexpected attribute kind");
  if (A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
                Attribute::NoUndef))

    Value &V = getAssociatedValue();
      indicatePessimisticFixpoint();
    assert(!isImpliedByIR(A, getIRPosition(), Attribute::NoUndef));
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANoUndef::StateType &State) {
    const Value *UseV = U->get();
    const DominatorTree *DT = nullptr;
    AssumptionCache *AC = nullptr;
    InformationCache &InfoCache = A.getInfoCache();
    if (Function *F = getAnchorScope()) {
    bool TrackUse = false;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noundef" : "may-undef-or-poison";
10284 bool UsedAssumedInformation =
false;
10285 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10286 UsedAssumedInformation))
10287 return ChangeStatus::UNCHANGED;
10291 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10294 return ChangeStatus::UNCHANGED;
10295 return AANoUndef::manifest(
A);
10299struct AANoUndefFloating :
public AANoUndefImpl {
10300 AANoUndefFloating(
const IRPosition &IRP, Attributor &
A)
10301 : AANoUndefImpl(IRP,
A) {}
10305 AANoUndefImpl::initialize(
A);
10306 if (!getState().isAtFixpoint() && getAnchorScope() &&
10307 !getAnchorScope()->isDeclaration())
10308 if (Instruction *CtxI = getCtxI())
10309 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10314 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10315 bool IsKnownNoUndef;
10317 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10321 bool UsedAssumedInformation =
false;
10322 Value *AssociatedValue = &getAssociatedValue();
10324 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10329 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10337 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10338 return indicatePessimisticFixpoint();
10339 return ChangeStatus::UNCHANGED;
10342 for (
const auto &VAC : Values)
10344 return indicatePessimisticFixpoint();
10346 return ChangeStatus::UNCHANGED;
struct AANoUndefReturned final
    : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
  AANoUndefReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP, A) {}

struct AANoUndefArgument final
    : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
  AANoUndefArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP, A) {}

struct AANoUndefCallSiteArgument final : AANoUndefFloating {
  AANoUndefCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoUndefFloating(IRP, A) {}

struct AANoUndefCallSiteReturned final
    : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
  AANoUndefCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP, A) {}
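// A minimal standalone sketch (plain C++, not LLVM code) of the known/assumed
// boolean state the AANoUndef deductions above operate on: the fixpoint solver
// starts optimistic ("assumed noundef"), updates may only weaken the assumed
// side, and indicatePessimisticFixpoint() collapses assumed onto known. The
// names mirror the Attributor API but this type is illustrative only.
#include <cassert>

struct BooleanState {
  bool Known = false;  // facts proven so far
  bool Assumed = true; // optimistic assumption, may only be weakened
  void indicateOptimisticFixpoint() { Known = Assumed; }
  void indicatePessimisticFixpoint() { Assumed = Known; }
};

int main() {
  BooleanState NoUndef;                   // start: assumed "noundef"
  bool SimplifiedIsKnownNoUndef = false;  // e.g. the value may be undef/poison
  if (!SimplifiedIsKnownNoUndef)
    NoUndef.indicatePessimisticFixpoint(); // give up: "may-undef-or-poison"
  assert(!NoUndef.Assumed && !NoUndef.Known);
}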
struct AANoFPClassImpl : AANoFPClass {
  AANoFPClassImpl(const IRPosition &IRP, Attributor &A) : AANoFPClass(IRP, A) {}

    const IRPosition &IRP = getIRPosition();
      indicateOptimisticFixpoint();
    A.getAttrs(getIRPosition(), {Attribute::NoFPClass}, Attrs, false);
    for (const auto &Attr : Attrs) {
    const DataLayout &DL = A.getDataLayout();
    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);

  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANoFPClass::StateType &State) {
    if (auto *NoFPAA = A.getAAFor<AANoFPClass>(*this, IRP, DepClassTy::NONE))
      State.addKnownBits(NoFPAA->getState().getKnown());

  const std::string getAsStr(Attributor *A) const override {
    std::string Result = "nofpclass";
    raw_string_ostream OS(Result);
    OS << getKnownNoFPClass() << '/' << getAssumedNoFPClass();

  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    Attrs.emplace_back(Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
struct AANoFPClassFloating : public AANoFPClassImpl {
  AANoFPClassFloating(const IRPosition &IRP, Attributor &A)
      : AANoFPClassImpl(IRP, A) {}

    bool UsedAssumedInformation = false;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
      Values.push_back({getAssociatedValue(), getCtxI()});
                                   DepClassTy::REQUIRED);
      if (!AA || this == AA) {
        T.indicatePessimisticFixpoint();
      const AANoFPClass::StateType &S =
          static_cast<const AANoFPClass::StateType &>(AA->getState());
      return T.isValidState();

    for (const auto &VAC : Values)
      if (!VisitValueCB(*VAC.getValue(), VAC.getCtxI()))
        return indicatePessimisticFixpoint();

  void trackStatistics() const override {
struct AANoFPClassReturned final
    : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                   AANoFPClassImpl::StateType, false,
                                   Attribute::None, false> {
  AANoFPClassReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                     AANoFPClassImpl::StateType, false,

  void trackStatistics() const override {

struct AANoFPClassArgument final
    : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP, A) {}

struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
  AANoFPClassCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFPClassFloating(IRP, A) {}

  void trackStatistics() const override {

struct AANoFPClassCallSiteReturned final
    : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP, A) {}

  void trackStatistics() const override {
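// A standalone sketch (plain C++) of how the nofpclass bits above combine:
// the assumed set of excluded FP classes is intersected across inputs, while
// bits taken from an explicit attribute are added to the known side. The bit
// values here are illustrative, not the real llvm::FPClassTest encoding.
#include <cstdint>
#include <cstdio>

enum : uint32_t { fcNan = 1, fcInf = 2, fcZero = 4, fcSubnormal = 8 };

struct NoFPClassState {
  uint32_t Known = 0;      // classes proven impossible
  uint32_t Assumed = ~0u;  // optimistic: everything excluded
  void addKnownBits(uint32_t B) { Known |= B; Assumed |= B; }
  void intersectAssumed(uint32_t B) { Assumed &= (B | Known); }
};

int main() {
  NoFPClassState S;
  S.addKnownBits(fcNan);              // e.g. from a nofpclass(nan) attribute
  S.intersectAssumed(fcNan | fcInf);  // one incoming value excludes nan+inf
  S.intersectAssumed(fcNan);          // another only excludes nan
  std::printf("known=%u assumed=%u\n", S.Known, S.Assumed); // 1 and 1
}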
struct AACallEdgesImpl : public AACallEdges {
  AACallEdgesImpl(const IRPosition &IRP, Attributor &A) : AACallEdges(IRP, A) {}

  const SetVector<Function *> &getOptimisticEdges() const override {
    return CalledFunctions;

  bool hasUnknownCallee() const override { return HasUnknownCallee; }

  bool hasNonAsmUnknownCallee() const override {
    return HasUnknownCalleeNonAsm;

  const std::string getAsStr(Attributor *A) const override {
    return "CallEdges[" + std::to_string(HasUnknownCallee) + "," +
           std::to_string(CalledFunctions.size()) + "]";

  void trackStatistics() const override {}

  void addCalledFunction(Function *Fn, ChangeStatus &Change) {
    if (CalledFunctions.insert(Fn)) {
      Change = ChangeStatus::CHANGED;

  void setHasUnknownCallee(bool NonAsm, ChangeStatus &Change) {
    if (!HasUnknownCallee)
      Change = ChangeStatus::CHANGED;
    if (NonAsm && !HasUnknownCalleeNonAsm)
      Change = ChangeStatus::CHANGED;
    HasUnknownCalleeNonAsm |= NonAsm;
    HasUnknownCallee = true;

  SetVector<Function *> CalledFunctions;

  bool HasUnknownCallee = false;

  bool HasUnknownCalleeNonAsm = false;
struct AACallEdgesCallSite : public AACallEdgesImpl {
  AACallEdgesCallSite(const IRPosition &IRP, Attributor &A)
      : AACallEdgesImpl(IRP, A) {}

      addCalledFunction(Fn, Change);
      LLVM_DEBUG(dbgs() << "[AACallEdges] Unrecognized value: " << V << "\n");
      setHasUnknownCallee(true, Change);
      VisitValue(*V, CtxI);

    bool UsedAssumedInformation = false;
    for (auto &VAC : Values)
      VisitValue(*VAC.getValue(), VAC.getCtxI());

      if (IA->hasSideEffects() &&
        setHasUnknownCallee(false, Change);

      if (auto *IndirectCallAA = A.getAAFor<AAIndirectCallInfo>(
              *this, getIRPosition(), DepClassTy::OPTIONAL))
        if (IndirectCallAA->foreachCallee(
                [&](Function *Fn) { return VisitValue(*Fn, CB); }))

    for (const Use *U : CallbackUses)
      ProcessCalledOperand(U->get(), CB);
struct AACallEdgesFunction : public AACallEdgesImpl {
  AACallEdgesFunction(const IRPosition &IRP, Attributor &A)
      : AACallEdgesImpl(IRP, A) {}

      auto *CBEdges = A.getAAFor<AACallEdges>(
      if (CBEdges->hasNonAsmUnknownCallee())
        setHasUnknownCallee(true, Change);
      if (CBEdges->hasUnknownCallee())
        setHasUnknownCallee(false, Change);

      for (Function *F : CBEdges->getOptimisticEdges())
        addCalledFunction(F, Change);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(ProcessCallInst, *this,
                                           UsedAssumedInformation,
      setHasUnknownCallee(true, Change);
struct AAInterFnReachabilityFunction
    : public CachedReachabilityAA<AAInterFnReachability, Function> {
  using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
  AAInterFnReachabilityFunction(const IRPosition &IRP, Attributor &A)

  bool instructionCanReach(
      Attributor &A, const Instruction &From, const Function &To,
    auto *NonConstThis = const_cast<AAInterFnReachabilityFunction *>(this);
    RQITy StackRQI(A, From, To, ExclusionSet, false);
    typename RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
    return Result == RQITy::Reachable::Yes;

                       bool IsTemporaryRQI) override {
        &RQI.From->getFunction()->getEntryBlock().front();
    if (EntryI != RQI.From &&
        !instructionCanReach(A, *EntryI, *RQI.To, nullptr))
      return rememberResult(A, RQITy::Reachable::No, RQI, false,

    auto CheckReachableCallBase = [&](CallBase *CB) {
      auto *CBEdges = A.getAAFor<AACallEdges>(
      if (!CBEdges || !CBEdges->getState().isValidState())
      if (CBEdges->hasUnknownCallee())
      for (Function *Fn : CBEdges->getOptimisticEdges()) {
        if (Fn == getAnchorScope()) {
          if (EntryI == RQI.From)
        const AAInterFnReachability *InterFnReachability =
            DepClassTy::OPTIONAL);
        if (!InterFnReachability ||
      const auto *IntraFnReachability = A.getAAFor<AAIntraFnReachability>(
          DepClassTy::OPTIONAL);
      return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
                                        A, *RQI.From, CBInst, RQI.ExclusionSet);

    bool UsedExclusionSet = true;
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckCallBase, *this,
                                           UsedAssumedInformation,
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

  void trackStatistics() const override {}
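// A standalone sketch (plain C++) of the query cache pattern the reachability
// AA above relies on: a (from, to) query is answered from the cache when
// possible and otherwise recomputed and remembered. Integer ids stand in for
// instruction/function positions.
#include <map>
#include <utility>

enum class Reachable { Yes, No };

struct ReachabilityCache {
  std::map<std::pair<int, int>, Reachable> Cache; // keyed by (From, To)

  bool checkQueryCache(int From, int To, Reachable &Result) const {
    auto It = Cache.find({From, To});
    if (It == Cache.end())
      return false;      // not cached yet, caller must compute
    Result = It->second;
    return true;
  }
  Reachable rememberResult(int From, int To, Reachable R) {
    Cache[{From, To}] = R;
    return R;
  }
};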
template <typename AAType>
static std::optional<Constant *>
  if (!Ty.isIntegerTy())
  std::optional<Constant *> COpt = AA->getAssumedConstant(A);
  if (!COpt.has_value()) {
    return std::nullopt;
  if (auto *C = *COpt) {

  std::optional<Value *> V;
  for (auto &It : Values) {
    if (V.has_value() && !*V)
    if (!V.has_value())
    if (A.hasSimplificationCallback(getIRPosition())) {
      indicatePessimisticFixpoint();
    Value *Stripped = getAssociatedValue().stripPointerCasts();
      addValue(A, getState(), *Stripped, getCtxI(), AA::AnyScope,
      indicateOptimisticFixpoint();
    AAPotentialValues::initialize(A);

  const std::string getAsStr(Attributor *A) const override {
    llvm::raw_string_ostream OS(Str);

  template <typename AAType>
  static std::optional<Value *> askOtherAA(Attributor &A,
                                           const AbstractAttribute &AA,
                                           const IRPosition &IRP, Type &Ty) {
      return std::nullopt;

  virtual void addValue(Attributor &A, StateType &State, Value &V,
                        Function *AnchorScope) const {
      for (const auto &U : CB->args()) {
      Type &Ty = *getAssociatedType();
      std::optional<Value *> SimpleV =
          askOtherAA<AAValueConstantRange>(A, *this, ValIRP, Ty);
      if (SimpleV.has_value() && !*SimpleV) {
        auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
            *this, ValIRP, DepClassTy::OPTIONAL);
        if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
          for (const auto &It : PotentialConstantsAA->getAssumedSet())
            State.unionAssumed({{*ConstantInt::get(&Ty, It), nullptr}, S});
          if (PotentialConstantsAA->undefIsContained())
      if (!SimpleV.has_value())
    State.unionAssumed({{*VPtr, CtxI}, S});

    AA::ValueAndContext I;
      return II.I == I && II.S == S;
      return std::tie(I, S) < std::tie(II.I, II.S);

  bool recurseForValue(Attributor &A, const IRPosition &IRP, AA::ValueScope S) {
    SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
      bool UsedAssumedInformation = false;
      if (!A.getAssumedSimplifiedValues(IRP, this, Values, CS,
                                        UsedAssumedInformation))
      for (auto &It : Values)
        ValueScopeMap[It] += CS;
    for (auto &It : ValueScopeMap)
      addValue(A, getState(), *It.first.getValue(), It.first.getCtxI(),

  void giveUpOnIntraprocedural(Attributor &A) {
    auto NewS = StateType::getBestState(getState());
    for (const auto &It : getAssumedSet()) {
      addValue(A, NewS, *It.first.getValue(), It.first.getCtxI(),
    assert(!undefIsContained() && "Undef should be an explicit value!");

    getState() = StateType::getBestState(getState());
    getState().unionAssumed({{getAssociatedValue(), getCtxI()}, AA::AnyScope});
    AAPotentialValues::indicateOptimisticFixpoint();
    return ChangeStatus::CHANGED;
    return indicatePessimisticFixpoint();

    if (!getAssumedSimplifiedValues(A, Values, S))
    Value &OldV = getAssociatedValue();
    Value *NewV = getSingleValue(A, *this, getIRPosition(), Values);
    if (!NewV || NewV == &OldV)
    if (A.changeAfterManifest(getIRPosition(), *NewV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;

  bool getAssumedSimplifiedValues(
      Attributor &A, SmallVectorImpl<AA::ValueAndContext> &Values,
      AA::ValueScope S, bool RecurseForSelectAndPHI = false) const override {
    if (!isValidState())
    bool UsedAssumedInformation = false;
    for (const auto &It : getAssumedSet())
      if (It.second & S) {
        if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
          if (A.getAssumedSimplifiedValues(
                  this, Values, S, UsedAssumedInformation))
    assert(!undefIsContained() && "Undef should be an explicit value!");
struct AAPotentialValuesFloating : AAPotentialValuesImpl {
  AAPotentialValuesFloating(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

    auto AssumedBefore = getAssumed();
    genericValueTraversal(A, &getAssociatedValue());
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;

  struct LivenessInfo {
    const AAIsDead *LivenessAA = nullptr;
    bool AnyDead = false;

                      SmallVectorImpl<ItemInfo> &Worklist) {
    bool UsedAssumedInformation = false;
    auto GetSimplifiedValues = [&](Value &V,
      if (!A.getAssumedSimplifiedValues(
        Values.push_back(AA::ValueAndContext{V, II.I.getCtxI()});
      return Values.empty();
    if (GetSimplifiedValues(*LHS, LHSValues))
    if (GetSimplifiedValues(*RHS, RHSValues))

    InformationCache &InfoCache = A.getInfoCache();
        F ? A.getInfoCache().getTargetLibraryInfoForFunction(*F) : nullptr;
    const DataLayout &DL = A.getDataLayout();
    SimplifyQuery Q(DL, TLI, DT, AC, CmpI);

    auto CheckPair = [&](Value &LHSV, Value &RHSV) {
                 nullptr, II.S, getAnchorScope());
      if (&LHSV == &RHSV &&
        Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
        addValue(A, getState(), *NewV, nullptr, II.S,
      if (TypedLHS && TypedRHS) {
        if (NewV && NewV != &Cmp) {
          addValue(A, getState(), *NewV, nullptr, II.S,
      if (!LHSIsNull && !RHSIsNull)
      assert((LHSIsNull || RHSIsNull) &&
             "Expected nullptr versus non-nullptr comparison at this point");
      unsigned PtrIdx = LHSIsNull;
      bool IsKnownNonNull;
          DepClassTy::REQUIRED, IsKnownNonNull);
      if (!IsAssumedNonNull)
      addValue(A, getState(), *NewV, nullptr, II.S,

    for (auto &LHSValue : LHSValues)
      for (auto &RHSValue : RHSValues)
        if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))

  bool handleSelectInst(Attributor &A, SelectInst &SI, ItemInfo II,
                        SmallVectorImpl<ItemInfo> &Worklist) {
    bool UsedAssumedInformation = false;
    std::optional<Constant *> C =
        A.getAssumedConstant(*SI.getCondition(), *this, UsedAssumedInformation);
    bool NoValueYet = !C.has_value();
    } else if (&SI == &getAssociatedValue()) {
      std::optional<Value *> SimpleV = A.getAssumedSimplified(
      if (!SimpleV.has_value())
        addValue(A, getState(), **SimpleV, CtxI, II.S, getAnchorScope());

  bool handleLoadInst(Attributor &A, LoadInst &LI, ItemInfo II,
                      SmallVectorImpl<ItemInfo> &Worklist) {
    SmallSetVector<Value *, 4> PotentialCopies;
    SmallSetVector<Instruction *, 4> PotentialValueOrigins;
    bool UsedAssumedInformation = false;
                                        PotentialValueOrigins, *this,
                                        UsedAssumedInformation,
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Failed to get potentially "
                           "loaded values for load instruction "

    InformationCache &InfoCache = A.getInfoCache();
    if (!llvm::all_of(PotentialValueOrigins, [&](Instruction *I) {
            return A.isAssumedDead(SI->getOperandUse(0), this,
                                   UsedAssumedInformation,
          return A.isAssumedDead(*I, this, nullptr,
                                 UsedAssumedInformation,
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Load is only used by assumes "
                           "and we cannot delete all the stores: "

    bool AllLocal = ScopeIsLocal;
    if (!DynamicallyUnique) {
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Not all potentially loaded "
                           "values are dynamically unique: "
    for (auto *PotentialCopy : PotentialCopies) {
      Worklist.push_back({{*PotentialCopy, CtxI}, II.S});
    if (!AllLocal && ScopeIsLocal)

  bool handlePHINode(
      Attributor &A, PHINode &PHI, ItemInfo II,
      SmallVectorImpl<ItemInfo> &Worklist,
      SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
    auto GetLivenessInfo = [&](const Function &F) -> LivenessInfo & {
      LivenessInfo &LI = LivenessAAs[&F];
      if (!LI.LivenessAA)
    if (&PHI == &getAssociatedValue()) {
      LivenessInfo &LI = GetLivenessInfo(*PHI.getFunction());
          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *PHI.getFunction());
      for (unsigned u = 0, e = PHI.getNumIncomingValues(); u < e; u++) {
        if (LI.LivenessAA &&
            LI.LivenessAA->isEdgeDead(IncomingBB, PHI.getParent())) {
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimpleV = A.getAssumedSimplified(
    if (!SimpleV.has_value())
      addValue(A, getState(), **SimpleV, &PHI, II.S, getAnchorScope());

  bool handleGenericInst(Attributor &A, Instruction &I, ItemInfo II,
                         SmallVectorImpl<ItemInfo> &Worklist) {
    bool SomeSimplified = false;
    bool UsedAssumedInformation = false;
    SmallVector<Value *, 8> NewOps(I.getNumOperands());
      const auto &SimplifiedOp = A.getAssumedSimplified(
      if (!SimplifiedOp.has_value())
        NewOps[Idx] = *SimplifiedOp;
      SomeSimplified |= (NewOps[Idx] != Op);
    if (!SomeSimplified)

    InformationCache &InfoCache = A.getInfoCache();
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    const DataLayout &DL = I.getDataLayout();
    SimplifyQuery Q(DL, TLI, DT, AC, &I);
    if (!NewV || NewV == &I)
    LLVM_DEBUG(dbgs() << "Generic inst " << I << " assumed simplified to "

                     Attributor &A, Instruction &I, ItemInfo II,
                     SmallVectorImpl<ItemInfo> &Worklist,
                     SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
                             CI->getPredicate(), II, Worklist);
    switch (I.getOpcode()) {
    case Instruction::Select:
    case Instruction::PHI:
    case Instruction::Load:
      return handleGenericInst(A, I, II, Worklist);

  void genericValueTraversal(Attributor &A, Value *InitialV) {
    SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;
    SmallSet<ItemInfo, 16> Visited;
        LLVM_DEBUG(dbgs() << "Generic value traversal reached iteration limit: "
                          << Iteration << "!\n");
        addValue(A, getState(), *V, CtxI, S, getAnchorScope());
      Value *NewV = nullptr;
      if (V->getType()->isPointerTy()) {
          for (Argument &Arg : Callee->args())
      if (NewV && NewV != V) {
        Worklist.push_back({{*NewV, CtxI}, S});
      if (V == InitialV && CtxI == getCtxI()) {
        indicatePessimisticFixpoint();
      addValue(A, getState(), *V, CtxI, S, getAnchorScope());
    } while (!Worklist.empty());

    for (auto &It : LivenessAAs)
      if (It.second.AnyDead)
        A.recordDependence(*It.second.LivenessAA, *this, DepClassTy::OPTIONAL);

  void trackStatistics() const override {
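// A standalone sketch (plain C++) of the generic value traversal above: a
// worklist of items, a visited set guaranteeing termination, and an iteration
// budget after which the traversal gives up and keeps the current value as a
// leaf. The Expand/Leaf callbacks are hypothetical placeholders for the
// instruction handlers and addValue() in the real code.
#include <set>
#include <vector>

template <typename ItemT, typename ExpandT, typename LeafT>
void genericTraversal(ItemT Root, ExpandT Expand, LeafT Leaf,
                      unsigned MaxIterations = 64) {
  std::vector<ItemT> Worklist{Root};
  std::set<ItemT> Visited;
  unsigned Iteration = 0;
  do {
    ItemT I = Worklist.back();
    Worklist.pop_back();
    if (!Visited.insert(I).second)
      continue; // already handled
    // Expand may push successors; if it cannot look through I, keep the leaf.
    if (++Iteration > MaxIterations || !Expand(I, Worklist))
      Leaf(I);
  } while (!Worklist.empty());
}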
struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
  using Base = AAPotentialValuesImpl;
  AAPotentialValuesArgument(const IRPosition &IRP, Attributor &A)

      indicatePessimisticFixpoint();

    auto AssumedBefore = getAssumed();
    unsigned ArgNo = getCalleeArgNo();
    bool UsedAssumedInformation = false;

    auto CallSitePred = [&](AbstractCallSite ACS) {
      if (CSArgIRP.getPositionKind() == IRP_INVALID)
      if (!A.getAssumedSimplifiedValues(CSArgIRP, this, Values,
                                        UsedAssumedInformation))
      return isValidState();

    if (!A.checkForAllCallSites(CallSitePred, *this,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    Function *Fn = getAssociatedFunction();
    bool AnyNonLocal = false;
    for (auto &It : Values) {
        addValue(A, getState(), *It.getValue(), It.getCtxI(), AA::AnyScope,
          return indicatePessimisticFixpoint();
        addValue(A, getState(), *It.getValue(), It.getCtxI(), AA::AnyScope,
      AnyNonLocal = true;
    assert(!undefIsContained() && "Undef should be an explicit value!");
      giveUpOnIntraprocedural(A);

    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;

  void trackStatistics() const override {
struct AAPotentialValuesReturned : public AAPotentialValuesFloating {
  using Base = AAPotentialValuesFloating;
  AAPotentialValuesReturned(const IRPosition &IRP, Attributor &A)

    if (!F || F->isDeclaration() || F->getReturnType()->isVoidTy()) {
      indicatePessimisticFixpoint();
    for (Argument &Arg : F->args())
        ReturnedArg = &Arg;
    if (!A.isFunctionIPOAmendable(*F) ||
        A.hasSimplificationCallback(getIRPosition())) {
        indicatePessimisticFixpoint();
        indicateOptimisticFixpoint();

    auto AssumedBefore = getAssumed();
    bool UsedAssumedInformation = false;
    Function *AnchorScope = getAnchorScope();
                                        UsedAssumedInformation,
      bool AllInterAreIntra = false;
          llvm::all_of(Values, [&](const AA::ValueAndContext &VAC) {
      for (const AA::ValueAndContext &VAC : Values) {
        addValue(A, getState(), *VAC.getValue(),
                 VAC.getCtxI() ? VAC.getCtxI() : CtxI,
      if (AllInterAreIntra)
      HandleReturnedValue(*ReturnedArg, nullptr, true);
      bool AddValues = true;
        addValue(A, getState(), *RetI.getOperand(0), &RetI, AA::AnyScope,
      return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);

    if (!A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                                   UsedAssumedInformation,
      return indicatePessimisticFixpoint();

    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;

      return ChangeStatus::UNCHANGED;
    if (!getAssumedSimplifiedValues(A, Values, AA::ValueScope::Intraprocedural,
      return ChangeStatus::UNCHANGED;
    Value *NewVal = getSingleValue(A, *this, getIRPosition(), Values);
      return ChangeStatus::UNCHANGED;
                      "Number of function with unique return");
          {Attribute::get(Arg->getContext(), Attribute::Returned)});
      Value *RetOp = RetI.getOperand(0);
        if (A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
          Changed = ChangeStatus::CHANGED;
    bool UsedAssumedInformation = false;
    (void)A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                                    UsedAssumedInformation,

    return AAPotentialValues::indicatePessimisticFixpoint();

  void trackStatistics() const override {
struct AAPotentialValuesFunction : AAPotentialValuesImpl {
  AAPotentialValuesFunction(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  void trackStatistics() const override {

struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
  AAPotentialValuesCallSite(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFunction(IRP, A) {}

  void trackStatistics() const override {

struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
  AAPotentialValuesCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

    auto AssumedBefore = getAssumed();
      return indicatePessimisticFixpoint();
    bool UsedAssumedInformation = false;
                                      UsedAssumedInformation))
      return indicatePessimisticFixpoint();
                                        Values, S, UsedAssumedInformation))
      for (auto &It : Values) {
        Value *V = It.getValue();
        std::optional<Value *> CallerV = A.translateArgumentToCallSiteContent(
            V, *CB, *this, UsedAssumedInformation);
        if (!CallerV.has_value()) {
        V = *CallerV ? *CallerV : V;
            giveUpOnIntraprocedural(A);
        addValue(A, getState(), *V, CB, S, getAnchorScope());
      return indicatePessimisticFixpoint();
      return indicatePessimisticFixpoint();
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;

    return AAPotentialValues::indicatePessimisticFixpoint();

  void trackStatistics() const override {

struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
  AAPotentialValuesCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFloating(IRP, A) {}

  void trackStatistics() const override {
struct AAAssumptionInfoImpl : public AAAssumptionInfo {
  AAAssumptionInfoImpl(const IRPosition &IRP, Attributor &A,
                       const DenseSet<StringRef> &Known)
      : AAAssumptionInfo(IRP, A, Known) {}

    if (getKnown().isUniversal())
      return ChangeStatus::UNCHANGED;

    const IRPosition &IRP = getIRPosition();
                                 getAssumed().getSet().end());
    return A.manifestAttrs(IRP,

  bool hasAssumption(const StringRef Assumption) const override {
    return isValidState() && setContains(Assumption);

  const std::string getAsStr(Attributor *A) const override {
    const SetContents &Known = getKnown();
    const SetContents &Assumed = getAssumed();
    const std::string KnownStr = llvm::join(Set, ",");

    std::string AssumedStr = "Universal";
    if (!Assumed.isUniversal()) {
      Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
    return "Known [" + KnownStr + "]," + " Assumed [" + AssumedStr + "]";

struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
  AAAssumptionInfoFunction(const IRPosition &IRP, Attributor &A)
      : AAAssumptionInfoImpl(IRP, A,

    auto CallSitePred = [&](AbstractCallSite ACS) {
      const auto *AssumptionAA = A.getAAFor<AAAssumptionInfo>(
          DepClassTy::REQUIRED);
      Changed |= getIntersection(AssumptionAA->getAssumed());
      return !getAssumed().empty() || !getKnown().empty();

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallSites(CallSitePred, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;

  void trackStatistics() const override {}

struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
  AAAssumptionInfoCallSite(const IRPosition &IRP, Attributor &A)
      : AAAssumptionInfoImpl(IRP, A, getInitialAssumptions(IRP)) {}

    A.getAAFor<AAAssumptionInfo>(*this, FnPos, DepClassTy::REQUIRED);

    auto *AssumptionAA =
        A.getAAFor<AAAssumptionInfo>(*this, FnPos, DepClassTy::REQUIRED);
      return indicatePessimisticFixpoint();
    bool Changed = getIntersection(AssumptionAA->getAssumed());
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;

  void trackStatistics() const override {}

  DenseSet<StringRef> getInitialAssumptions(const IRPosition &IRP) {
    return Assumptions;
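// A standalone sketch (plain C++) of the assumption propagation above: a
// function's assumed "assumption" strings are the intersection of the sets
// seen at all of its call sites, so any call site lacking a string drops it.
// The helper name mirrors getIntersection() in the AA but is illustrative.
#include <set>
#include <string>
#include <utility>

using Assumptions = std::set<std::string>;

bool getIntersection(Assumptions &Assumed, const Assumptions &CallSite) {
  Assumptions Result;
  for (const std::string &S : Assumed)
    if (CallSite.count(S))
      Result.insert(S);
  bool Changed = Result.size() != Assumed.size();
  Assumed = std::move(Result);
  return Changed; // corresponds to ChangeStatus::CHANGED
}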
struct AAUnderlyingObjectsImpl

  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "<invalid>";
    OS << "underlying objects: inter " << InterAssumedUnderlyingObjects.size()
       << " objects, intra " << IntraAssumedUnderlyingObjects.size()
    if (!InterAssumedUnderlyingObjects.empty()) {
      OS << "inter objects:\n";
      for (auto *Obj : InterAssumedUnderlyingObjects)
        OS << *Obj << '\n';
    if (!IntraAssumedUnderlyingObjects.empty()) {
      OS << "intra objects:\n";
      for (auto *Obj : IntraAssumedUnderlyingObjects)
        OS << *Obj << '\n';

  void trackStatistics() const override {}

    auto &Ptr = getAssociatedValue();
    bool UsedAssumedInformation = false;
    auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
      SmallPtrSet<Value *, 8> SeenObjects;
                                        Scope, UsedAssumedInformation))
      for (unsigned I = 0; I < Values.size(); ++I) {
        auto &VAC = Values[I];
        auto *Obj = VAC.getValue();
        if (!SeenObjects.insert(UO ? UO : Obj).second)
        if (UO && UO != Obj) {
          const auto *OtherAA = A.getAAFor<AAUnderlyingObjects>(
          auto Pred = [&](Value &V) {
          if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
              "The forall call should not return false at this position");
          Changed |= handleIndirect(A, *Obj, UnderlyingObjects, Scope,
                                    UsedAssumedInformation);
          for (unsigned u = 0, e = PHI->getNumIncomingValues(); u < e; u++) {
                handleIndirect(A, *PHI->getIncomingValue(u), UnderlyingObjects,
                               Scope, UsedAssumedInformation);

    if (!UsedAssumedInformation)
      indicateOptimisticFixpoint();
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;

  bool forallUnderlyingObjects(
      function_ref<bool(Value &)> Pred,
    if (!isValidState())
      return Pred(getAssociatedValue());
                                           ? IntraAssumedUnderlyingObjects
                                           : InterAssumedUnderlyingObjects;
    for (Value *Obj : AssumedUnderlyingObjects)

  bool handleIndirect(Attributor &A, Value &V,
                      SmallSetVector<Value *, 8> &UnderlyingObjects,
    const auto *AA = A.getAAFor<AAUnderlyingObjects>(
    auto Pred = [&](Value &V) {
    if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
        "The forall call should not return false at this position");

  SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;
  SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;

struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFloating(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}

struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}

struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSite(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}

struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}

struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}

struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}

struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFunction(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
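// A standalone sketch (plain C++) of the forallUnderlyingObjects contract used
// above: in an invalid state the query degrades to the associated value
// itself; otherwise the predicate must hold for every collected object in the
// requested scope. Integer ids stand in for Value pointers.
#include <functional>
#include <vector>

struct UnderlyingObjectsState {
  bool Valid = true;
  std::vector<int> IntraObjects, InterObjects;
  int AssociatedValue = -1;

  bool forallUnderlyingObjects(const std::function<bool(int)> &Pred,
                               bool Intraprocedural) const {
    if (!Valid)
      return Pred(AssociatedValue); // fall back to the value itself
    const auto &Objs = Intraprocedural ? IntraObjects : InterObjects;
    for (int Obj : Objs)
      if (!Pred(Obj))
        return false;
    return true;
  }
};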
struct AAGlobalValueInfoFloating : public AAGlobalValueInfo {
  AAGlobalValueInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAGlobalValueInfo(IRP, A) {}

  bool checkUse(Attributor &A, const Use &U, bool &Follow,
                SmallVectorImpl<const Value *> &Worklist) {
    LLVM_DEBUG(dbgs() << "[AAGlobalValueInfo] Check use: " << *U.get() << " in "
                      << *UInst << "\n");
      int Idx = &Cmp->getOperandUse(0) == &U;
      return U == &getAnchorValue();

    auto CallSitePred = [&](AbstractCallSite ACS) {
      Worklist.push_back(ACS.getInstruction());
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallSites(CallSitePred, *UInst->getFunction(),
                                UsedAssumedInformation))
    if (!Fn || !A.isFunctionIPOAmendable(*Fn))

    unsigned NumUsesBefore = Uses.size();
    SmallPtrSet<const Value *, 8> Visited;
    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      return checkUse(A, U, Follow, Worklist);
    auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
      Uses.insert(&OldU);
    while (!Worklist.empty()) {
      if (!Visited.insert(V).second)
      if (!A.checkForAllUses(UsePred, *this, *V,
                             DepClassTy::OPTIONAL,
                             true, EquivalentUseCB)) {
        return indicatePessimisticFixpoint();

    return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
                                        : ChangeStatus::CHANGED;

  bool isPotentialUse(const Use &U) const override {
    return !isValidState() || Uses.contains(&U);

    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return "[" + std::to_string(Uses.size()) + " uses]";

  void trackStatistics() const override {

  SmallPtrSet<const Use *, 8> Uses;
struct AAIndirectCallInfoCallSite : public AAIndirectCallInfo {
  AAIndirectCallInfoCallSite(const IRPosition &IRP, Attributor &A)
      : AAIndirectCallInfo(IRP, A) {}

    auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
    if (!MD && !A.isClosedWorldModule())
      for (const auto &Op : MD->operands())
          PotentialCallees.insert(Callee);
    } else if (A.isClosedWorldModule()) {
          A.getInfoCache().getIndirectlyCallableFunctions(A);
      PotentialCallees.insert_range(IndirectlyCallableFunctions);

    if (PotentialCallees.empty())
      indicateOptimisticFixpoint();

    SmallSetVector<Function *, 4> AssumedCalleesNow;
    bool AllCalleesKnownNow = AllCalleesKnown;

    auto CheckPotentialCalleeUse = [&](Function &PotentialCallee,
                                       bool &UsedAssumedInformation) {
      const auto *GIAA = A.getAAFor<AAGlobalValueInfo>(
      if (!GIAA || GIAA->isPotentialUse(CalleeUse))
      UsedAssumedInformation = !GIAA->isAtFixpoint();

    auto AddPotentialCallees = [&]() {
      for (auto *PotentialCallee : PotentialCallees) {
        bool UsedAssumedInformation = false;
        if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
          AssumedCalleesNow.insert(PotentialCallee);

    bool UsedAssumedInformation = false;
                                      AA::ValueScope::AnyScope,
                                      UsedAssumedInformation)) {
      if (PotentialCallees.empty())
        return indicatePessimisticFixpoint();
      AddPotentialCallees();

    auto CheckPotentialCallee = [&](Function &Fn) {
      if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
      auto &CachedResult = FilterResults[&Fn];
      if (CachedResult.has_value())
        return CachedResult.value();
      bool UsedAssumedInformation = false;
      if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
        if (!UsedAssumedInformation)
          CachedResult = false;
      for (int I = NumCBArgs; I < NumFnArgs; ++I) {
        bool IsKnown = false;
                DepClassTy::OPTIONAL, IsKnown)) {
            CachedResult = false;
      CachedResult = true;

    for (auto &VAC : Values) {
          VAC.getValue()->getType()->getPointerAddressSpace() == 0)
      if (CheckPotentialCallee(*VACFn))
        AssumedCalleesNow.insert(VACFn);

    if (!PotentialCallees.empty()) {
      AddPotentialCallees();
      AllCalleesKnownNow = false;

    if (AssumedCalleesNow == AssumedCallees &&
        AllCalleesKnown == AllCalleesKnownNow)
      return ChangeStatus::UNCHANGED;

    std::swap(AssumedCallees, AssumedCalleesNow);
    AllCalleesKnown = AllCalleesKnownNow;
    return ChangeStatus::CHANGED;

    if (!AllCalleesKnown && AssumedCallees.empty())
      return ChangeStatus::UNCHANGED;

    bool UsedAssumedInformation = false;
    if (A.isAssumedDead(*CB, this, nullptr,
                        UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;

    if (FP->getType()->getPointerAddressSpace())
      FP = new AddrSpaceCastInst(FP, PointerType::get(FP->getContext(), 0),

    if (AssumedCallees.empty()) {
      assert(AllCalleesKnown &&
             "Expected all callees to be known if there are none.");
      A.changeToUnreachableAfterManifest(CB);
      return ChangeStatus::CHANGED;

    if (AllCalleesKnown && AssumedCallees.size() == 1) {
      auto *NewCallee = AssumedCallees.front();
      NumIndirectCallsPromoted++;
      return ChangeStatus::CHANGED;
      A.deleteAfterManifest(*CB);
      return ChangeStatus::CHANGED;

    bool SpecializedForAnyCallees = false;
    bool SpecializedForAllCallees = AllCalleesKnown;
    ICmpInst *LastCmp = nullptr;
    for (Function *NewCallee : AssumedCallees) {
      if (!A.shouldSpecializeCallSiteForCallee(*this, *CB, *NewCallee,
                                               AssumedCallees.size())) {
        SkippedAssumedCallees.push_back(NewCallee);
        SpecializedForAllCallees = false;
      SpecializedForAnyCallees = true;

      A.registerManifestAddedBasicBlock(*ThenTI->getParent());
      A.registerManifestAddedBasicBlock(*IP->getParent());
      A.registerManifestAddedBasicBlock(*ElseBB);
      SplitTI->replaceUsesOfWith(CBBB, ElseBB);

      CastInst *RetBC = nullptr;
      CallInst *NewCall = nullptr;
        NumIndirectCallsPromoted++;

    auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
      if (!AllCalleesKnown)
        return ChangeStatus::UNCHANGED;
      MDBuilder MDB(IndirectCB.getContext());
      MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
      IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
      return ChangeStatus::CHANGED;

    if (!SpecializedForAnyCallees)
      return AttachCalleeMetadata(*CB);

    if (SpecializedForAllCallees) {
      new UnreachableInst(IP->getContext(), IP);
      IP->eraseFromParent();
      CBClone->setName(CB->getName());
      CBClone->insertBefore(*IP->getParent(), IP);
      NewCalls.push_back({CBClone, nullptr});
      AttachCalleeMetadata(*CBClone);

                     CB->getParent()->getFirstInsertionPt());
    for (auto &It : NewCalls) {
      CallBase *NewCall = It.first;
      Instruction *CallRet = It.second ? It.second : It.first;

    A.deleteAfterManifest(*CB);
    Changed = ChangeStatus::CHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return std::string(AllCalleesKnown ? "eliminate" : "specialize") +
           " indirect call site with " + std::to_string(AssumedCallees.size()) +

  void trackStatistics() const override {
    if (AllCalleesKnown) {
          Eliminated, CallSites,
          "Number of indirect call sites eliminated via specialization")
          "Number of indirect call sites specialized")

  bool foreachCallee(function_ref<bool(Function *)> CB) const override {
    return isValidState() && AllCalleesKnown && all_of(AssumedCallees, CB);

  DenseMap<Function *, std::optional<bool>> FilterResults;

  SmallSetVector<Function *, 4> PotentialCallees;

  SmallSetVector<Function *, 4> AssumedCallees;

  bool AllCalleesKnown = true;
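// A standalone sketch (plain C++) of the manifest decision above for an
// indirect call with a set of assumed callees: no callees plus full knowledge
// means the site is unreachable, exactly one known callee means direct
// promotion, and otherwise the site is specialized per callee with a possible
// fallback. The enum and helper are illustrative, not part of the Attributor.
#include <cstdio>
#include <string>
#include <vector>

enum class Action { MakeUnreachable, PromoteToDirectCall, SpecializeWithFallback, Keep };

Action decide(const std::vector<std::string> &AssumedCallees, bool AllCalleesKnown) {
  if (AssumedCallees.empty())
    return AllCalleesKnown ? Action::MakeUnreachable : Action::Keep;
  if (AllCalleesKnown && AssumedCallees.size() == 1)
    return Action::PromoteToDirectCall;
  return Action::SpecializeWithFallback;
}

int main() {
  std::printf("%d\n", (int)decide({"callee_a"}, /*AllCalleesKnown=*/true)); // 1
}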
struct AAInvariantLoadPointerImpl
    : public StateWrapper<BitIntegerState<uint8_t, 15>,
                          AAInvariantLoadPointer> {
    IS_NOALIAS = 1 << 0,
    IS_NOEFFECT = 1 << 1,
    IS_LOCALLY_INVARIANT = 1 << 2,
    IS_LOCALLY_CONSTRAINED = 1 << 3,
    IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
                    IS_LOCALLY_CONSTRAINED,
  static_assert(getBestState() == IS_BEST_STATE, "Unexpected best state");
      StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;

  AAInvariantLoadPointerImpl(const IRPosition &IRP, Attributor &A)

  bool isKnownInvariant() const final {
    return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);

  bool isKnownLocallyInvariant() const final {
    if (isKnown(IS_LOCALLY_INVARIANT))
    return isKnown(IS_NOALIAS | IS_NOEFFECT);

  bool isAssumedInvariant() const final {
    return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);

  bool isAssumedLocallyInvariant() const final {
    if (isAssumed(IS_LOCALLY_INVARIANT))
    return isAssumed(IS_NOALIAS | IS_NOEFFECT);

    if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
      return indicatePessimisticFixpoint();
    Changed |= updateLocalInvariance(A);

    if (!isKnownInvariant())
      return ChangeStatus::UNCHANGED;

    const Value *Ptr = &getAssociatedValue();
    const auto TagInvariantLoads = [&](const Use &U, bool &) {
      if (U.get() != Ptr)
      if (!A.isRunOn(I->getFunction()))
      if (I->hasMetadata(LLVMContext::MD_invariant_load))
        LI->setMetadata(LLVMContext::MD_invariant_load,
        Changed = ChangeStatus::CHANGED;
    (void)A.checkForAllUses(TagInvariantLoads, *this, *Ptr);

  const std::string getAsStr(Attributor *) const override {
    if (isKnownInvariant())
      return "load-invariant pointer";
    return "non-invariant pointer";

  void trackStatistics() const override {}

  bool requiresNoAlias() const {
    switch (getPositionKind()) {
    case IRP_CALL_SITE:
    case IRP_CALL_SITE_RETURNED: {
    case IRP_ARGUMENT: {
      const Function *F = getAssociatedFunction();
      assert(F && "no associated function for argument");

  bool isExternal() const {
    const Function *F = getAssociatedFunction();
           getPositionKind() != IRP_CALL_SITE_RETURNED;

    if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
      return ChangeStatus::UNCHANGED;
    if (const auto *ANoAlias = A.getOrCreateAAFor<AANoAlias>(
            getIRPosition(), this, DepClassTy::REQUIRED)) {
      if (ANoAlias->isKnownNoAlias()) {
        addKnownBits(IS_NOALIAS);
        return ChangeStatus::CHANGED;
      if (!ANoAlias->isAssumedNoAlias()) {
        removeAssumedBits(IS_NOALIAS);
        return ChangeStatus::CHANGED;
      return ChangeStatus::UNCHANGED;

    if (const Argument *Arg = getAssociatedArgument()) {
        addKnownBits(IS_NOALIAS);
        return ChangeStatus::UNCHANGED;
      removeAssumedBits(IS_NOALIAS);
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;

    if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
      return ChangeStatus::UNCHANGED;
    if (!getAssociatedFunction())
      return indicatePessimisticFixpoint();
      return indicatePessimisticFixpoint();
    const auto HasNoEffectLoads = [&](const Use &U, bool &) {
      return !LI || !LI->mayHaveSideEffects();
    if (!A.checkForAllUses(HasNoEffectLoads, *this, getAssociatedValue()))
      return indicatePessimisticFixpoint();

    if (const auto *AMemoryBehavior = A.getOrCreateAAFor<AAMemoryBehavior>(
            getIRPosition(), this, DepClassTy::REQUIRED)) {
      if (!AMemoryBehavior->isAssumedReadOnly())
        return indicatePessimisticFixpoint();
      if (AMemoryBehavior->isKnownReadOnly()) {
        addKnownBits(IS_NOEFFECT);
        return ChangeStatus::UNCHANGED;
      return ChangeStatus::UNCHANGED;

    if (const Argument *Arg = getAssociatedArgument()) {
        addKnownBits(IS_NOEFFECT);
        return ChangeStatus::UNCHANGED;
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

    if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
      return ChangeStatus::UNCHANGED;
    const auto *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(
        getIRPosition(), this, DepClassTy::REQUIRED);
      return ChangeStatus::UNCHANGED;

    bool UsedAssumedInformation = false;
    const auto IsLocallyInvariantLoadIfPointer = [&](const Value &V) {
      if (!V.getType()->isPointerTy())
      const auto *IsInvariantLoadPointer =
          DepClassTy::REQUIRED);
      if (!IsInvariantLoadPointer)
      if (IsInvariantLoadPointer->isKnownLocallyInvariant())
      if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
      UsedAssumedInformation = true;
    if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
      return indicatePessimisticFixpoint();
        if (!IsLocallyInvariantLoadIfPointer(*Arg))
          return indicatePessimisticFixpoint();

    if (!UsedAssumedInformation) {
      addKnownBits(IS_LOCALLY_INVARIANT);
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;

struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerFloating(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerReturned(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}
    removeAssumedBits(IS_LOCALLY_CONSTRAINED);

struct AAInvariantLoadPointerCallSiteReturned final
    : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

    const Function *F = getAssociatedFunction();
    assert(F && "no associated function for return from call");
    if (!F->isDeclaration() && !F->isIntrinsic())
      return AAInvariantLoadPointerImpl::initialize(A);
      return AAInvariantLoadPointerImpl::initialize(A);
    if (F->onlyReadsMemory() && F->hasNoSync())
      return AAInvariantLoadPointerImpl::initialize(A);
    indicatePessimisticFixpoint();

struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerArgument(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

    const Function *F = getAssociatedFunction();
    assert(F && "no associated function for argument");
      addKnownBits(IS_LOCALLY_CONSTRAINED);
    if (!F->hasLocalLinkage())
      removeAssumedBits(IS_LOCALLY_CONSTRAINED);

struct AAInvariantLoadPointerCallSiteArgument final
    : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}
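// A standalone sketch (plain C++) of the bit encoding used by
// AAInvariantLoadPointerImpl above: local invariance follows either from the
// dedicated bit or from noalias together with no-effect, and full invariance
// additionally requires the locally-constrained bit. Only the "known" side is
// modeled here.
#include <cassert>
#include <cstdint>

enum : uint8_t {
  IS_NOALIAS = 1 << 0,
  IS_NOEFFECT = 1 << 1,
  IS_LOCALLY_INVARIANT = 1 << 2,
  IS_LOCALLY_CONSTRAINED = 1 << 3,
};

struct InvariantPtrState {
  uint8_t Known = 0;
  bool isKnown(uint8_t Bits) const { return (Known & Bits) == Bits; }
  bool isKnownLocallyInvariant() const {
    return isKnown(IS_LOCALLY_INVARIANT) || isKnown(IS_NOALIAS | IS_NOEFFECT);
  }
  bool isKnownInvariant() const {
    return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
  }
};

int main() {
  InvariantPtrState S;
  S.Known = IS_NOALIAS | IS_NOEFFECT;
  assert(S.isKnownLocallyInvariant() && !S.isKnownInvariant());
}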
template <typename InstType>
static bool makeChange(Attributor &A, InstType *MemInst, const Use &U,
                       Value *OriginalValue, PointerType *NewPtrTy,
                       bool UseOriginalValue) {
  if (U.getOperandNo() != InstType::getPointerOperandIndex())
  if (MemInst->isVolatile()) {
    auto *TTI = A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
        *MemInst->getFunction());
    unsigned NewAS = NewPtrTy->getPointerAddressSpace();
  if (UseOriginalValue) {
    A.changeUseAfterManifest(const_cast<Use &>(U), *OriginalValue);
  Instruction *CastInst = new AddrSpaceCastInst(OriginalValue, NewPtrTy);
  A.changeUseAfterManifest(const_cast<Use &>(U), *CastInst);

struct AAAddressSpaceImpl : public AAAddressSpace {
  AAAddressSpaceImpl(const IRPosition &IRP, Attributor &A)
      : AAAddressSpace(IRP, A) {}

    assert(isValidState() && "the AA is invalid");
    return AssumedAddressSpace;

    assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
           "Associated value is not a pointer");
    if (!A.getInfoCache().getFlatAddressSpace().has_value()) {
      indicatePessimisticFixpoint();
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != FlatAS) {
      [[maybe_unused]] bool R = takeAddressSpace(AS);
      assert(R && "The take should happen");
      indicateOptimisticFixpoint();

    uint32_t OldAddressSpace = AssumedAddressSpace;
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    auto CheckAddressSpace = [&](Value &Obj) {
      unsigned ObjAS = Obj.getType()->getPointerAddressSpace();
      if (ObjAS != FlatAS)
        return takeAddressSpace(ObjAS);
          A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
        if (AssumedAS != ~0U)
          return takeAddressSpace(AssumedAS);
      return takeAddressSpace(FlatAS);

    auto *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(), this,
                                                        DepClassTy::REQUIRED);
    if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
      return indicatePessimisticFixpoint();

    return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
                                                  : ChangeStatus::CHANGED;

    if (NewAS == InvalidAddressSpace ||
      return ChangeStatus::UNCHANGED;

    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    Value *AssociatedValue = &getAssociatedValue();
    Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
        PointerType::get(getAssociatedType()->getContext(), NewAS);
    bool UseOriginalValue =

    auto Pred = [&](const Use &U, bool &) {
      if (U.get() != AssociatedValue)
        makeChange(A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
        makeChange(A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
        makeChange(A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
        makeChange(A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);

    (void)A.checkForAllUses(Pred, *this, getAssociatedValue(),
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "addrspace(<invalid>)";
    return "addrspace(" +
           (AssumedAddressSpace == InvalidAddressSpace
                : std::to_string(AssumedAddressSpace)) +

  uint32_t AssumedAddressSpace = InvalidAddressSpace;

  bool takeAddressSpace(uint32_t AS) {
    if (AssumedAddressSpace == InvalidAddressSpace) {
      AssumedAddressSpace = AS;
    return AssumedAddressSpace == AS;

  static Value *peelAddrspacecast(Value *V, unsigned FlatAS) {
      assert(I->getSrcAddressSpace() != FlatAS &&
             "there should not be flat AS -> non-flat AS");
      return I->getPointerOperand();
      if (C->getOpcode() == Instruction::AddrSpaceCast) {
        assert(C->getOperand(0)->getType()->getPointerAddressSpace() !=
               "there should not be flat AS -> non-flat AS X");
        return C->getOperand(0);

struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
  AAAddressSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override {

struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
  AAAddressSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

    (void)indicatePessimisticFixpoint();

  void trackStatistics() const override {

struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override {

struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
  AAAddressSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

    (void)indicatePessimisticFixpoint();

  void trackStatistics() const override {
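// A standalone sketch (plain C++) of the takeAddressSpace logic above: the AA
// can only commit to a single concrete address space, so the first observed
// space is adopted and any later, different space invalidates the deduction.
#include <cassert>
#include <cstdint>

struct AddressSpaceState {
  static constexpr uint32_t Invalid = ~0u;
  uint32_t Assumed = Invalid;

  bool takeAddressSpace(uint32_t AS) {
    if (Assumed == Invalid) {
      Assumed = AS;
      return true;
    }
    return Assumed == AS; // a second, different AS means we must give up
  }
};

int main() {
  AddressSpaceState S;
  assert(S.takeAddressSpace(3));
  assert(S.takeAddressSpace(3));
  assert(!S.takeAddressSpace(1)); // conflicting underlying objects
}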
struct AANoAliasAddrSpaceImpl : public AANoAliasAddrSpace {
  AANoAliasAddrSpaceImpl(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpace(IRP, A) {}

    assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
           "Associated value is not a pointer");
    std::optional<unsigned> FlatAS = A.getInfoCache().getFlatAddressSpace();
    if (!FlatAS.has_value()) {
      indicatePessimisticFixpoint();
    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != *FlatAS) {
      indicateOptimisticFixpoint();

    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    uint32_t OldAssumed = getAssumed();
    auto CheckAddressSpace = [&](Value &Obj) {
      unsigned AS = Obj.getType()->getPointerAddressSpace();
      removeAS(Obj.getType()->getPointerAddressSpace());

    const AAUnderlyingObjects *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(
        getIRPosition(), this, DepClassTy::REQUIRED);
      return indicatePessimisticFixpoint();

    return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
                                      : ChangeStatus::CHANGED;

    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != FlatAS || Map.empty())
      return ChangeStatus::UNCHANGED;

    LLVMContext &Ctx = getAssociatedValue().getContext();
    MDNode *NoAliasASNode = nullptr;
    MDBuilder MDB(Ctx);
    for (RangeMap::const_iterator I = Map.begin(); I != Map.end(); I++) {
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      if (!NoAliasASNode) {
        NoAliasASNode = MDB.createRange(APInt(32, Lower), APInt(32, Upper + 1));
      MDNode *ASRange = MDB.createRange(APInt(32, Lower), APInt(32, Upper + 1));

    Value *AssociatedValue = &getAssociatedValue();
    auto AddNoAliasAttr = [&](const Use &U, bool &) {
      if (U.get() != AssociatedValue)
      if (!Inst || Inst->hasMetadata(LLVMContext::MD_noalias_addrspace))
      Inst->setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
    (void)A.checkForAllUses(AddNoAliasAttr, *this, *AssociatedValue,
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "<invalid>";
    raw_string_ostream OS(Str);
    OS << "CanNotBeAddrSpace(";
    for (RangeMap::const_iterator I = Map.begin(); I != Map.end(); I++) {
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      OS << ' ' << '[' << Upper << ',' << Lower + 1 << ')';

  void removeAS(unsigned AS) {
    RangeMap::iterator I = Map.find(AS);
    if (I != Map.end()) {
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      if (AS != ~((unsigned)0) && AS + 1 <= Upper)
      if (AS != 0 && Lower <= AS - 1)

  void resetASRanges(Attributor &A) {
    Map.insert(0, A.getInfoCache().getMaxAddrSpace(), true);

struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {

struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {

struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {

struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {

struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
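// A standalone sketch (plain C++) of the range bookkeeping behind
// AANoAliasAddrSpaceImpl above: the state starts with one interval covering
// all address spaces the pointer can NOT be in, and observing an object in a
// concrete space punches that space out of the interval set. std::map stands
// in for the real interval map.
#include <cstdio>
#include <map>

// Maps a start AS to an inclusive end AS; intervals are kept disjoint.
using RangeMap = std::map<unsigned, unsigned>;

void removeAS(RangeMap &Map, unsigned AS) {
  for (auto It = Map.begin(); It != Map.end(); ++It) {
    unsigned Lower = It->first, Upper = It->second;
    if (AS < Lower || AS > Upper)
      continue;
    Map.erase(It);
    if (AS != 0 && Lower <= AS - 1)
      Map.emplace(Lower, AS - 1);
    if (AS != ~0u && AS + 1 <= Upper)
      Map.emplace(AS + 1, Upper);
    break;
  }
}

int main() {
  RangeMap Map{{0, 9}}; // assume at most 10 address spaces for the demo
  removeAS(Map, 3);     // the pointer was seen in addrspace(3)
  for (const auto &[L, U] : Map)
    std::printf("[%u,%u] ", L, U); // prints "[0,2] [4,9]"
}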
struct AAAllocationInfoImpl : public AAAllocationInfo {
  AAAllocationInfoImpl(const IRPosition &IRP, Attributor &A)
      : AAAllocationInfo(IRP, A) {}

  std::optional<TypeSize> getAllocatedSize() const override {
    assert(isValidState() && "the AA is invalid");
    return AssumedAllocatedSize;
  }

  std::optional<TypeSize> findInitialAllocationSize(Instruction *I,
                                                    const DataLayout &DL) {
    switch (I->getOpcode()) {
    case Instruction::Alloca: {
      AllocaInst *AI = cast<AllocaInst>(I);
      return AI->getAllocationSize(DL);
    }
    default:
      return std::nullopt;
    }
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();
    Instruction *I = IRP.getCtxI();

    // Only alloca instructions are handled for now.
    if (!isa<AllocaInst>(I))
      return indicatePessimisticFixpoint();

    bool IsKnownNoCapture;
    if (!AA::hasAssumedIRAttr<Attribute::Captures>(
            A, this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    const AAPointerInfo *PI =
        A.getOrCreateAAFor<AAPointerInfo>(IRP, *this, DepClassTy::REQUIRED);
    if (!PI)
      return indicatePessimisticFixpoint();

    if (!PI->getState().isValidState() || PI->reachesReturn())
      return indicatePessimisticFixpoint();

    const DataLayout &DL = A.getDataLayout();
    const auto AllocationSize = findInitialAllocationSize(I, DL);

    // If the allocation size is unknown, give up.
    if (!AllocationSize)
      return indicatePessimisticFixpoint();

    // Zero-sized allocations cannot be reduced any further.
    if (*AllocationSize == 0)
      return indicatePessimisticFixpoint();

    int64_t BinSize = PI->numOffsetBins();

    // Multiple access bins are not handled yet.
    if (BinSize > 1)
      return indicatePessimisticFixpoint();

    if (BinSize == 0) {
      auto NewAllocationSize = std::make_optional<TypeSize>(0, false);
      if (!changeAllocationSize(NewAllocationSize))
        return ChangeStatus::UNCHANGED;
      return ChangeStatus::CHANGED;
    }

    // Exactly one access bin; it has to start at offset zero.
    const auto &It = PI->begin();
    if (It->first.Offset != 0)
      return indicatePessimisticFixpoint();

    uint64_t SizeOfBin = It->first.Offset + It->first.Size;

    if (SizeOfBin >= *AllocationSize)
      return indicatePessimisticFixpoint();

    auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8, false);

    if (!changeAllocationSize(NewAllocationSize))
      return ChangeStatus::UNCHANGED;

    return ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(isValidState() &&
           "Manifest should only be called if the state is valid.");

    Instruction *I = getIRPosition().getCtxI();

    auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
    unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;

    switch (I->getOpcode()) {
    case Instruction::Alloca: {
      AllocaInst *AI = cast<AllocaInst>(I);

      Type *CharType = Type::getInt8Ty(I->getContext());
      auto *NumBytesToValue =
          ConstantInt::get(I->getContext(), APInt(32, NumBytesToAllocate));

      BasicBlock::iterator insertPt = AI->getIterator();
      insertPt = std::next(insertPt);
      AllocaInst *NewAllocaInst =
          new AllocaInst(CharType, AI->getAddressSpace(), NumBytesToValue,
                         AI->getAlign(), AI->getName(), insertPt);

      if (A.changeAfterManifest(IRPosition::inst(*AI), *NewAllocaInst))
        return ChangeStatus::CHANGED;
      break;
    }
    default:
      break;
    }

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "allocationinfo(<invalid>)";
    return "allocationinfo(" +
           (AssumedAllocatedSize == HasNoAllocationSize
                ? "none"
                : std::to_string(AssumedAllocatedSize->getFixedValue())) +
           ")";
  }

private:
  std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;

  /// Update the assumed allocation size; return true if it changed.
  bool changeAllocationSize(std::optional<TypeSize> Size) {
    if (AssumedAllocatedSize == HasNoAllocationSize ||
        AssumedAllocatedSize != Size) {
      AssumedAllocatedSize = Size;
      return true;
    }
    return false;
  }
};
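// Illustrative sketch, not part of the original file: the core size
// computation of AAAllocationInfoImpl in isolation, kept inside `#if 0` so it
// is never compiled. `BinRange` and `reducedSizeInBits` are hypothetical names
// that stand in for the single AAPointerInfo offset bin and for the logic in
// updateImpl() above; the bits-to-bytes rounding mirrors manifest().
#if 0
#include <cstdint>
#include <optional>

struct BinRange {
  int64_t Offset; // Byte offset of the accessed range within the allocation.
  int64_t Size;   // Number of bytes accessed starting at Offset.
};

// Return the reduced allocation size in bits, or std::nullopt if the
// allocation cannot be shrunk under the same restrictions as updateImpl():
// a single bin that starts at offset zero and is smaller than the allocation.
static std::optional<uint64_t> reducedSizeInBits(BinRange Bin,
                                                 uint64_t AllocBytes) {
  if (Bin.Offset != 0)
    return std::nullopt; // Only bins starting at offset zero are handled.
  uint64_t AccessedBytes = Bin.Offset + Bin.Size;
  if (AccessedBytes >= AllocBytes)
    return std::nullopt; // No shrinking possible.
  return AccessedBytes * 8; // The state tracks the size in bits.
}

// manifest() later rounds the tracked bit count back up to whole bytes:
//   NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8
#endif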
struct AAAllocationInfoFloating : AAAllocationInfoImpl {
  AAAllocationInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAAllocationInfoImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(allocationinfo)
  }
};

struct AAAllocationInfoReturned : AAAllocationInfoImpl {
  AAAllocationInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAAllocationInfoImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    // Rewriting returned allocations is not supported; bail out immediately.
    (void)indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(allocationinfo)
  }
};

struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
  AAAllocationInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAAllocationInfoImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(allocationinfo)
  }
};

struct AAAllocationInfoArgument : AAAllocationInfoImpl {
  AAAllocationInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAAllocationInfoImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(allocationinfo)
  }
};

struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
  AAAllocationInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAllocationInfoImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    // Rewriting call site arguments is not supported; bail out immediately.
    (void)indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(allocationinfo)
  }
};
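// Illustrative sketch, not part of the original file: how a client inside the
// Attributor framework could query the deduced allocation size for an alloca.
// `queryAllocationSize` and `QueryingAA` are hypothetical placeholders; the
// getAAFor<>, IRPosition::value and DepClassTy APIs are the ones used
// throughout this file. Kept inside `#if 0` so it is never compiled.
#if 0
static std::optional<TypeSize>
queryAllocationSize(Attributor &A, const AbstractAttribute &QueryingAA,
                    AllocaInst &AI) {
  const AAAllocationInfo *AIAA = A.getAAFor<AAAllocationInfo>(
      QueryingAA, IRPosition::value(AI), DepClassTy::OPTIONAL);
  if (!AIAA || !AIAA->getState().isValidState())
    return std::nullopt;
  return AIAA->getAllocatedSize();
}
#endif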
#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
  case IRPosition::PK: \
    llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");

#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
  case IRPosition::PK: \
    AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
    ++NumAAs; \
    break;

#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
    CLASS *AA = nullptr; \
    switch (IRP.getPositionKind()) { \
      SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
      SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
      SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
      SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
      SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
      SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
    } \
    return *AA; \
  }

#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
    CLASS *AA = nullptr; \
    switch (IRP.getPositionKind()) { \
      SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
      SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
      SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
    } \
    return *AA; \
  }

#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
    CLASS *AA = nullptr; \
    switch (IRP.getPositionKind()) { \
      SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
    default: \
      llvm_unreachable("Cannot create " #CLASS " for position other than " \
                       #POS " position!"); \
    } \
    return *AA; \
  }

#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
    CLASS *AA = nullptr; \
    switch (IRP.getPositionKind()) { \
      SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
    } \
    return *AA; \
  }

#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
    CLASS *AA = nullptr; \
    switch (IRP.getPositionKind()) { \
      SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
      SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
      SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
      SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
      SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
      SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
      SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
    } \
    return *AA; \
  }

#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
    CLASS *AA = nullptr; \
    switch (IRP.getPositionKind()) { \
      SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
      SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
      SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
    } \
    return *AA; \
  }
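// Illustrative sketch, not part of the original file: how the factory macros
// above are used. For a hypothetical abstract attribute `AAFoo` that only has
// function- and call-site-scope implementations (`AAFooFunction`,
// `AAFooCallSite`), the file would instantiate
//
//   CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AAFoo)
//
// which expands to roughly:
//
//   AAFoo &AAFoo::createForPosition(const IRPosition &IRP, Attributor &A) {
//     AAFoo *AA = nullptr;
//     switch (IRP.getPositionKind()) {
//     case IRPosition::IRP_INVALID:
//       llvm_unreachable("Cannot create AAFoo for a invalid position!");
//     // ... remaining invalid position kinds elided ...
//     case IRPosition::IRP_FUNCTION:
//       AA = new (A.Allocator) AAFooFunction(IRP, A);
//       ++NumAAs;
//       break;
//     case IRPosition::IRP_CALL_SITE:
//       AA = new (A.Allocator) AAFooCallSite(IRP, A);
//       ++NumAAs;
//       break;
//     }
//     return *AA;
//   }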
#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
#undef SWITCH_PK_CREATE
#undef SWITCH_PK_INV
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Development, "development", "for training")))
dot regions Print regions of function to dot true view regions View regions of function(with no function bodies)"
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
LLVM_ABI LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
FunctionAddr VTableAddr Value
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
BumpPtrAllocatorImpl BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
@ Success
The lock was released successfully.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
GenericCycleInfo< SSAContext > CycleInfo
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves it's defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this possition can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everythign but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given value type T.
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument, otherwise a negative value.
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argument, otherwise a negative value.
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
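A hedged sketch tying several of the IRPosition factory functions above together, assuming CB is a CallBase whose callee Callee is known (illustrative names):
IRPosition FnPos  = IRPosition::function(*Callee);        // function scope
IRPosition RetPos = IRPosition::returned(*Callee);        // callee return value
IRPosition CSRet  = IRPosition::callsite_returned(CB);    // value returned at CB
IRPosition CSArg0 = IRPosition::callsite_argument(CB, 0); // first call site argument
assert(CSArg0.getPositionKind() == IRPosition::IRP_CALL_SITE_ARGUMENT);
assert(CSArg0.getCallSiteArgNo() == 0);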
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows inserting a new assumption string into the known assumption set by creating a (static) object.
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return a universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
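A hedged example of checkForAllContext, assuming Explorer is a MustBeExecutedContextExplorer and PP the program point of interest; the predicate checks that no instruction in the context writes memory:
bool NoWritesInContext = Explorer.checkForAllContext(
    PP, [](const Instruction *I) { return !I->mayWriteToMemory(); });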
static unsigned MaxPotentialValues
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
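A hedged sketch of the StateWrapper pattern; AAMyProperty is a hypothetical name, and real attributes also override the remaining pure virtual AbstractAttribute methods, omitted here:
struct AAMyProperty : public StateWrapper<BooleanState, AbstractAttribute> {
  using Base = StateWrapper<BooleanState, AbstractAttribute>;
  AAMyProperty(const IRPosition &IRP, Attributor &A) : Base(IRP) {}
  // getState() is inherited from StateWrapper and returns the BooleanState.
};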
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
Type * Ty
The type of the original value.