54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
82#define DEBUG_TYPE "attributor"
86 cl::desc(
"Manifest Attributor internal string attributes."),
99 cl::desc(
"Maximum number of potential values to be "
100 "tracked for each position."),
105 "attributor-max-potential-values-iterations",
cl::Hidden,
107 "Maximum number of iterations we keep dismantling potential values."),
110STATISTIC(NumAAs,
"Number of abstract attributes created");
111STATISTIC(NumIndirectCallsPromoted,
"Number of indirect calls promoted");
126#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
127 ("Number of " #TYPE " marked '" #NAME "'")
128#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
129#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
130#define STATS_DECL(NAME, TYPE, MSG) \
131 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
132#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
133#define STATS_DECLTRACK(NAME, TYPE, MSG) \
134 {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
135#define STATS_DECLTRACK_ARG_ATTR(NAME) \
136 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
137#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
138 STATS_DECLTRACK(NAME, CSArguments, \
139 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
140#define STATS_DECLTRACK_FN_ATTR(NAME) \
141 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
142#define STATS_DECLTRACK_CS_ATTR(NAME) \
143 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
144#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
145 STATS_DECLTRACK(NAME, FunctionReturn, \
146 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
147#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
148 STATS_DECLTRACK(NAME, CSReturn, \
149 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
150#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
151 STATS_DECLTRACK(NAME, Floating, \
152 ("Number of floating values known to be '" #NAME "'"))
157#define PIPE_OPERATOR(CLASS) \
158 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
159 return OS << static_cast<const AbstractAttribute &>(AA); \
216 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
219 auto *BB =
I->getParent();
225 return !HeaderOnly || BB ==
C->getHeader();
236 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
261 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
271 bool AllowVolatile) {
272 if (!AllowVolatile &&
I->isVolatile())
276 return LI->getPointerOperand();
280 return SI->getPointerOperand();
284 return CXI->getPointerOperand();
288 return RMWI->getPointerOperand();
310 bool GetMinOffset,
bool AllowNonInbounds,
311 bool UseAssumed =
false) {
313 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
320 if (!ValueConstantRangeAA)
324 if (
Range.isFullSet())
330 ROffset =
Range.getSignedMin();
332 ROffset =
Range.getSignedMax();
343 const Value *
Ptr, int64_t &BytesOffset,
345 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
348 true, AllowNonInbounds);
356template <
typename AAType,
typename StateType =
typename AAType::StateType,
358 bool RecurseForSelectAndPHI =
true>
360 Attributor &
A,
const AAType &QueryingAA, StateType &S,
362 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
363 << QueryingAA <<
" into " << S <<
"\n");
365 assert((QueryingAA.getIRPosition().getPositionKind() ==
367 QueryingAA.getIRPosition().getPositionKind() ==
369 "Can only clamp returned value states for a function returned or call "
370 "site returned position!");
374 std::optional<StateType>
T;
377 auto CheckReturnValue = [&](
Value &RV) ->
bool {
391 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
392 const StateType &AAS =
AA->getState();
394 T = StateType::getBestState(AAS);
396 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
398 return T->isValidState();
401 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
403 RecurseForSelectAndPHI))
404 S.indicatePessimisticFixpoint();
411template <
typename AAType,
typename BaseType,
412 typename StateType =
typename BaseType::StateType,
413 bool PropagateCallBaseContext =
false,
415 bool RecurseForSelectAndPHI =
true>
416struct AAReturnedFromReturnedValues :
public BaseType {
417 AAReturnedFromReturnedValues(
const IRPosition &IRP, Attributor &
A)
422 StateType S(StateType::getBestState(this->getState()));
424 RecurseForSelectAndPHI>(
426 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
435template <
typename AAType,
typename StateType =
typename AAType::StateType,
437static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
439 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
440 << QueryingAA <<
" into " << S <<
"\n");
442 assert(QueryingAA.getIRPosition().getPositionKind() ==
444 "Can only clamp call site argument states for an argument position!");
448 std::optional<StateType>
T;
451 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
471 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
472 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @" << ACSArgPos
474 const StateType &AAS =
AA->getState();
476 T = StateType::getBestState(AAS);
478 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
480 return T->isValidState();
483 bool UsedAssumedInformation =
false;
484 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
485 UsedAssumedInformation))
486 S.indicatePessimisticFixpoint();
493template <
typename AAType,
typename BaseType,
494 typename StateType =
typename AAType::StateType,
496bool getArgumentStateFromCallBaseContext(
Attributor &
A,
500 "Expected an 'argument' position !");
506 assert(ArgNo >= 0 &&
"Invalid Arg No!");
520 const StateType &CBArgumentState =
521 static_cast<const StateType &
>(
AA->getState());
523 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
524 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
528 State ^= CBArgumentState;
533template <
typename AAType,
typename BaseType,
534 typename StateType =
typename AAType::StateType,
535 bool BridgeCallBaseContext =
false,
537struct AAArgumentFromCallSiteArguments :
public BaseType {
538 AAArgumentFromCallSiteArguments(
const IRPosition &IRP, Attributor &
A)
543 StateType S = StateType::getBestState(this->getState());
545 if (BridgeCallBaseContext) {
547 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
549 A, *
this, this->getIRPosition(), S);
553 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
563template <
typename AAType,
typename BaseType,
564 typename StateType =
typename BaseType::StateType,
565 bool IntroduceCallBaseContext =
false,
567struct AACalleeToCallSite :
public BaseType {
568 AACalleeToCallSite(
const IRPosition &IRP, Attributor &
A) :
BaseType(IRP,
A) {}
572 auto IRPKind = this->getIRPosition().getPositionKind();
575 "Can only wrap function returned positions for call site "
576 "returned positions!");
577 auto &S = this->getState();
580 if (IntroduceCallBaseContext)
581 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
586 for (
const Function *Callee : Callees) {
590 IntroduceCallBaseContext ? &CB :
nullptr)
592 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
594 if (Attribute::isEnumAttrKind(IRAttributeKind)) {
597 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
603 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
607 if (S.isAtFixpoint())
608 return S.isValidState();
612 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
613 return S.indicatePessimisticFixpoint();
619template <
class AAType,
typename StateType =
typename AAType::StateType>
625 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
626 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
630 if (Found &&
AA.followUseInMBEC(
A, U, UserI, State))
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &
AA,
Attributor &
A, StateType &S,
648 const Value &Val =
AA.getIRPosition().getAssociatedValue();
653 A.getInfoCache().getMustBeExecutedContextExplorer();
659 for (
const Use &U : Val.
uses())
662 followUsesInContext<AAType>(
AA,
A, *Explorer, &CtxI,
Uses, S);
664 if (S.isAtFixpoint())
670 if (Br->isConditional())
709 StateType ParentState;
713 ParentState.indicateOptimisticFixpoint();
715 for (
const BasicBlock *BB : Br->successors()) {
716 StateType ChildState;
718 size_t BeforeSize =
Uses.size();
719 followUsesInContext(
AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
722 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
725 ParentState &= ChildState;
799 R.indicatePessimisticFixpoint();
816 BS.indicateOptimisticFixpoint();
822 BS.indicatePessimisticFixpoint();
892 template <
typename F>
899 if (!
Range.mayOverlap(ItRange))
901 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
902 for (
auto Index : It.getSecond()) {
912 template <
typename F>
923 for (
unsigned Index : LocalList->getSecond()) {
926 if (
Range.offsetAndSizeAreUnknown())
942 RemoteI = RemoteI ? RemoteI : &
I;
946 bool AccExists =
false;
948 for (
auto Index : LocalList) {
950 if (
A.getLocalInst() == &
I) {
959 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
961 for (
auto Key : ToAdd) {
968 AccessList.emplace_back(&
I, RemoteI, Ranges, Content, Kind, Ty);
970 "New Access should have been at AccIndex");
971 LocalList.push_back(AccIndex);
980 auto Before = Current;
982 if (Current == Before)
985 auto &ExistingRanges = Before.getRanges();
986 auto &NewRanges = Current.getRanges();
993 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
1000 "Expected bin to actually contain the Access.");
1001 Bin.erase(AccIndex);
1022struct AAPointerInfoImpl
1023 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1028 const std::string getAsStr(
Attributor *
A)
const override {
1029 return std::string(
"PointerInfo ") +
1030 (isValidState() ? (std::string(
"#") +
1031 std::to_string(OffsetBins.size()) +
" bins")
1036 [](int64_t O) {
return std::to_string(O); }),
1044 return AAPointerInfo::manifest(
A);
1047 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1048 virtual const_bin_iterator
end()
const override {
return State::end(); }
1049 virtual int64_t numOffsetBins()
const override {
1050 return State::numOffsetBins();
1052 virtual bool reachesReturn()
const override {
1053 return !ReturnedOffsets.isUnassigned();
1055 virtual void addReturnedOffsetsTo(OffsetInfo &OI)
const override {
1056 if (ReturnedOffsets.isUnknown()) {
1061 OffsetInfo MergedOI;
1062 for (
auto Offset : ReturnedOffsets) {
1063 OffsetInfo TmpOI = OI;
1065 MergedOI.merge(TmpOI);
1067 OI = std::move(MergedOI);
1070 ChangeStatus setReachesReturn(
const OffsetInfo &ReachedReturnedOffsets) {
1071 if (ReturnedOffsets.isUnknown())
1072 return ChangeStatus::UNCHANGED;
1073 if (ReachedReturnedOffsets.isUnknown()) {
1074 ReturnedOffsets.setUnknown();
1075 return ChangeStatus::CHANGED;
1077 if (ReturnedOffsets.merge(ReachedReturnedOffsets))
1078 return ChangeStatus::CHANGED;
1079 return ChangeStatus::UNCHANGED;
1082 bool forallInterferingAccesses(
1084 function_ref<
bool(
const AAPointerInfo::Access &,
bool)> CB)
1086 return State::forallInterferingAccesses(
Range, CB);
1089 bool forallInterferingAccesses(
1090 Attributor &
A,
const AbstractAttribute &QueryingAA, Instruction &
I,
1091 bool FindInterferingWrites,
bool FindInterferingReads,
1092 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1094 function_ref<
bool(
const Access &)> SkipCB)
const override {
1095 HasBeenWrittenTo =
false;
1097 SmallPtrSet<const Access *, 8> DominatingWrites;
1105 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
1107 bool AllInSameNoSyncFn = IsAssumedNoSync;
1108 bool InstIsExecutedByInitialThreadOnly =
1109 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1116 bool InstIsExecutedInAlignedRegion =
1117 FindInterferingReads && ExecDomainAA &&
1118 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1120 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1121 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1123 InformationCache &InfoCache =
A.getInfoCache();
1124 bool IsThreadLocalObj =
1133 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1134 if (IsThreadLocalObj || AllInSameNoSyncFn)
1136 const auto *FnExecDomainAA =
1137 I.getFunction() == &
Scope
1139 :
A.lookupAAFor<AAExecutionDomain>(
1142 if (!FnExecDomainAA)
1144 if (InstIsExecutedInAlignedRegion ||
1145 (FindInterferingWrites &&
1146 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1147 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1150 if (InstIsExecutedByInitialThreadOnly &&
1151 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1152 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1161 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1162 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1163 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1164 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1168 bool IsKnownNoRecurse;
1176 bool InstInKernel =
A.getInfoCache().isKernel(Scope);
1177 bool ObjHasKernelLifetime =
false;
1178 const bool UseDominanceReasoning =
1179 FindInterferingWrites && IsKnownNoRecurse;
1180 const DominatorTree *DT =
1190 case AA::GPUAddressSpace::Shared:
1191 case AA::GPUAddressSpace::Constant:
1192 case AA::GPUAddressSpace::Local:
1204 std::function<bool(
const Function &)> IsLiveInCalleeCB;
1209 const Function *AIFn = AI->getFunction();
1210 ObjHasKernelLifetime =
A.getInfoCache().isKernel(*AIFn);
1211 bool IsKnownNoRecurse;
1214 IsKnownNoRecurse)) {
1215 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1220 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1221 if (ObjHasKernelLifetime)
1222 IsLiveInCalleeCB = [&
A](
const Function &Fn) {
1223 return !
A.getInfoCache().isKernel(Fn);
1231 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1232 Function *AccScope = Acc.getRemoteInst()->getFunction();
1233 bool AccInSameScope = AccScope == &
Scope;
1237 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1238 A.getInfoCache().isKernel(*AccScope))
1241 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1242 if (Acc.isWrite() || (
isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1243 ExclusionSet.
insert(Acc.getRemoteInst());
1246 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1247 (!FindInterferingReads || !Acc.isRead()))
1250 bool Dominates = FindInterferingWrites && DT && Exact &&
1251 Acc.isMustAccess() && AccInSameScope &&
1254 DominatingWrites.
insert(&Acc);
1258 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1260 InterferingAccesses.
push_back({&Acc, Exact});
1263 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1266 HasBeenWrittenTo = !DominatingWrites.
empty();
1270 for (
const Access *Acc : DominatingWrites) {
1271 if (!LeastDominatingWriteInst) {
1272 LeastDominatingWriteInst = Acc->getRemoteInst();
1273 }
else if (DT->
dominates(LeastDominatingWriteInst,
1274 Acc->getRemoteInst())) {
1275 LeastDominatingWriteInst = Acc->getRemoteInst();
1280 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1281 if (SkipCB && SkipCB(Acc))
1283 if (!CanIgnoreThreading(Acc))
1289 bool ReadChecked = !FindInterferingReads;
1290 bool WriteChecked = !FindInterferingWrites;
1296 &ExclusionSet, IsLiveInCalleeCB))
1301 if (!WriteChecked) {
1303 &ExclusionSet, IsLiveInCalleeCB))
1304 WriteChecked =
true;
1318 if (!WriteChecked && HasBeenWrittenTo &&
1319 Acc.getRemoteInst()->getFunction() != &Scope) {
1321 const auto *FnReachabilityAA =
A.getAAFor<AAInterFnReachability>(
1323 if (FnReachabilityAA) {
1329 if (!FnReachabilityAA->instructionCanReach(
1330 A, *LeastDominatingWriteInst,
1331 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1332 WriteChecked =
true;
1339 if (ReadChecked && WriteChecked)
1342 if (!DT || !UseDominanceReasoning)
1344 if (!DominatingWrites.count(&Acc))
1346 return LeastDominatingWriteInst != Acc.getRemoteInst();
1351 for (
auto &It : InterferingAccesses) {
1352 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1353 !CanSkipAccess(*It.first, It.second)) {
1354 if (!UserCB(*It.first, It.second))
1362 const AAPointerInfo &OtherAA,
1364 using namespace AA::PointerInfo;
1366 return indicatePessimisticFixpoint();
1369 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1370 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1371 Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);
1374 const auto &State = OtherAAImpl.getState();
1375 for (
const auto &It : State) {
1376 for (
auto Index : It.getSecond()) {
1377 const auto &RAcc = State.getAccess(Index);
1378 if (IsByval && !RAcc.isRead())
1380 bool UsedAssumedInformation =
false;
1382 auto Content =
A.translateArgumentToCallSiteContent(
1383 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1384 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1385 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1387 Changed |= addAccess(
A, RAcc.getRanges(), CB, Content, AK,
1388 RAcc.getType(), RAcc.getRemoteInst());
1394 ChangeStatus translateAndAddState(Attributor &
A,
const AAPointerInfo &OtherAA,
1395 const OffsetInfo &Offsets, CallBase &CB,
1397 using namespace AA::PointerInfo;
1399 return indicatePessimisticFixpoint();
1401 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1405 const auto &State = OtherAAImpl.getState();
1406 for (
const auto &It : State) {
1407 for (
auto Index : It.getSecond()) {
1408 const auto &RAcc = State.getAccess(Index);
1409 if (!IsMustAcc && RAcc.isAssumption())
1411 for (
auto Offset : Offsets) {
1415 if (!NewRanges.isUnknown()) {
1416 NewRanges.addToAllOffsets(Offset);
1421 Changed |= addAccess(
A, NewRanges, CB, RAcc.getContent(), AK,
1422 RAcc.getType(), RAcc.getRemoteInst());
1431 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1434 void dumpState(raw_ostream &O) {
1435 for (
auto &It : OffsetBins) {
1436 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1437 <<
"] : " << It.getSecond().size() <<
"\n";
1438 for (
auto AccIndex : It.getSecond()) {
1439 auto &Acc = AccessList[AccIndex];
1440 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1441 if (Acc.getLocalInst() != Acc.getRemoteInst())
1442 O <<
" --> " << *Acc.getRemoteInst()
1444 if (!Acc.isWrittenValueYetUndetermined()) {
1446 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1448 else if (Acc.getWrittenValue())
1449 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1451 O <<
" - c: <unknown>\n";
1458struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1460 AAPointerInfoFloating(
const IRPosition &IRP, Attributor &
A)
1461 : AAPointerInfoImpl(IRP,
A) {}
1464 bool handleAccess(Attributor &
A, Instruction &
I,
1465 std::optional<Value *> Content,
AccessKind Kind,
1468 using namespace AA::PointerInfo;
1470 const DataLayout &
DL =
A.getDataLayout();
1471 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1480 if (!VT || VT->getElementCount().isScalable() ||
1482 (*Content)->getType() != VT ||
1483 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1494 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1499 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1501 ConstContent, ConstantInt::get(
Int32Ty, i));
1508 for (
auto &ElementOffset : ElementOffsets)
1509 ElementOffset += ElementSize;
1522 bool collectConstantsForGEP(Attributor &
A,
const DataLayout &
DL,
1523 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1524 const GEPOperator *
GEP);
1527 void trackStatistics()
const override {
1528 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1532bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &
A,
1533 const DataLayout &
DL,
1535 const OffsetInfo &PtrOI,
1536 const GEPOperator *
GEP) {
1537 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1538 SmallMapVector<Value *, APInt, 4> VariableOffsets;
1541 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1542 "Don't look for constant values if the offset has already been "
1543 "determined to be unknown.");
1545 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1551 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1555 Union.addToAll(ConstantOffset.getSExtValue());
1560 for (
const auto &VI : VariableOffsets) {
1561 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
1563 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1569 if (PotentialConstantsAA->undefIsContained())
1576 auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
1577 if (AssumedSet.empty())
1581 for (
const auto &ConstOffset : AssumedSet) {
1582 auto CopyPerOffset =
Union;
1583 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1584 VI.second.getZExtValue());
1585 Product.merge(CopyPerOffset);
1590 UsrOI = std::move(Union);
1594ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &
A) {
1595 using namespace AA::PointerInfo;
1597 const DataLayout &
DL =
A.getDataLayout();
1598 Value &AssociatedValue = getAssociatedValue();
1600 DenseMap<Value *, OffsetInfo> OffsetInfoMap;
1601 OffsetInfoMap[&AssociatedValue].
insert(0);
1603 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1614 "CurPtr does not exist in the map!");
1616 auto &UsrOI = OffsetInfoMap[Usr];
1617 auto &PtrOI = OffsetInfoMap[CurPtr];
1618 assert(!PtrOI.isUnassigned() &&
1619 "Cannot pass through if the input Ptr was not visited!");
1625 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1627 User *Usr =
U.getUser();
1628 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1631 "The current pointer offset should have been seeded!");
1632 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1633 "Current pointer should be assigned");
1637 return HandlePassthroughUser(Usr, CurPtr, Follow);
1639 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1647 auto &UsrOI = OffsetInfoMap[Usr];
1648 auto &PtrOI = OffsetInfoMap[CurPtr];
1650 if (UsrOI.isUnknown())
1653 if (PtrOI.isUnknown()) {
1659 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1665 return HandlePassthroughUser(Usr, CurPtr, Follow);
1670 if (RI->getFunction() == getAssociatedFunction()) {
1671 auto &PtrOI = OffsetInfoMap[CurPtr];
1672 Changed |= setReachesReturn(PtrOI);
1685 auto &UsrOI = PhiIt->second;
1686 auto &PtrOI = OffsetInfoMap[CurPtr];
1690 if (PtrOI.isUnknown()) {
1691 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1692 << *CurPtr <<
" in " << *
PHI <<
"\n");
1693 Follow = !UsrOI.isUnknown();
1699 if (UsrOI == PtrOI) {
1700 assert(!PtrOI.isUnassigned() &&
1701 "Cannot assign if the current Ptr was not visited!");
1702 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1712 auto It = OffsetInfoMap.
find(CurPtrBase);
1713 if (It == OffsetInfoMap.
end()) {
1714 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1715 << *CurPtr <<
" in " << *
PHI
1716 <<
" (base: " << *CurPtrBase <<
")\n");
1730 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
1731 *
PHI->getFunction());
1733 auto BaseOI = It->getSecond();
1734 BaseOI.addToAll(
Offset.getZExtValue());
1735 if (IsFirstPHIUser || BaseOI == UsrOI) {
1736 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1737 <<
" in " << *Usr <<
"\n");
1738 return HandlePassthroughUser(Usr, CurPtr, Follow);
1742 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1743 << *CurPtr <<
" in " << *
PHI <<
"\n");
1762 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1763 OffsetInfoMap[CurPtr].Offsets,
Changed,
1769 return II->isAssumeLikeIntrinsic();
1780 }
while (FromI && FromI != ToI);
1785 auto IsValidAssume = [&](IntrinsicInst &IntrI) {
1786 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1789 if (IntrI.getParent() == BB) {
1790 if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
1796 if ((*PredIt) != BB)
1801 if (SuccBB == IntrBB)
1807 if (IsImpactedInRange(LoadI->getNextNode(), BB->
getTerminator()))
1809 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1815 std::pair<Value *, IntrinsicInst *> Assumption;
1816 for (
const Use &LoadU : LoadI->uses()) {
1818 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1820 for (
const Use &CmpU : CmpI->uses()) {
1822 if (!IsValidAssume(*IntrI))
1824 int Idx = CmpI->getOperandUse(0) == LoadU;
1825 Assumption = {CmpI->getOperand(Idx), IntrI};
1830 if (Assumption.first)
1835 if (!Assumption.first || !Assumption.second)
1839 << *Assumption.second <<
": " << *LoadI
1840 <<
" == " << *Assumption.first <<
"\n");
1841 bool UsedAssumedInformation =
false;
1842 std::optional<Value *> Content =
nullptr;
1843 if (Assumption.first)
1845 A.getAssumedSimplified(*Assumption.first, *
this,
1847 return handleAccess(
1848 A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
1849 OffsetInfoMap[CurPtr].Offsets,
Changed, *LoadI->getType());
1854 for (
auto *OtherOp : OtherOps) {
1855 if (OtherOp == CurPtr) {
1858 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1870 bool UsedAssumedInformation =
false;
1871 std::optional<Value *> Content =
nullptr;
1873 Content =
A.getAssumedSimplified(
1875 return handleAccess(
A,
I, Content, AK, OffsetInfoMap[CurPtr].Offsets,
1880 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1881 *StoreI->getValueOperand()->getType(),
1882 {StoreI->getValueOperand()}, AccessKind::AK_W);
1884 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1885 {RMWI->getValOperand()}, AccessKind::AK_RW);
1887 return HandleStoreLike(
1888 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1889 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1896 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1901 const auto *CSArgPI =
A.getAAFor<AAPointerInfo>(
1907 Changed = translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
1910 if (!CSArgPI->reachesReturn())
1911 return isValidState();
1914 if (!Callee ||
Callee->arg_size() <= ArgNo)
1916 bool UsedAssumedInformation =
false;
1917 auto ReturnedValue =
A.getAssumedSimplified(
1922 auto *Arg =
Callee->getArg(ArgNo);
1923 if (ReturnedArg && Arg != ReturnedArg)
1925 bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
1926 const auto *CSRetPI =
A.getAAFor<AAPointerInfo>(
1930 OffsetInfo OI = OffsetInfoMap[CurPtr];
1931 CSArgPI->addReturnedOffsetsTo(OI);
1933 translateAndAddState(
A, *CSRetPI, OI, *CB, IsRetMustAcc) |
Changed;
1934 return isValidState();
1936 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1941 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1944 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1945 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1946 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1947 if (OffsetInfoMap.
count(NewU)) {
1949 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1950 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1951 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1955 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1958 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1960 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1962 true, EquivalentUseCB)) {
1963 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1964 return indicatePessimisticFixpoint();
1968 dbgs() <<
"Accesses by bin after update:\n";
1975struct AAPointerInfoReturned final : AAPointerInfoImpl {
1976 AAPointerInfoReturned(
const IRPosition &IRP, Attributor &
A)
1977 : AAPointerInfoImpl(IRP,
A) {}
1981 return indicatePessimisticFixpoint();
1985 void trackStatistics()
const override {
1986 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1990struct AAPointerInfoArgument final : AAPointerInfoFloating {
1991 AAPointerInfoArgument(
const IRPosition &IRP, Attributor &
A)
1992 : AAPointerInfoFloating(IRP,
A) {}
1995 void trackStatistics()
const override {
1996 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2000struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
2001 AAPointerInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2002 : AAPointerInfoFloating(IRP,
A) {}
2006 using namespace AA::PointerInfo;
2012 if (
auto Length =
MI->getLengthInBytes())
2013 LengthVal =
Length->getSExtValue();
2014 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
2017 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
2019 return indicatePessimisticFixpoint();
2022 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
2024 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
2027 dbgs() <<
"Accesses by bin after update:\n";
2038 Argument *Arg = getAssociatedArgument();
2042 A.getAAFor<AAPointerInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
2043 if (ArgAA && ArgAA->getState().isValidState())
2044 return translateAndAddStateFromCallee(
A, *ArgAA,
2047 return indicatePessimisticFixpoint();
2050 bool IsKnownNoCapture;
2052 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2053 return indicatePessimisticFixpoint();
2055 bool IsKnown =
false;
2057 return ChangeStatus::UNCHANGED;
2060 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2066 void trackStatistics()
const override {
2067 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2071struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2072 AAPointerInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2073 : AAPointerInfoFloating(IRP,
A) {}
2076 void trackStatistics()
const override {
2077 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2085struct AANoUnwindImpl : AANoUnwind {
2086 AANoUnwindImpl(
const IRPosition &IRP, Attributor &
A) : AANoUnwind(IRP,
A) {}
2092 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2096 const std::string getAsStr(Attributor *
A)
const override {
2097 return getAssumed() ?
"nounwind" :
"may-unwind";
2103 (unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2104 (unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2105 (unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2108 if (!
I.mayThrow(
true))
2112 bool IsKnownNoUnwind;
2120 bool UsedAssumedInformation =
false;
2121 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2122 UsedAssumedInformation))
2123 return indicatePessimisticFixpoint();
2125 return ChangeStatus::UNCHANGED;
2129struct AANoUnwindFunction final :
public AANoUnwindImpl {
2130 AANoUnwindFunction(
const IRPosition &IRP, Attributor &
A)
2131 : AANoUnwindImpl(IRP,
A) {}
2138struct AANoUnwindCallSite final
2139 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2140 AANoUnwindCallSite(
const IRPosition &IRP, Attributor &
A)
2141 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2152 case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
2153 case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
2154 case Intrinsic::nvvm_barrier0_and:
2155 case Intrinsic::nvvm_barrier0_or:
2156 case Intrinsic::nvvm_barrier0_popc:
2158 case Intrinsic::amdgcn_s_barrier:
2159 if (ExecutedAligned)
2182 switch (
I->getOpcode()) {
2183 case Instruction::AtomicRMW:
2186 case Instruction::Store:
2189 case Instruction::Load:
2194 "New atomic operations need to be known in the attributor.");
2206 return !
MI->isVolatile();
2222 const std::string getAsStr(Attributor *
A)
const override {
2223 return getAssumed() ?
"nosync" :
"may-sync";
2239 if (
I.mayReadOrWriteMemory())
2253 bool UsedAssumedInformation =
false;
2254 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2255 UsedAssumedInformation) ||
2256 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2257 UsedAssumedInformation))
2258 return indicatePessimisticFixpoint();
2263struct AANoSyncFunction final :
public AANoSyncImpl {
2264 AANoSyncFunction(
const IRPosition &IRP, Attributor &
A)
2265 : AANoSyncImpl(IRP,
A) {}
2272struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2273 AANoSyncCallSite(
const IRPosition &IRP, Attributor &
A)
2274 : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP,
A) {}
2284struct AANoFreeImpl :
public AANoFree {
2285 AANoFreeImpl(
const IRPosition &IRP, Attributor &
A) : AANoFree(IRP,
A) {}
2291 DepClassTy::NONE, IsKnown));
2301 DepClassTy::REQUIRED, IsKnown);
2304 bool UsedAssumedInformation =
false;
2305 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2306 UsedAssumedInformation))
2307 return indicatePessimisticFixpoint();
2308 return ChangeStatus::UNCHANGED;
2312 const std::string getAsStr(Attributor *
A)
const override {
2313 return getAssumed() ?
"nofree" :
"may-free";
2317struct AANoFreeFunction final :
public AANoFreeImpl {
2318 AANoFreeFunction(
const IRPosition &IRP, Attributor &
A)
2319 : AANoFreeImpl(IRP,
A) {}
2326struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2327 AANoFreeCallSite(
const IRPosition &IRP, Attributor &
A)
2328 : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP,
A) {}
2335struct AANoFreeFloating : AANoFreeImpl {
2336 AANoFreeFloating(
const IRPosition &IRP, Attributor &
A)
2337 : AANoFreeImpl(IRP,
A) {}
2344 const IRPosition &IRP = getIRPosition();
2349 DepClassTy::OPTIONAL, IsKnown))
2350 return ChangeStatus::UNCHANGED;
2352 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2353 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2365 DepClassTy::REQUIRED, IsKnown);
2382 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2383 return indicatePessimisticFixpoint();
2385 return ChangeStatus::UNCHANGED;
2390struct AANoFreeArgument final : AANoFreeFloating {
2391 AANoFreeArgument(
const IRPosition &IRP, Attributor &
A)
2392 : AANoFreeFloating(IRP,
A) {}
2399struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2400 AANoFreeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2401 : AANoFreeFloating(IRP,
A) {}
2409 Argument *Arg = getAssociatedArgument();
2411 return indicatePessimisticFixpoint();
2415 DepClassTy::REQUIRED, IsKnown))
2416 return ChangeStatus::UNCHANGED;
2417 return indicatePessimisticFixpoint();
2425struct AANoFreeReturned final : AANoFreeFloating {
2426 AANoFreeReturned(
const IRPosition &IRP, Attributor &
A)
2427 : AANoFreeFloating(IRP,
A) {
2442 void trackStatistics()
const override {}
2446struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2447 AANoFreeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2448 : AANoFreeFloating(IRP,
A) {}
2451 return ChangeStatus::UNCHANGED;
2462 bool IgnoreSubsumingPositions) {
2464 AttrKinds.
push_back(Attribute::NonNull);
2467 AttrKinds.
push_back(Attribute::Dereferenceable);
2468 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2475 if (!Fn->isDeclaration()) {
2485 bool UsedAssumedInformation =
false;
2486 if (!
A.checkForAllInstructions(
2488 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2492 UsedAssumedInformation,
false,
true))
2504 Attribute::NonNull)});
2509static int64_t getKnownNonNullAndDerefBytesForUse(
2510 Attributor &
A,
const AbstractAttribute &QueryingAA,
Value &AssociatedValue,
2511 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2514 const Value *UseV =
U->get();
2535 const DataLayout &
DL =
A.getInfoCache().getDL();
2539 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2556 bool IsKnownNonNull;
2559 IsNonNull |= IsKnownNonNull;
2562 return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;
2566 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2567 Loc->Size.isScalable() ||
I->isVolatile())
2573 if (
Base &&
Base == &AssociatedValue) {
2574 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2576 return std::max(int64_t(0), DerefBytes);
2583 int64_t DerefBytes = Loc->Size.getValue();
2585 return std::max(int64_t(0), DerefBytes);
2591struct AANonNullImpl : AANonNull {
2592 AANonNullImpl(
const IRPosition &IRP, Attributor &
A) : AANonNull(IRP,
A) {}
2596 Value &
V = *getAssociatedValue().stripPointerCasts();
2598 indicatePessimisticFixpoint();
2602 if (Instruction *CtxI = getCtxI())
2603 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2607 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
2608 AANonNull::StateType &State) {
2609 bool IsNonNull =
false;
2610 bool TrackUse =
false;
2611 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2612 IsNonNull, TrackUse);
2613 State.setKnown(IsNonNull);
2618 const std::string getAsStr(Attributor *
A)
const override {
2619 return getAssumed() ?
"nonnull" :
"may-null";
2624struct AANonNullFloating :
public AANonNullImpl {
2625 AANonNullFloating(
const IRPosition &IRP, Attributor &
A)
2626 : AANonNullImpl(IRP,
A) {}
2630 auto CheckIRP = [&](
const IRPosition &IRP) {
2631 bool IsKnownNonNull;
2633 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2637 bool UsedAssumedInformation =
false;
2638 Value *AssociatedValue = &getAssociatedValue();
2640 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2645 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2651 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2652 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2655 return ChangeStatus::UNCHANGED;
2659 DepClassTy::OPTIONAL, IsKnown) &&
2662 DepClassTy::OPTIONAL, IsKnown))
2663 return ChangeStatus::UNCHANGED;
2670 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2671 return indicatePessimisticFixpoint();
2672 return ChangeStatus::UNCHANGED;
2675 for (
const auto &VAC : Values)
2677 return indicatePessimisticFixpoint();
2679 return ChangeStatus::UNCHANGED;
2687struct AANonNullReturned final
2688 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2689 false, AANonNull::IRAttributeKind, false> {
2690 AANonNullReturned(
const IRPosition &IRP, Attributor &
A)
2691 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2696 const std::string getAsStr(Attributor *
A)
const override {
2697 return getAssumed() ?
"nonnull" :
"may-null";
2705struct AANonNullArgument final
2706 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2707 AANonNullArgument(
const IRPosition &IRP, Attributor &
A)
2708 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP,
A) {}
2714struct AANonNullCallSiteArgument final : AANonNullFloating {
2715 AANonNullCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2716 : AANonNullFloating(IRP,
A) {}
2723struct AANonNullCallSiteReturned final
2724 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2725 AANonNullCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2726 : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP,
A) {}
2735struct AAMustProgressImpl :
public AAMustProgress {
2736 AAMustProgressImpl(
const IRPosition &IRP, Attributor &
A)
2737 : AAMustProgress(IRP,
A) {}
2743 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2748 const std::string getAsStr(Attributor *
A)
const override {
2749 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2753struct AAMustProgressFunction final : AAMustProgressImpl {
2754 AAMustProgressFunction(
const IRPosition &IRP, Attributor &
A)
2755 : AAMustProgressImpl(IRP,
A) {}
2761 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2763 return indicateOptimisticFixpoint();
2764 return ChangeStatus::UNCHANGED;
2767 auto CheckForMustProgress = [&](AbstractCallSite ACS) {
2769 bool IsKnownMustProgress;
2771 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2775 bool AllCallSitesKnown =
true;
2776 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2779 return indicatePessimisticFixpoint();
2781 return ChangeStatus::UNCHANGED;
2785 void trackStatistics()
const override {
2791struct AAMustProgressCallSite final : AAMustProgressImpl {
2792 AAMustProgressCallSite(
const IRPosition &IRP, Attributor &
A)
2793 : AAMustProgressImpl(IRP,
A) {}
2802 bool IsKnownMustProgress;
2804 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2805 return indicatePessimisticFixpoint();
2806 return ChangeStatus::UNCHANGED;
2810 void trackStatistics()
const override {
2819struct AANoRecurseImpl :
public AANoRecurse {
2820 AANoRecurseImpl(
const IRPosition &IRP, Attributor &
A) : AANoRecurse(IRP,
A) {}
2826 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2831 const std::string getAsStr(Attributor *
A)
const override {
2832 return getAssumed() ?
"norecurse" :
"may-recurse";
2836struct AANoRecurseFunction final : AANoRecurseImpl {
2837 AANoRecurseFunction(
const IRPosition &IRP, Attributor &
A)
2838 : AANoRecurseImpl(IRP,
A) {}
2844 auto CallSitePred = [&](AbstractCallSite ACS) {
2845 bool IsKnownNoRecurse;
2849 DepClassTy::NONE, IsKnownNoRecurse))
2851 return IsKnownNoRecurse;
2853 bool UsedAssumedInformation =
false;
2854 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2855 UsedAssumedInformation)) {
2861 if (!UsedAssumedInformation)
2862 indicateOptimisticFixpoint();
2863 return ChangeStatus::UNCHANGED;
2866 const AAInterFnReachability *EdgeReachability =
2867 A.getAAFor<AAInterFnReachability>(*
this, getIRPosition(),
2868 DepClassTy::REQUIRED);
2869 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2870 return indicatePessimisticFixpoint();
2871 return ChangeStatus::UNCHANGED;
2878struct AANoRecurseCallSite final
2879 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2880 AANoRecurseCallSite(
const IRPosition &IRP, Attributor &
A)
2881 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2891struct AANonConvergentImpl :
public AANonConvergent {
2892 AANonConvergentImpl(
const IRPosition &IRP, Attributor &
A)
2893 : AANonConvergent(IRP,
A) {}
2896 const std::string getAsStr(Attributor *
A)
const override {
2897 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2901struct AANonConvergentFunction final : AANonConvergentImpl {
2902 AANonConvergentFunction(
const IRPosition &IRP, Attributor &
A)
2903 : AANonConvergentImpl(IRP,
A) {}
2909 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2912 if (!Callee ||
Callee->isIntrinsic()) {
2915 if (
Callee->isDeclaration()) {
2916 return !
Callee->hasFnAttribute(Attribute::Convergent);
2918 const auto *ConvergentAA =
A.getAAFor<AANonConvergent>(
2920 return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
2923 bool UsedAssumedInformation =
false;
2924 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2925 UsedAssumedInformation)) {
2926 return indicatePessimisticFixpoint();
2928 return ChangeStatus::UNCHANGED;
2932 if (isKnownNotConvergent() &&
2933 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2934 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2935 return ChangeStatus::CHANGED;
2937 return ChangeStatus::UNCHANGED;
2947struct AAUndefinedBehaviorImpl :
public AAUndefinedBehavior {
2948 AAUndefinedBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
2949 : AAUndefinedBehavior(IRP,
A) {}
2954 const size_t UBPrevSize = KnownUBInsts.size();
2955 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2959 if (
I.isVolatile() &&
I.mayWriteToMemory())
2963 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2972 "Expected pointer operand of memory accessing instruction");
2976 std::optional<Value *> SimplifiedPtrOp =
2977 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2978 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2980 const Value *PtrOpVal = *SimplifiedPtrOp;
2986 AssumedNoUBInsts.insert(&
I);
2998 AssumedNoUBInsts.insert(&
I);
3000 KnownUBInsts.insert(&
I);
3009 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3016 if (BrInst->isUnconditional())
3021 std::optional<Value *> SimplifiedCond =
3022 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3023 if (!SimplifiedCond || !*SimplifiedCond)
3025 AssumedNoUBInsts.insert(&
I);
3033 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3042 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3048 if (idx >=
Callee->arg_size())
3060 bool IsKnownNoUndef;
3062 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3063 if (!IsKnownNoUndef)
3065 bool UsedAssumedInformation =
false;
3066 std::optional<Value *> SimplifiedVal =
3069 if (UsedAssumedInformation)
3071 if (SimplifiedVal && !*SimplifiedVal)
3074 KnownUBInsts.insert(&
I);
3080 bool IsKnownNonNull;
3082 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3084 KnownUBInsts.insert(&
I);
3093 std::optional<Value *> SimplifiedRetValue =
3094 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3095 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3113 bool IsKnownNonNull;
3118 KnownUBInsts.insert(&
I);
3124 bool UsedAssumedInformation =
false;
3125 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3126 {Instruction::Load, Instruction::Store,
3127 Instruction::AtomicCmpXchg,
3128 Instruction::AtomicRMW},
3129 UsedAssumedInformation,
3131 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3132 UsedAssumedInformation,
3134 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3135 UsedAssumedInformation);
3139 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3141 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3142 bool IsKnownNoUndef;
3144 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3146 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3147 {Instruction::Ret}, UsedAssumedInformation,
3152 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3153 UBPrevSize != KnownUBInsts.size())
3154 return ChangeStatus::CHANGED;
3155 return ChangeStatus::UNCHANGED;
3158 bool isKnownToCauseUB(Instruction *
I)
const override {
3159 return KnownUBInsts.count(
I);
3162 bool isAssumedToCauseUB(Instruction *
I)
const override {
3169 switch (
I->getOpcode()) {
3170 case Instruction::Load:
3171 case Instruction::Store:
3172 case Instruction::AtomicCmpXchg:
3173 case Instruction::AtomicRMW:
3174 return !AssumedNoUBInsts.count(
I);
3175 case Instruction::Br: {
3177 if (BrInst->isUnconditional())
3179 return !AssumedNoUBInsts.count(
I);
3188 if (KnownUBInsts.empty())
3189 return ChangeStatus::UNCHANGED;
3190 for (Instruction *
I : KnownUBInsts)
3191 A.changeToUnreachableAfterManifest(
I);
3192 return ChangeStatus::CHANGED;
3196 const std::string getAsStr(Attributor *
A)
const override {
3197 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3225 SmallPtrSet<Instruction *, 8> KnownUBInsts;
3229 SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;
3240 std::optional<Value *> stopOnUndefOrAssumed(Attributor &
A,
Value *V,
3242 bool UsedAssumedInformation =
false;
3243 std::optional<Value *> SimplifiedV =
3246 if (!UsedAssumedInformation) {
3251 KnownUBInsts.insert(
I);
3252 return std::nullopt;
3259 KnownUBInsts.insert(
I);
3260 return std::nullopt;
3266struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3267 AAUndefinedBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
3268 : AAUndefinedBehaviorImpl(IRP,
A) {}
3271 void trackStatistics()
const override {
3272 STATS_DECL(UndefinedBehaviorInstruction, Instruction,
3273 "Number of instructions known to have UB");
3275 KnownUBInsts.size();
3286static bool mayContainUnboundedCycle(Function &
F, Attributor &
A) {
3287 ScalarEvolution *SE =
3288 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
F);
3289 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
F);
3295 for (scc_iterator<Function *> SCCI =
scc_begin(&
F); !SCCI.isAtEnd(); ++SCCI)
3296 if (SCCI.hasCycle())
3306 for (
auto *L : LI->getLoopsInPreorder()) {
3313struct AAWillReturnImpl :
public AAWillReturn {
3314 AAWillReturnImpl(
const IRPosition &IRP, Attributor &
A)
3315 : AAWillReturn(IRP,
A) {}
3321 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3326 bool isImpliedByMustprogressAndReadonly(Attributor &
A,
bool KnownOnly) {
3327 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3332 return IsKnown || !KnownOnly;
3338 if (isImpliedByMustprogressAndReadonly(
A,
false))
3339 return ChangeStatus::UNCHANGED;
3345 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3351 bool IsKnownNoRecurse;
3353 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3356 bool UsedAssumedInformation =
false;
3357 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3358 UsedAssumedInformation))
3359 return indicatePessimisticFixpoint();
3361 return ChangeStatus::UNCHANGED;
3365 const std::string getAsStr(Attributor *
A)
const override {
3366 return getAssumed() ?
"willreturn" :
"may-noreturn";
3370struct AAWillReturnFunction final : AAWillReturnImpl {
3371 AAWillReturnFunction(
const IRPosition &IRP, Attributor &
A)
3372 : AAWillReturnImpl(IRP,
A) {}
3376 AAWillReturnImpl::initialize(
A);
3379 assert(
F &&
"Did expect an anchor function");
3380 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3381 indicatePessimisticFixpoint();
3389struct AAWillReturnCallSite final
3390 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3391 AAWillReturnCallSite(
const IRPosition &IRP, Attributor &
A)
3392 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3396 if (isImpliedByMustprogressAndReadonly(
A,
false))
3397 return ChangeStatus::UNCHANGED;
3399 return AACalleeToCallSite::updateImpl(
A);
3421 const ToTy *
To =
nullptr;
3448 if (!ES || ES->
empty()) {
3449 ExclusionSet = nullptr;
3450 }
else if (MakeUnique) {
3451 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3476 if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
3478 return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
3482#define DefineKeys(ToTy) \
3484 ReachabilityQueryInfo<ToTy> \
3485 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3486 ReachabilityQueryInfo<ToTy>( \
3487 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3488 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3490 ReachabilityQueryInfo<ToTy> \
3491 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3492 ReachabilityQueryInfo<ToTy>( \
3493 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3494 DenseMapInfo<const ToTy *>::getTombstoneKey());
3503template <
typename BaseTy,
typename ToTy>
3504struct CachedReachabilityAA :
public BaseTy {
3505 using RQITy = ReachabilityQueryInfo<ToTy>;
3507 CachedReachabilityAA(
const IRPosition &IRP, Attributor &
A) : BaseTy(IRP,
A) {}
3510 bool isQueryAA()
const override {
return true; }
3515 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3516 RQITy *RQI = QueryVector[
u];
3517 if (RQI->Result == RQITy::Reachable::No &&
3519 Changed = ChangeStatus::CHANGED;
3525 bool IsTemporaryRQI) = 0;
3527 bool rememberResult(Attributor &
A,
typename RQITy::Reachable
Result,
3528 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3533 QueryCache.erase(&RQI);
3539 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3540 RQITy PlainRQI(RQI.From, RQI.To);
3541 if (!QueryCache.count(&PlainRQI)) {
3542 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3544 QueryVector.push_back(RQIPtr);
3545 QueryCache.insert(RQIPtr);
3550 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3551 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3552 "Did not expect empty set!");
3553 RQITy *RQIPtr =
new (
A.Allocator)
3554 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3555 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3557 assert(!QueryCache.count(RQIPtr));
3558 QueryVector.push_back(RQIPtr);
3559 QueryCache.insert(RQIPtr);
3562 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3563 A.registerForUpdate(*
this);
3564 return Result == RQITy::Reachable::Yes;
3567 const std::string getAsStr(Attributor *
A)
const override {
3569 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3572 bool checkQueryCache(Attributor &
A, RQITy &StackRQI,
3573 typename RQITy::Reachable &
Result) {
3574 if (!this->getState().isValidState()) {
3575 Result = RQITy::Reachable::Yes;
3581 if (StackRQI.ExclusionSet) {
3582 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3583 auto It = QueryCache.find(&PlainRQI);
3584 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3585 Result = RQITy::Reachable::No;
3590 auto It = QueryCache.find(&StackRQI);
3591 if (It != QueryCache.end()) {
3598 QueryCache.insert(&StackRQI);
3604 DenseSet<RQITy *> QueryCache;
3607struct AAIntraFnReachabilityFunction final
3608 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3609 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3610 AAIntraFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
3612 DT =
A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(
3616 bool isAssumedReachable(
3617 Attributor &
A,
const Instruction &From,
const Instruction &To,
3619 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3623 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
3624 typename RQITy::Reachable
Result;
3625 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3626 return NonConstThis->isReachableImpl(
A, StackRQI,
3628 return Result == RQITy::Reachable::Yes;
3635 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3638 [&](
const auto &DeadEdge) {
3639 return LivenessAA->isEdgeDead(DeadEdge.first,
3643 return LivenessAA->isAssumedDead(BB);
3645 return ChangeStatus::UNCHANGED;
3649 return Base::updateImpl(
A);
3653 bool IsTemporaryRQI)
override {
3655 bool UsedExclusionSet =
false;
3660 while (IP && IP != &To) {
3661 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3662 UsedExclusionSet =
true;
3670 const BasicBlock *FromBB = RQI.From->getParent();
3671 const BasicBlock *ToBB = RQI.To->getParent();
3673 "Not an intra-procedural query!");
3677 if (FromBB == ToBB &&
3678 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3679 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3684 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3685 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3689 SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
3690 if (RQI.ExclusionSet)
3691 for (
auto *
I : *RQI.ExclusionSet)
3692 if (
I->getFunction() == Fn)
3693 ExclusionBlocks.
insert(
I->getParent());
3696 if (ExclusionBlocks.
count(FromBB) &&
3699 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3702 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3703 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3704 DeadBlocks.insert(ToBB);
3705 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3709 SmallPtrSet<const BasicBlock *, 16> Visited;
3713 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
3714 while (!Worklist.
empty()) {
3716 if (!Visited.
insert(BB).second)
3718 for (
const BasicBlock *SuccBB :
successors(BB)) {
3719 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3720 LocalDeadEdges.
insert({BB, SuccBB});
3725 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3728 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3731 if (ExclusionBlocks.
count(SuccBB)) {
3732 UsedExclusionSet =
true;
3739 DeadEdges.insert_range(LocalDeadEdges);
3740 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3745 void trackStatistics()
const override {}
3750 DenseSet<const BasicBlock *> DeadBlocks;
3754 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;
3757 const DominatorTree *DT =
nullptr;
3765 bool IgnoreSubsumingPositions) {
3766 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3767 "Unexpected attribute kind");
3773 IgnoreSubsumingPositions =
true;
3784 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3785 IgnoreSubsumingPositions, Attribute::NoAlias))
3795 "Noalias is a pointer attribute");
3798 const std::string getAsStr(
Attributor *
A)
const override {
3799 return getAssumed() ?
"noalias" :
"may-alias";
3804struct AANoAliasFloating final : AANoAliasImpl {
3805 AANoAliasFloating(
const IRPosition &IRP, Attributor &
A)
3806 : AANoAliasImpl(IRP,
A) {}
3811 return indicatePessimisticFixpoint();
3815 void trackStatistics()
const override {
3821struct AANoAliasArgument final
3822 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3823 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3824 AANoAliasArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
3837 DepClassTy::OPTIONAL, IsKnownNoSycn))
3838 return Base::updateImpl(
A);
3843 return Base::updateImpl(
A);
3847 bool UsedAssumedInformation =
false;
3848 if (
A.checkForAllCallSites(
3849 [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *
this,
3850 true, UsedAssumedInformation))
3851 return Base::updateImpl(
A);
3859 return indicatePessimisticFixpoint();
3866struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3867 AANoAliasCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
3868 : AANoAliasImpl(IRP,
A) {}
3872 bool mayAliasWithArgument(Attributor &
A, AAResults *&AAR,
3873 const AAMemoryBehavior &MemBehaviorAA,
3874 const CallBase &CB,
unsigned OtherArgNo) {
3876 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3884 auto *CBArgMemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
3888 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3889 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3896 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3898 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3899 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3905 AAR =
A.getInfoCache().getAnalysisResultForFunction<AAManager>(
3909 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3911 "callsite arguments: "
3912 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3913 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3918 bool isKnownNoAliasDueToNoAliasPreservation(
3919 Attributor &
A, AAResults *&AAR,
const AAMemoryBehavior &MemBehaviorAA) {
3932 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3948 bool IsKnownNoCapture;
3951 DepClassTy::OPTIONAL, IsKnownNoCapture))
3957 A, *UserI, *getCtxI(), *
this,
nullptr,
3958 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3973 LLVM_DEBUG(
dbgs() <<
"[AANoAliasCSArg] Unknown user: " << *UserI <<
"\n");
3977 bool IsKnownNoCapture;
3978 const AANoCapture *NoCaptureAA =
nullptr;
3980 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3981 if (!IsAssumedNoCapture &&
3983 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3985 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3986 <<
" cannot be noalias as it is potentially captured\n");
3991 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3997 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3998 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4008 auto *MemBehaviorAA =
4009 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
4011 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4012 return ChangeStatus::UNCHANGED;
4015 bool IsKnownNoAlias;
4018 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4020 <<
" is not no-alias at the definition\n");
4021 return indicatePessimisticFixpoint();
4024 AAResults *AAR =
nullptr;
4025 if (MemBehaviorAA &&
4026 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4028 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4029 return ChangeStatus::UNCHANGED;
4032 return indicatePessimisticFixpoint();
4040struct AANoAliasReturned final : AANoAliasImpl {
4041 AANoAliasReturned(
const IRPosition &IRP, Attributor &
A)
4042 : AANoAliasImpl(IRP,
A) {}
4047 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4058 bool IsKnownNoAlias;
4060 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4063 bool IsKnownNoCapture;
4064 const AANoCapture *NoCaptureAA =
nullptr;
4066 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4068 return IsAssumedNoCapture ||
4072 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4073 return indicatePessimisticFixpoint();
4075 return ChangeStatus::UNCHANGED;
4083struct AANoAliasCallSiteReturned final
4084 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4085 AANoAliasCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4086 : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP,
A) {}
4096struct AAIsDeadValueImpl :
public AAIsDead {
4097 AAIsDeadValueImpl(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4100 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4103 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4106 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4109 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4112 bool isAssumedDead(
const Instruction *
I)
const override {
4113 return I == getCtxI() && isAssumedDead();
4117 bool isKnownDead(
const Instruction *
I)
const override {
4118 return isAssumedDead(
I) && isKnownDead();
4122 const std::string getAsStr(Attributor *
A)
const override {
4123 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4127 bool areAllUsesAssumedDead(Attributor &
A,
Value &V) {
4129 if (
V.getType()->isVoidTy() ||
V.use_empty())
4135 if (!
A.isRunOn(*
I->getFunction()))
4137 bool UsedAssumedInformation =
false;
4138 std::optional<Constant *>
C =
4139 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4144 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4149 return A.checkForAllUses(UsePred, *
this, V,
false,
4150 DepClassTy::REQUIRED,
4155 bool isAssumedSideEffectFree(Attributor &
A, Instruction *
I) {
4165 bool IsKnownNoUnwind;
4167 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4175struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4176 AAIsDeadFloating(
const IRPosition &IRP, Attributor &
A)
4177 : AAIsDeadValueImpl(IRP,
A) {}
4181 AAIsDeadValueImpl::initialize(
A);
4184 indicatePessimisticFixpoint();
4189 if (!isAssumedSideEffectFree(
A,
I)) {
4191 indicatePessimisticFixpoint();
4193 removeAssumedBits(HAS_NO_EFFECT);
4197 bool isDeadFence(Attributor &
A, FenceInst &FI) {
4198 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
4200 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4202 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4206 bool isDeadStore(Attributor &
A, StoreInst &SI,
4207 SmallSetVector<Instruction *, 8> *AssumeOnlyInst =
nullptr) {
4209 if (
SI.isVolatile())
4215 bool UsedAssumedInformation =
false;
4216 if (!AssumeOnlyInst) {
4217 PotentialCopies.clear();
4219 UsedAssumedInformation)) {
4222 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4226 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4227 <<
" potential copies.\n");
4229 InformationCache &InfoCache =
A.getInfoCache();
4232 UsedAssumedInformation))
4236 auto &UserI = cast<Instruction>(*U.getUser());
4237 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4239 AssumeOnlyInst->insert(&UserI);
4242 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4248 <<
" is assumed live!\n");
4254 const std::string getAsStr(Attributor *
A)
const override {
4258 return "assumed-dead-store";
4261 return "assumed-dead-fence";
4262 return AAIsDeadValueImpl::getAsStr(
A);
4269 if (!isDeadStore(
A, *SI))
4270 return indicatePessimisticFixpoint();
4272 if (!isDeadFence(
A, *FI))
4273 return indicatePessimisticFixpoint();
4275 if (!isAssumedSideEffectFree(
A,
I))
4276 return indicatePessimisticFixpoint();
4277 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4278 return indicatePessimisticFixpoint();
4283 bool isRemovableStore()
const override {
4284 return isAssumed(IS_REMOVABLE) &&
isa<StoreInst>(&getAssociatedValue());
4289 Value &
V = getAssociatedValue();
4296 SmallSetVector<Instruction *, 8> AssumeOnlyInst;
4297 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4300 A.deleteAfterManifest(*
I);
4301 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4303 for (
auto *Usr : AOI->
users())
4305 A.deleteAfterManifest(*AOI);
4311 A.deleteAfterManifest(*FI);
4315 A.deleteAfterManifest(*
I);
4323 void trackStatistics()
const override {
4329 SmallSetVector<Value *, 4> PotentialCopies;
4332struct AAIsDeadArgument :
public AAIsDeadFloating {
4333 AAIsDeadArgument(
const IRPosition &IRP, Attributor &
A)
4334 : AAIsDeadFloating(IRP,
A) {}
4338 Argument &Arg = *getAssociatedArgument();
4339 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4340 if (
A.registerFunctionSignatureRewrite(
4344 return ChangeStatus::CHANGED;
4346 return ChangeStatus::UNCHANGED;
4353struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4354 AAIsDeadCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
4355 : AAIsDeadValueImpl(IRP,
A) {}
4359 AAIsDeadValueImpl::initialize(
A);
4361 indicatePessimisticFixpoint();
4370 Argument *Arg = getAssociatedArgument();
4372 return indicatePessimisticFixpoint();
4374 auto *ArgAA =
A.getAAFor<AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4376 return indicatePessimisticFixpoint();
4385 "Expected undef values to be filtered out!");
4387 if (
A.changeUseAfterManifest(U, UV))
4388 return ChangeStatus::CHANGED;
4389 return ChangeStatus::UNCHANGED;
4396struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4397 AAIsDeadCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4398 : AAIsDeadFloating(IRP,
A) {}
4401 bool isAssumedDead()
const override {
4402 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4407 AAIsDeadFloating::initialize(
A);
4409 indicatePessimisticFixpoint();
4414 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4420 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4421 IsAssumedSideEffectFree =
false;
4422 Changed = ChangeStatus::CHANGED;
4424 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4425 return indicatePessimisticFixpoint();
4430 void trackStatistics()
const override {
4431 if (IsAssumedSideEffectFree)
4438 const std::string getAsStr(Attributor *
A)
const override {
4439 return isAssumedDead()
4441 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4445 bool IsAssumedSideEffectFree =
true;
4448struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4449 AAIsDeadReturned(
const IRPosition &IRP, Attributor &
A)
4450 : AAIsDeadValueImpl(IRP,
A) {}
4455 bool UsedAssumedInformation =
false;
4456 A.checkForAllInstructions([](Instruction &) {
return true; }, *
this,
4457 {Instruction::Ret}, UsedAssumedInformation);
4459 auto PredForCallSite = [&](AbstractCallSite ACS) {
4460 if (ACS.isCallbackCall() || !ACS.getInstruction())
4462 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4465 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4466 UsedAssumedInformation))
4467 return indicatePessimisticFixpoint();
4469 return ChangeStatus::UNCHANGED;
4475 bool AnyChange =
false;
4476 UndefValue &UV = *
UndefValue::get(getAssociatedFunction()->getReturnType());
4483 bool UsedAssumedInformation =
false;
4484 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4485 UsedAssumedInformation);
4486 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4493struct AAIsDeadFunction :
public AAIsDead {
4494 AAIsDeadFunction(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4499 assert(
F &&
"Did expect an anchor function");
4500 if (!isAssumedDeadInternalFunction(
A)) {
4501 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4502 assumeLive(
A,
F->getEntryBlock());
4506 bool isAssumedDeadInternalFunction(Attributor &
A) {
4507 if (!getAnchorScope()->hasLocalLinkage())
4509 bool UsedAssumedInformation =
false;
4510 return A.checkForAllCallSites([](AbstractCallSite) {
return false; }, *
this,
4511 true, UsedAssumedInformation);
4515 const std::string getAsStr(Attributor *
A)
const override {
4516 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4517 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4518 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4519 std::to_string(KnownDeadEnds.size()) +
"]";
4524 assert(getState().isValidState() &&
4525 "Attempted to manifest an invalid state!");
4530 if (AssumedLiveBlocks.empty()) {
4531 A.deleteAfterManifest(
F);
4532 return ChangeStatus::CHANGED;
4538 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4540 KnownDeadEnds.set_union(ToBeExploredFrom);
4541 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4545 bool IsKnownNoReturn;
4553 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4555 A.changeToUnreachableAfterManifest(
4556 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4557 HasChanged = ChangeStatus::CHANGED;
4560 STATS_DECL(AAIsDead, BasicBlock,
"Number of dead basic blocks deleted.");
4561 for (BasicBlock &BB :
F)
4562 if (!AssumedLiveBlocks.count(&BB)) {
4563 A.deleteAfterManifest(BB);
4565 HasChanged = ChangeStatus::CHANGED;
4574 bool isEdgeDead(
const BasicBlock *From,
const BasicBlock *To)
const override {
4577 "Used AAIsDead of the wrong function");
4578 return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
4582 void trackStatistics()
const override {}
4585 bool isAssumedDead()
const override {
return false; }
4588 bool isKnownDead()
const override {
return false; }
4591 bool isAssumedDead(
const BasicBlock *BB)
const override {
4593 "BB must be in the same anchor scope function.");
4597 return !AssumedLiveBlocks.count(BB);
4601 bool isKnownDead(
const BasicBlock *BB)
const override {
4602 return getKnown() && isAssumedDead(BB);
4606 bool isAssumedDead(
const Instruction *
I)
const override {
4607 assert(
I->getParent()->getParent() == getAnchorScope() &&
4608 "Instruction must be in the same anchor scope function.");
4615 if (!AssumedLiveBlocks.count(
I->getParent()))
4621 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4629 bool isKnownDead(
const Instruction *
I)
const override {
4630 return getKnown() && isAssumedDead(
I);
4635 bool assumeLive(Attributor &
A,
const BasicBlock &BB) {
4636 if (!AssumedLiveBlocks.insert(&BB).second)
4643 for (
const Instruction &
I : BB)
4646 if (
F->hasLocalLinkage())
4647 A.markLiveInternalFunction(*
F);
4653 SmallSetVector<const Instruction *, 8> ToBeExploredFrom;
4656 SmallSetVector<const Instruction *, 8> KnownDeadEnds;
4659 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;
4662 DenseSet<const BasicBlock *> AssumedLiveBlocks;
4666identifyAliveSuccessors(Attributor &
A,
const CallBase &CB,
4667 AbstractAttribute &AA,
4668 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4671 bool IsKnownNoReturn;
4674 return !IsKnownNoReturn;
4683identifyAliveSuccessors(Attributor &
A,
const InvokeInst &
II,
4684 AbstractAttribute &AA,
4685 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4686 bool UsedAssumedInformation =
4692 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4693 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4697 bool IsKnownNoUnwind;
4700 UsedAssumedInformation |= !IsKnownNoUnwind;
4702 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4705 return UsedAssumedInformation;
4709identifyAliveSuccessors(Attributor &
A,
const BranchInst &BI,
4710 AbstractAttribute &AA,
4711 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4712 bool UsedAssumedInformation =
false;
4716 std::optional<Constant *>
C =
4717 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4727 UsedAssumedInformation =
false;
4730 return UsedAssumedInformation;
4734identifyAliveSuccessors(Attributor &
A,
const SwitchInst &SI,
4735 AbstractAttribute &AA,
4736 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4737 bool UsedAssumedInformation =
false;
4741 UsedAssumedInformation)) {
4743 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4748 if (Values.
empty() ||
4749 (Values.
size() == 1 &&
4752 return UsedAssumedInformation;
4755 Type &Ty = *
SI.getCondition()->getType();
4756 SmallPtrSet<ConstantInt *, 8>
Constants;
4757 auto CheckForConstantInt = [&](
Value *
V) {
4765 if (!
all_of(Values, [&](AA::ValueAndContext &VAC) {
4766 return CheckForConstantInt(
VAC.getValue());
4768 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4770 return UsedAssumedInformation;
4773 unsigned MatchedCases = 0;
4774 for (
const auto &CaseIt :
SI.cases()) {
4775 if (
Constants.count(CaseIt.getCaseValue())) {
4777 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4784 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4785 return UsedAssumedInformation;
4791 if (AssumedLiveBlocks.empty()) {
4792 if (isAssumedDeadInternalFunction(
A))
4796 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4797 assumeLive(
A,
F->getEntryBlock());
4801 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4802 << getAnchorScope()->
size() <<
"] BBs and "
4803 << ToBeExploredFrom.size() <<
" exploration points and "
4804 << KnownDeadEnds.size() <<
" known dead ends\n");
4809 ToBeExploredFrom.end());
4810 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4813 while (!Worklist.
empty()) {
4820 I =
I->getNextNode();
4822 AliveSuccessors.
clear();
4824 bool UsedAssumedInformation =
false;
4825 switch (
I->getOpcode()) {
4829 "Expected non-terminators to be handled already!");
4830 for (
const BasicBlock *SuccBB :
successors(
I->getParent()))
4833 case Instruction::Call:
4835 *
this, AliveSuccessors);
4837 case Instruction::Invoke:
4839 *
this, AliveSuccessors);
4841 case Instruction::Br:
4843 *
this, AliveSuccessors);
4845 case Instruction::Switch:
4847 *
this, AliveSuccessors);
4851 if (UsedAssumedInformation) {
4852 NewToBeExploredFrom.insert(
I);
4853 }
else if (AliveSuccessors.
empty() ||
4854 (
I->isTerminator() &&
4855 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4856 if (KnownDeadEnds.insert(
I))
4861 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4862 << UsedAssumedInformation <<
"\n");
4864 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4865 if (!
I->isTerminator()) {
4866 assert(AliveSuccessors.size() == 1 &&
4867 "Non-terminator expected to have a single successor!");
4871 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4872 if (AssumedLiveEdges.insert(
Edge).second)
4874 if (assumeLive(
A, *AliveSuccessor->getParent()))
4881 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4882 llvm::any_of(NewToBeExploredFrom, [&](
const Instruction *
I) {
4883 return !ToBeExploredFrom.count(I);
4886 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4895 if (ToBeExploredFrom.empty() &&
4896 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4897 llvm::all_of(KnownDeadEnds, [](
const Instruction *DeadEndI) {
4898 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4900 return indicatePessimisticFixpoint();
4905struct AAIsDeadCallSite final : AAIsDeadFunction {
4906 AAIsDeadCallSite(
const IRPosition &IRP, Attributor &
A)
4907 : AAIsDeadFunction(IRP,
A) {}
4916 "supported for call sites yet!");
4921 return indicatePessimisticFixpoint();
4925 void trackStatistics()
const override {}
4932struct AADereferenceableImpl : AADereferenceable {
4933 AADereferenceableImpl(
const IRPosition &IRP, Attributor &
A)
4934 : AADereferenceable(IRP,
A) {}
4935 using StateType = DerefState;
4939 Value &
V = *getAssociatedValue().stripPointerCasts();
4941 A.getAttrs(getIRPosition(),
4942 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4945 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4948 bool IsKnownNonNull;
4950 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4952 bool CanBeNull, CanBeFreed;
4953 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4954 A.getDataLayout(), CanBeNull, CanBeFreed));
4956 if (Instruction *CtxI = getCtxI())
4957 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4962 StateType &getState()
override {
return *
this; }
4963 const StateType &getState()
const override {
return *
this; }
4967 void addAccessedBytesForUse(Attributor &
A,
const Use *U,
const Instruction *
I,
4968 DerefState &State) {
4969 const Value *UseV =
U->get();
4974 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4979 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4980 if (
Base &&
Base == &getAssociatedValue())
4981 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4985 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
4986 AADereferenceable::StateType &State) {
4987 bool IsNonNull =
false;
4988 bool TrackUse =
false;
4989 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4990 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4991 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4992 <<
" for instruction " << *
I <<
"\n");
4994 addAccessedBytesForUse(
A, U,
I, State);
4995 State.takeKnownDerefBytesMaximum(DerefBytes);
5002 bool IsKnownNonNull;
5004 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5005 if (IsAssumedNonNull &&
5006 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5007 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5008 return ChangeStatus::CHANGED;
5013 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5014 SmallVectorImpl<Attribute> &Attrs)
const override {
5016 bool IsKnownNonNull;
5018 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5019 if (IsAssumedNonNull)
5020 Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
5021 Ctx, getAssumedDereferenceableBytes()));
5023 Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
5024 Ctx, getAssumedDereferenceableBytes()));
5028 const std::string getAsStr(Attributor *
A)
const override {
5029 if (!getAssumedDereferenceableBytes())
5030 return "unknown-dereferenceable";
5031 bool IsKnownNonNull;
5032 bool IsAssumedNonNull =
false;
5035 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5036 return std::string(
"dereferenceable") +
5037 (IsAssumedNonNull ?
"" :
"_or_null") +
5038 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5039 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5040 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5041 (!
A ?
" [non-null is unknown]" :
"");
5046struct AADereferenceableFloating : AADereferenceableImpl {
5047 AADereferenceableFloating(
const IRPosition &IRP, Attributor &
A)
5048 : AADereferenceableImpl(IRP,
A) {}
5053 bool UsedAssumedInformation =
false;
5055 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5057 Values.
push_back({getAssociatedValue(), getCtxI()});
5060 Stripped = Values.
size() != 1 ||
5061 Values.
front().getValue() != &getAssociatedValue();
5064 const DataLayout &
DL =
A.getDataLayout();
5067 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5069 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5070 APInt
Offset(IdxWidth, 0);
5075 const auto *AA =
A.getAAFor<AADereferenceable>(
5077 int64_t DerefBytes = 0;
5078 if (!AA || (!Stripped &&
this == AA)) {
5081 bool CanBeNull, CanBeFreed;
5083 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5084 T.GlobalState.indicatePessimisticFixpoint();
5087 DerefBytes =
DS.DerefBytesState.getAssumed();
5088 T.GlobalState &=
DS.GlobalState;
5094 int64_t OffsetSExt =
Offset.getSExtValue();
5098 T.takeAssumedDerefBytesMinimum(
5099 std::max(int64_t(0), DerefBytes - OffsetSExt));
5104 T.takeKnownDerefBytesMaximum(
5105 std::max(int64_t(0), DerefBytes - OffsetSExt));
5106 T.indicatePessimisticFixpoint();
5107 }
else if (OffsetSExt > 0) {
5113 T.indicatePessimisticFixpoint();
5117 return T.isValidState();
5120 for (
const auto &VAC : Values)
5121 if (!VisitValueCB(*
VAC.getValue()))
5122 return indicatePessimisticFixpoint();
5128 void trackStatistics()
const override {
5134struct AADereferenceableReturned final
5135 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5137 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5138 AADereferenceableReturned(
const IRPosition &IRP, Attributor &
A)
5142 void trackStatistics()
const override {
5148struct AADereferenceableArgument final
5149 : AAArgumentFromCallSiteArguments<AADereferenceable,
5150 AADereferenceableImpl> {
5152 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5153 AADereferenceableArgument(
const IRPosition &IRP, Attributor &
A)
5157 void trackStatistics()
const override {
5163struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5164 AADereferenceableCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5165 : AADereferenceableFloating(IRP,
A) {}
5168 void trackStatistics()
const override {
5174struct AADereferenceableCallSiteReturned final
5175 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5176 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5177 AADereferenceableCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5181 void trackStatistics()
const override {
5190static unsigned getKnownAlignForUse(Attributor &
A, AAAlign &QueryingAA,
5191 Value &AssociatedValue,
const Use *U,
5192 const Instruction *
I,
bool &TrackUse) {
5201 if (
GEP->hasAllConstantIndices())
5217 MA = MaybeAlign(AlignAA->getKnownAlign());
5220 const DataLayout &
DL =
A.getDataLayout();
5221 const Value *UseV =
U->get();
5223 if (
SI->getPointerOperand() == UseV)
5224 MA =
SI->getAlign();
5226 if (LI->getPointerOperand() == UseV)
5227 MA = LI->getAlign();
5229 if (AI->getPointerOperand() == UseV)
5230 MA = AI->getAlign();
5232 if (AI->getPointerOperand() == UseV)
5233 MA = AI->getAlign();
5239 unsigned Alignment = MA->value();
5243 if (
Base == &AssociatedValue) {
5248 uint32_t
gcd = std::gcd(uint32_t(
abs((int32_t)
Offset)), Alignment);
5256struct AAAlignImpl : AAAlign {
5257 AAAlignImpl(
const IRPosition &IRP, Attributor &
A) : AAAlign(IRP,
A) {}
5262 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5264 takeKnownMaximum(Attr.getValueAsInt());
5266 Value &
V = *getAssociatedValue().stripPointerCasts();
5267 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5269 if (Instruction *CtxI = getCtxI())
5270 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5278 Value &AssociatedValue = getAssociatedValue();
5280 return ChangeStatus::UNCHANGED;
5282 for (
const Use &U : AssociatedValue.
uses()) {
5284 if (
SI->getPointerOperand() == &AssociatedValue)
5285 if (
SI->getAlign() < getAssumedAlign()) {
5287 "Number of times alignment added to a store");
5288 SI->setAlignment(getAssumedAlign());
5289 InstrChanged = ChangeStatus::CHANGED;
5292 if (LI->getPointerOperand() == &AssociatedValue)
5293 if (LI->getAlign() < getAssumedAlign()) {
5294 LI->setAlignment(getAssumedAlign());
5296 "Number of times alignment added to a load");
5297 InstrChanged = ChangeStatus::CHANGED;
5300 if (RMW->getPointerOperand() == &AssociatedValue) {
5301 if (RMW->getAlign() < getAssumedAlign()) {
5303 "Number of times alignment added to atomicrmw");
5305 RMW->setAlignment(getAssumedAlign());
5306 InstrChanged = ChangeStatus::CHANGED;
5310 if (CAS->getPointerOperand() == &AssociatedValue) {
5311 if (CAS->getAlign() < getAssumedAlign()) {
5313 "Number of times alignment added to cmpxchg");
5314 CAS->setAlignment(getAssumedAlign());
5315 InstrChanged = ChangeStatus::CHANGED;
5323 Align InheritAlign =
5324 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5325 if (InheritAlign >= getAssumedAlign())
5326 return InstrChanged;
5327 return Changed | InstrChanged;
5335 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5336 SmallVectorImpl<Attribute> &Attrs)
const override {
5337 if (getAssumedAlign() > 1)
5339 Attribute::getWithAlignment(Ctx,
Align(getAssumedAlign())));
5343 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
5344 AAAlign::StateType &State) {
5345 bool TrackUse =
false;
5347 unsigned int KnownAlign =
5348 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5349 State.takeKnownMaximum(KnownAlign);
5355 const std::string getAsStr(Attributor *
A)
const override {
5356 return "align<" + std::to_string(getKnownAlign().value()) +
"-" +
5357 std::to_string(getAssumedAlign().value()) +
">";
5362struct AAAlignFloating : AAAlignImpl {
5363 AAAlignFloating(
const IRPosition &IRP, Attributor &
A) : AAAlignImpl(IRP,
A) {}
5367 const DataLayout &
DL =
A.getDataLayout();
5370 bool UsedAssumedInformation =
false;
5372 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5374 Values.
push_back({getAssociatedValue(), getCtxI()});
5377 Stripped = Values.
size() != 1 ||
5378 Values.
front().getValue() != &getAssociatedValue();
5382 auto VisitValueCB = [&](
Value &
V) ->
bool {
5386 DepClassTy::REQUIRED);
5387 if (!AA || (!Stripped &&
this == AA)) {
5389 unsigned Alignment = 1;
5402 Alignment =
V.getPointerAlignment(
DL).value();
5405 T.takeKnownMaximum(Alignment);
5406 T.indicatePessimisticFixpoint();
5409 const AAAlign::StateType &
DS = AA->
getState();
5412 return T.isValidState();
5415 for (
const auto &VAC : Values) {
5416 if (!VisitValueCB(*
VAC.getValue()))
5417 return indicatePessimisticFixpoint();
5430struct AAAlignReturned final
5431 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5432 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5433 AAAlignReturned(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5440struct AAAlignArgument final
5441 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5442 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5443 AAAlignArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5450 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5451 return ChangeStatus::UNCHANGED;
5452 return Base::manifest(
A);
5459struct AAAlignCallSiteArgument final : AAAlignFloating {
5460 AAAlignCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5461 : AAAlignFloating(IRP,
A) {}
5468 if (Argument *Arg = getAssociatedArgument())
5469 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5470 return ChangeStatus::UNCHANGED;
5472 Align InheritAlign =
5473 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5474 if (InheritAlign >= getAssumedAlign())
5475 Changed = ChangeStatus::UNCHANGED;
5482 if (Argument *Arg = getAssociatedArgument()) {
5485 const auto *ArgAlignAA =
A.getAAFor<AAAlign>(
5488 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5498struct AAAlignCallSiteReturned final
5499 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5500 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5501 AAAlignCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5511struct AANoReturnImpl :
public AANoReturn {
5512 AANoReturnImpl(
const IRPosition &IRP, Attributor &
A) : AANoReturn(IRP,
A) {}
5518 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5523 const std::string getAsStr(Attributor *
A)
const override {
5524 return getAssumed() ?
"noreturn" :
"may-return";
5529 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5530 bool UsedAssumedInformation =
false;
5531 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5532 {(unsigned)Instruction::Ret},
5533 UsedAssumedInformation))
5534 return indicatePessimisticFixpoint();
5535 return ChangeStatus::UNCHANGED;
5539struct AANoReturnFunction final : AANoReturnImpl {
5540 AANoReturnFunction(
const IRPosition &IRP, Attributor &
A)
5541 : AANoReturnImpl(IRP,
A) {}
5548struct AANoReturnCallSite final
5549 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5550 AANoReturnCallSite(
const IRPosition &IRP, Attributor &
A)
5551 : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP,
A) {}
5562struct AAInstanceInfoImpl :
public AAInstanceInfo {
5563 AAInstanceInfoImpl(
const IRPosition &IRP, Attributor &
A)
5564 : AAInstanceInfo(IRP,
A) {}
5568 Value &
V = getAssociatedValue();
5570 if (
C->isThreadDependent())
5571 indicatePessimisticFixpoint();
5573 indicateOptimisticFixpoint();
5579 indicateOptimisticFixpoint();
5584 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
5587 indicatePessimisticFixpoint();
5597 Value &
V = getAssociatedValue();
5600 Scope =
I->getFunction();
5603 if (!
Scope->hasLocalLinkage())
5607 return indicateOptimisticFixpoint();
5609 bool IsKnownNoRecurse;
5615 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5630 if (!Callee || !
Callee->hasLocalLinkage())
5634 const auto *ArgInstanceInfoAA =
A.getAAFor<AAInstanceInfo>(
5636 DepClassTy::OPTIONAL);
5637 if (!ArgInstanceInfoAA ||
5638 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5643 A, *CB, *Scope, *
this,
nullptr,
5644 [Scope](
const Function &Fn) {
return &Fn !=
Scope; }))
5651 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5653 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5661 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5662 DepClassTy::OPTIONAL,
5663 true, EquivalentUseCB))
5664 return indicatePessimisticFixpoint();
5670 const std::string getAsStr(Attributor *
A)
const override {
5671 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5675 void trackStatistics()
const override {}
5679struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5680 AAInstanceInfoFloating(
const IRPosition &IRP, Attributor &
A)
5681 : AAInstanceInfoImpl(IRP,
A) {}
5685struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5686 AAInstanceInfoArgument(
const IRPosition &IRP, Attributor &
A)
5687 : AAInstanceInfoFloating(IRP,
A) {}
5691struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5692 AAInstanceInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5693 : AAInstanceInfoImpl(IRP,
A) {}
5701 Argument *Arg = getAssociatedArgument();
5703 return indicatePessimisticFixpoint();
5706 A.getAAFor<AAInstanceInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
5708 return indicatePessimisticFixpoint();
5714struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5715 AAInstanceInfoReturned(
const IRPosition &IRP, Attributor &
A)
5716 : AAInstanceInfoImpl(IRP,
A) {
5732struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5733 AAInstanceInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5734 : AAInstanceInfoFloating(IRP,
A) {}
5741 bool IgnoreSubsumingPositions) {
5742 assert(ImpliedAttributeKind == Attribute::Captures &&
5743 "Unexpected attribute kind");
5753 V.getType()->getPointerAddressSpace() == 0)) {
5758 A.getAttrs(IRP, {Attribute::Captures}, Attrs,
5768 {Attribute::Captures, Attribute::ByVal}, Attrs,
5805 bool ReadOnly =
F.onlyReadsMemory();
5806 bool NoThrow =
F.doesNotThrow();
5807 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5808 if (ReadOnly && NoThrow && IsVoidReturn) {
5821 if (NoThrow && IsVoidReturn)
5826 if (!NoThrow || ArgNo < 0 ||
5827 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5830 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5831 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5832 if (U ==
unsigned(ArgNo))
5859 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5860 SmallVectorImpl<Attribute> &Attrs)
const override {
5861 if (!isAssumedNoCaptureMaybeReturned())
5864 if (isArgumentPosition()) {
5865 if (isAssumedNoCapture())
5866 Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
5868 Attrs.emplace_back(Attribute::get(Ctx,
"no-capture-maybe-returned"));
5873 const std::string getAsStr(Attributor *
A)
const override {
5874 if (isKnownNoCapture())
5875 return "known not-captured";
5876 if (isAssumedNoCapture())
5877 return "assumed not-captured";
5878 if (isKnownNoCaptureMaybeReturned())
5879 return "known not-captured-maybe-returned";
5880 if (isAssumedNoCaptureMaybeReturned())
5881 return "assumed not-captured-maybe-returned";
5882 return "assumed-captured";
5887 bool checkUse(Attributor &
A, AANoCapture::StateType &State,
const Use &U,
5890 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5896 return isCapturedIn(State,
true,
true,
5903 return isCapturedIn(State,
true,
true,
5909 return isCapturedIn(State,
false,
false,
5911 return isCapturedIn(State,
true,
true,
5919 return isCapturedIn(State,
true,
true,
5926 bool IsKnownNoCapture;
5927 const AANoCapture *ArgNoCaptureAA =
nullptr;
5929 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5931 if (IsAssumedNoCapture)
5932 return isCapturedIn(State,
false,
false,
5936 return isCapturedIn(State,
false,
false,
5941 return isCapturedIn(State,
true,
true,
5948 static bool isCapturedIn(AANoCapture::StateType &State,
bool CapturedInMem,
5949 bool CapturedInInt,
bool CapturedInRet) {
5950 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5951 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5963 const IRPosition &IRP = getIRPosition();
5967 return indicatePessimisticFixpoint();
5974 return indicatePessimisticFixpoint();
5982 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5984 addKnownBits(NOT_CAPTURED_IN_MEM);
5991 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5995 UsedAssumedInformation))
5997 bool SeenConstant =
false;
5998 for (
const AA::ValueAndContext &VAC : Values) {
6002 SeenConstant =
true;
6004 VAC.getValue() == getAssociatedArgument())
6010 bool IsKnownNoUnwind;
6013 bool IsVoidTy =
F->getReturnType()->isVoidTy();
6014 bool UsedAssumedInformation =
false;
6015 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
6016 T.addKnownBits(NOT_CAPTURED_IN_RET);
6017 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
6019 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
6020 addKnownBits(NOT_CAPTURED_IN_RET);
6021 if (isKnown(NOT_CAPTURED_IN_MEM))
6022 return indicateOptimisticFixpoint();
6027 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6036 return checkUse(
A,
T, U, Follow);
6039 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6040 return indicatePessimisticFixpoint();
6043 auto Assumed = S.getAssumed();
6044 S.intersectAssumedBits(
T.getAssumed());
6045 if (!isAssumedNoCaptureMaybeReturned())
6046 return indicatePessimisticFixpoint();
6052struct AANoCaptureArgument final : AANoCaptureImpl {
6053 AANoCaptureArgument(
const IRPosition &IRP, Attributor &
A)
6054 : AANoCaptureImpl(IRP,
A) {}
6061struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6062 AANoCaptureCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6063 : AANoCaptureImpl(IRP,
A) {}
6071 Argument *Arg = getAssociatedArgument();
6073 return indicatePessimisticFixpoint();
6075 bool IsKnownNoCapture;
6076 const AANoCapture *ArgAA =
nullptr;
6078 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6080 return ChangeStatus::UNCHANGED;
6082 return indicatePessimisticFixpoint();
6087 void trackStatistics()
const override {
6093struct AANoCaptureFloating final : AANoCaptureImpl {
6094 AANoCaptureFloating(
const IRPosition &IRP, Attributor &
A)
6095 : AANoCaptureImpl(IRP,
A) {}
6098 void trackStatistics()
const override {
6104struct AANoCaptureReturned final : AANoCaptureImpl {
6105 AANoCaptureReturned(
const IRPosition &IRP, Attributor &
A)
6106 : AANoCaptureImpl(IRP,
A) {
6121 void trackStatistics()
const override {}
6125struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6126 AANoCaptureCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6127 : AANoCaptureImpl(IRP,
A) {}
6133 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6137 void trackStatistics()
const override {
6154 dbgs() <<
"[ValueSimplify] is assumed to be "
6157 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6169 if (getAssociatedValue().
getType()->isVoidTy())
6170 indicatePessimisticFixpoint();
6171 if (
A.hasSimplificationCallback(getIRPosition()))
6172 indicatePessimisticFixpoint();
6176 const std::string getAsStr(Attributor *
A)
const override {
6178 dbgs() <<
"SAV: " << (bool)SimplifiedAssociatedValue <<
" ";
6179 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6180 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6182 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6187 void trackStatistics()
const override {}
6190 std::optional<Value *>
6191 getAssumedSimplifiedValue(Attributor &
A)
const override {
6192 return SimplifiedAssociatedValue;
6199 static Value *ensureType(Attributor &
A,
Value &V,
Type &Ty, Instruction *CtxI,
6203 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6205 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6214 static Value *reproduceInst(Attributor &
A,
6215 const AbstractAttribute &QueryingAA,
6216 Instruction &
I,
Type &Ty, Instruction *CtxI,
6218 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6219 if (
Check && (
I.mayReadFromMemory() ||
6224 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6226 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6248 static Value *reproduceValue(Attributor &
A,
6249 const AbstractAttribute &QueryingAA,
Value &V,
6250 Type &Ty, Instruction *CtxI,
bool Check,
6252 if (
const auto &NewV = VMap.
lookup(&V))
6254 bool UsedAssumedInformation =
false;
6255 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6257 if (!SimpleV.has_value())
6261 EffectiveV = *SimpleV;
6266 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6268 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6269 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6275 Value *manifestReplacementValue(Attributor &
A, Instruction *CtxI)
const {
6276 Value *NewV = SimplifiedAssociatedValue
6277 ? *SimplifiedAssociatedValue
6279 if (NewV && NewV != &getAssociatedValue()) {
6283 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6285 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6293 bool checkAndUpdate(Attributor &
A,
const AbstractAttribute &QueryingAA,
6294 const IRPosition &IRP,
bool Simplify =
true) {
6295 bool UsedAssumedInformation =
false;
6298 QueryingValueSimplified =
A.getAssumedSimplified(
6300 return unionAssumed(QueryingValueSimplified);
6304 template <
typename AAType>
bool askSimplifiedValueFor(Attributor &
A) {
6305 if (!getAssociatedValue().
getType()->isIntegerTy())
6310 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6314 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6317 SimplifiedAssociatedValue = std::nullopt;
6318 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6321 if (
auto *
C = *COpt) {
6322 SimplifiedAssociatedValue =
C;
6323 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6329 bool askSimplifiedValueForOtherAAs(Attributor &
A) {
6330 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6332 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6340 for (
auto &U : getAssociatedValue().uses()) {
6345 IP =
PHI->getIncomingBlock(U)->getTerminator();
6346 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6348 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6349 if (
A.changeUseAfterManifest(U, *NewV))
6350 Changed = ChangeStatus::CHANGED;
6354 return Changed | AAValueSimplify::manifest(
A);
6359 SimplifiedAssociatedValue = &getAssociatedValue();
6360 return AAValueSimplify::indicatePessimisticFixpoint();
6364struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6365 AAValueSimplifyArgument(
const IRPosition &IRP, Attributor &
A)
6366 : AAValueSimplifyImpl(IRP,
A) {}
6369 AAValueSimplifyImpl::initialize(
A);
6370 if (
A.hasAttr(getIRPosition(),
6371 {Attribute::InAlloca, Attribute::Preallocated,
6372 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6374 indicatePessimisticFixpoint();
6381 Argument *Arg = getAssociatedArgument();
6387 return indicatePessimisticFixpoint();
6390 auto Before = SimplifiedAssociatedValue;
6392 auto PredForCallSite = [&](AbstractCallSite ACS) {
6393 const IRPosition &ACSArgPos =
6404 bool UsedAssumedInformation =
false;
6405 std::optional<Constant *> SimpleArgOp =
6406 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6413 return unionAssumed(*SimpleArgOp);
6418 bool UsedAssumedInformation =
false;
6419 if (hasCallBaseContext() &&
6420 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6422 AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
6424 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6425 UsedAssumedInformation);
6428 if (!askSimplifiedValueForOtherAAs(
A))
6429 return indicatePessimisticFixpoint();
6432 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6433 : ChangeStatus ::CHANGED;
6437 void trackStatistics()
const override {
6442struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6443 AAValueSimplifyReturned(
const IRPosition &IRP, Attributor &
A)
6444 : AAValueSimplifyImpl(IRP,
A) {}
6447 std::optional<Value *>
6448 getAssumedSimplifiedValue(Attributor &
A)
const override {
6449 if (!isValidState())
6451 return SimplifiedAssociatedValue;
6456 auto Before = SimplifiedAssociatedValue;
6460 return checkAndUpdate(
6465 bool UsedAssumedInformation =
false;
6466 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6467 UsedAssumedInformation))
6468 if (!askSimplifiedValueForOtherAAs(
A))
6469 return indicatePessimisticFixpoint();
6472 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6473 : ChangeStatus ::CHANGED;
6479 return ChangeStatus::UNCHANGED;
6483 void trackStatistics()
const override {
6488struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6489 AAValueSimplifyFloating(
const IRPosition &IRP, Attributor &
A)
6490 : AAValueSimplifyImpl(IRP,
A) {}
6494 AAValueSimplifyImpl::initialize(
A);
6495 Value &
V = getAnchorValue();
6499 indicatePessimisticFixpoint();
6504 auto Before = SimplifiedAssociatedValue;
6505 if (!askSimplifiedValueForOtherAAs(
A))
6506 return indicatePessimisticFixpoint();
6509 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6510 : ChangeStatus ::CHANGED;
6514 void trackStatistics()
const override {
6519struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6520 AAValueSimplifyFunction(
const IRPosition &IRP, Attributor &
A)
6521 : AAValueSimplifyImpl(IRP,
A) {}
6525 SimplifiedAssociatedValue =
nullptr;
6526 indicateOptimisticFixpoint();
6531 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6534 void trackStatistics()
const override {
6539struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6540 AAValueSimplifyCallSite(
const IRPosition &IRP, Attributor &
A)
6541 : AAValueSimplifyFunction(IRP,
A) {}
6543 void trackStatistics()
const override {
6548struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6549 AAValueSimplifyCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6550 : AAValueSimplifyImpl(IRP,
A) {}
6553 AAValueSimplifyImpl::initialize(
A);
6554 Function *Fn = getAssociatedFunction();
6555 assert(Fn &&
"Did expect an associted function");
6556 for (Argument &Arg : Fn->
args()) {
6561 checkAndUpdate(
A, *
this, IRP))
6562 indicateOptimisticFixpoint();
6564 indicatePessimisticFixpoint();
6572 return indicatePessimisticFixpoint();
6575 void trackStatistics()
const override {
6580struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6581 AAValueSimplifyCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6582 : AAValueSimplifyFloating(IRP,
A) {}
6588 auto *FloatAA =
A.lookupAAFor<AAValueSimplify>(
6590 if (FloatAA && FloatAA->getState().isValidState())
6593 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6595 ->getArgOperandUse(getCallSiteArgNo());
6596 if (
A.changeUseAfterManifest(U, *NewV))
6597 Changed = ChangeStatus::CHANGED;
6600 return Changed | AAValueSimplify::manifest(
A);
6603 void trackStatistics()
const override {
6611struct AAHeapToStackFunction final :
public AAHeapToStack {
6613 struct AllocationInfo {
6625 } Status = STACK_DUE_TO_USE;
6629 bool HasPotentiallyFreeingUnknownUses =
false;
6633 bool MoveAllocaIntoEntry =
true;
6636 SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
6639 struct DeallocationInfo {
6647 bool MightFreeUnknownObjects =
false;
6650 SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
6653 AAHeapToStackFunction(
const IRPosition &IRP, Attributor &
A)
6654 : AAHeapToStack(IRP,
A) {}
6656 ~AAHeapToStackFunction() {
6659 for (
auto &It : AllocationInfos)
6660 It.second->~AllocationInfo();
6661 for (
auto &It : DeallocationInfos)
6662 It.second->~DeallocationInfo();
6666 AAHeapToStack::initialize(
A);
6669 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6676 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6683 auto *I8Ty = Type::getInt8Ty(CB->
getParent()->getContext());
6685 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6686 AllocationInfos[CB] = AI;
6688 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6694 bool UsedAssumedInformation =
false;
6695 bool Success =
A.checkForAllCallLikeInstructions(
6696 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6700 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6703 [](
const IRPosition &,
const AbstractAttribute *,
6704 bool &) -> std::optional<Value *> {
return nullptr; };
6705 for (
const auto &It : AllocationInfos)
6708 for (
const auto &It : DeallocationInfos)
6713 const std::string getAsStr(Attributor *
A)
const override {
6714 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6715 for (
const auto &It : AllocationInfos) {
6716 if (It.second->Status == AllocationInfo::INVALID)
6717 ++NumInvalidMallocs;
6721 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6722 std::to_string(NumInvalidMallocs);
6726 void trackStatistics()
const override {
6728 MallocCalls, Function,
6729 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6730 for (
const auto &It : AllocationInfos)
6731 if (It.second->Status != AllocationInfo::INVALID)
6735 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6737 if (AllocationInfo *AI =
6738 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6739 return AI->Status != AllocationInfo::INVALID;
6743 bool isAssumedHeapToStackRemovedFree(CallBase &CB)
const override {
6744 if (!isValidState())
6747 for (
const auto &It : AllocationInfos) {
6748 AllocationInfo &AI = *It.second;
6749 if (AI.Status == AllocationInfo::INVALID)
6752 if (AI.PotentialFreeCalls.count(&CB))
6760 assert(getState().isValidState() &&
6761 "Attempted to manifest an invalid state!");
6765 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6767 for (
auto &It : AllocationInfos) {
6768 AllocationInfo &AI = *It.second;
6769 if (AI.Status == AllocationInfo::INVALID)
6772 for (CallBase *FreeCall : AI.PotentialFreeCalls) {
6773 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6774 A.deleteAfterManifest(*FreeCall);
6775 HasChanged = ChangeStatus::CHANGED;
6778 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6781 auto Remark = [&](OptimizationRemark
OR) {
6783 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6784 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6785 return OR <<
"Moving globalized variable to the stack.";
6786 return OR <<
"Moving memory allocation from the heap to the stack.";
6788 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6789 A.emitRemark<OptimizationRemark>(AI.CB,
"OMP110",
Remark);
6791 A.emitRemark<OptimizationRemark>(AI.CB,
"HeapToStack",
Remark);
6793 const DataLayout &
DL =
A.getInfoCache().getDL();
6795 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6797 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6799 LLVMContext &Ctx = AI.CB->getContext();
6800 ObjectSizeOpts Opts;
6801 ObjectSizeOffsetEvaluator Eval(
DL, TLI, Ctx, Opts);
6802 SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
6809 ?
F->getEntryBlock().begin()
6810 : AI.CB->getIterator();
6813 if (MaybeAlign RetAlign = AI.CB->getRetAlign())
6814 Alignment = std::max(Alignment, *RetAlign);
6816 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *Align);
6817 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6818 "Expected an alignment during manifest!");
6820 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6824 unsigned AS =
DL.getAllocaAddrSpace();
6826 new AllocaInst(Type::getInt8Ty(
F->getContext()), AS,
Size, Alignment,
6827 AI.CB->getName() +
".h2s", IP);
6829 if (Alloca->
getType() != AI.CB->getType())
6830 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6831 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6833 auto *I8Ty = Type::getInt8Ty(
F->getContext());
6836 "Must be able to materialize initial memory state of allocation");
6841 auto *NBB =
II->getNormalDest();
6843 A.deleteAfterManifest(*AI.CB);
6845 A.deleteAfterManifest(*AI.CB);
6854 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6856 HasChanged = ChangeStatus::CHANGED;
6862 std::optional<APInt> getAPInt(Attributor &
A,
const AbstractAttribute &AA,
6864 bool UsedAssumedInformation =
false;
6865 std::optional<Constant *> SimpleV =
6866 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6868 return APInt(64, 0);
6870 return CI->getValue();
6871 return std::nullopt;
6874 std::optional<APInt>
getSize(Attributor &
A,
const AbstractAttribute &AA,
6875 AllocationInfo &AI) {
6876 auto Mapper = [&](
const Value *
V) ->
const Value * {
6877 bool UsedAssumedInformation =
false;
6878 if (std::optional<Constant *> SimpleV =
6879 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6886 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6892 MapVector<CallBase *, AllocationInfo *> AllocationInfos;
6896 MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;
6901ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6904 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6906 const auto *LivenessAA =
6909 MustBeExecutedContextExplorer *Explorer =
6910 A.getInfoCache().getMustBeExecutedContextExplorer();
6912 bool StackIsAccessibleByOtherThreads =
6913 A.getInfoCache().stackIsAccessibleByOtherThreads();
6916 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
6917 std::optional<bool> MayContainIrreducibleControl;
6919 if (&
F->getEntryBlock() == &BB)
6921 if (!MayContainIrreducibleControl.has_value())
6923 if (*MayContainIrreducibleControl)
6932 bool HasUpdatedFrees =
false;
6934 auto UpdateFrees = [&]() {
6935 HasUpdatedFrees =
true;
6937 for (
auto &It : DeallocationInfos) {
6938 DeallocationInfo &DI = *It.second;
6941 if (DI.MightFreeUnknownObjects)
6945 bool UsedAssumedInformation =
false;
6946 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6953 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6954 DI.MightFreeUnknownObjects =
true;
6967 DI.MightFreeUnknownObjects =
true;
6971 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6973 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6975 DI.MightFreeUnknownObjects =
true;
6979 DI.PotentialAllocationCalls.insert(ObjCB);
6983 auto FreeCheck = [&](AllocationInfo &AI) {
6987 if (!StackIsAccessibleByOtherThreads) {
6992 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6993 "other threads and function is not nosync:\n");
6997 if (!HasUpdatedFrees)
7001 if (AI.PotentialFreeCalls.size() != 1) {
7003 << AI.PotentialFreeCalls.size() <<
"\n");
7006 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
7007 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
7010 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
7011 << *UniqueFree <<
"\n");
7014 if (DI->MightFreeUnknownObjects) {
7016 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7019 if (DI->PotentialAllocationCalls.empty())
7021 if (DI->PotentialAllocationCalls.size() > 1) {
7023 << DI->PotentialAllocationCalls.size()
7024 <<
" different allocations\n");
7027 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7030 <<
"[H2S] unique free call not known to free this allocation but "
7031 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7036 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
7038 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7039 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7040 "with the allocation "
7041 << *UniqueFree <<
"\n");
7048 auto UsesCheck = [&](AllocationInfo &AI) {
7049 bool ValidUsesOnly =
true;
7051 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
7056 if (
SI->getValueOperand() ==
U.get()) {
7058 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7059 ValidUsesOnly =
false;
7068 if (DeallocationInfos.count(CB)) {
7069 AI.PotentialFreeCalls.insert(CB);
7076 bool IsKnownNoCapture;
7085 if (!IsAssumedNoCapture ||
7086 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7087 !IsAssumedNoFree)) {
7088 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7091 auto Remark = [&](OptimizationRemarkMissed ORM) {
7093 <<
"Could not move globalized variable to the stack. "
7094 "Variable is potentially captured in call. Mark "
7095 "parameter as `__attribute__((noescape))` to override.";
7098 if (ValidUsesOnly &&
7099 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7100 A.emitRemark<OptimizationRemarkMissed>(CB,
"OMP113",
Remark);
7103 ValidUsesOnly =
false;
7116 ValidUsesOnly =
false;
7119 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7121 [&](
const Use &OldU,
const Use &NewU) {
7122 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7123 return !SI || StackIsAccessibleByOtherThreads ||
7124 AA::isAssumedThreadLocalObject(
7125 A, *SI->getPointerOperand(), *this);
7128 return ValidUsesOnly;
7133 for (
auto &It : AllocationInfos) {
7134 AllocationInfo &AI = *It.second;
7135 if (AI.Status == AllocationInfo::INVALID)
7139 std::optional<APInt> APAlign = getAPInt(
A, *
this, *Align);
7143 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7145 AI.Status = AllocationInfo::INVALID;
7150 !APAlign->isPowerOf2()) {
7151 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7153 AI.Status = AllocationInfo::INVALID;
7160 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7165 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7167 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7171 AI.Status = AllocationInfo::INVALID;
7177 switch (AI.Status) {
7178 case AllocationInfo::STACK_DUE_TO_USE:
7181 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7183 case AllocationInfo::STACK_DUE_TO_FREE:
7186 AI.Status = AllocationInfo::INVALID;
7189 case AllocationInfo::INVALID:
7196 bool IsGlobalizedLocal =
7197 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7198 if (AI.MoveAllocaIntoEntry &&
7199 (!
Size.has_value() ||
7200 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7201 AI.MoveAllocaIntoEntry =
false;
7210struct AAPrivatizablePtrImpl :
public AAPrivatizablePtr {
7211 AAPrivatizablePtrImpl(
const IRPosition &IRP, Attributor &
A)
7212 : AAPrivatizablePtr(IRP,
A), PrivatizableType(std::nullopt) {}
7215 AAPrivatizablePtr::indicatePessimisticFixpoint();
7216 PrivatizableType =
nullptr;
7217 return ChangeStatus::CHANGED;
7223 virtual std::optional<Type *> identifyPrivatizableType(Attributor &
A) = 0;
7227 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7228 std::optional<Type *>
T1) {
7238 std::optional<Type *> getPrivatizableType()
const override {
7239 return PrivatizableType;
7242 const std::string getAsStr(Attributor *
A)
const override {
7243 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7247 std::optional<Type *> PrivatizableType;
7252struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7253 AAPrivatizablePtrArgument(
const IRPosition &IRP, Attributor &
A)
7254 : AAPrivatizablePtrImpl(IRP,
A) {}
7257 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7260 bool UsedAssumedInformation =
false;
7262 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7264 if (!
Attrs.empty() &&
7265 A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *
this,
7266 true, UsedAssumedInformation))
7267 return Attrs[0].getValueAsType();
7269 std::optional<Type *> Ty;
7270 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7278 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7287 A.getAAFor<AAPrivatizablePtr>(*
this, ACSArgPos, DepClassTy::REQUIRED);
7290 std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();
7293 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7297 dbgs() <<
"<nullptr>";
7302 Ty = combineTypes(Ty, CSTy);
7305 dbgs() <<
" : New Type: ";
7307 (*Ty)->print(
dbgs());
7309 dbgs() <<
"<nullptr>";
7318 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7319 UsedAssumedInformation))
7326 PrivatizableType = identifyPrivatizableType(
A);
7327 if (!PrivatizableType)
7328 return ChangeStatus::UNCHANGED;
7329 if (!*PrivatizableType)
7330 return indicatePessimisticFixpoint();
7335 DepClassTy::OPTIONAL);
7338 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7341 return indicatePessimisticFixpoint();
7347 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7351 Function &Fn = *getIRPosition().getAnchorScope();
7353 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
7355 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7357 return indicatePessimisticFixpoint();
7360 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7367 bool UsedAssumedInformation =
false;
7368 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7369 UsedAssumedInformation)) {
7371 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7373 return indicatePessimisticFixpoint();
7377 Argument *Arg = getAssociatedArgument();
7378 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7380 return indicatePessimisticFixpoint();
7387 auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
7390 for (
const Use *U : CallbackUses) {
7391 AbstractCallSite CBACS(U);
7392 assert(CBACS && CBACS.isCallbackCall());
7393 for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
7394 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7398 <<
"[AAPrivatizablePtr] Argument " << *Arg
7399 <<
"check if can be privatized in the context of its parent ("
7401 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7403 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7404 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7405 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7407 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7408 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7411 if (CBArgNo !=
int(ArgNo))
7413 const auto *CBArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7415 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7416 auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
7419 if (*CBArgPrivTy == PrivatizableType)
7424 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7425 <<
" cannot be privatized in the context of its parent ("
7427 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7429 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7430 <<
").\n[AAPrivatizablePtr] for which the argument "
7431 "privatization is not compatible.\n";
7441 auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
7445 "Expected a direct call operand for callback call operand");
7450 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7451 <<
" check if be privatized in the context of its parent ("
7453 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7455 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7458 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7459 const auto *DCArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7461 DepClassTy::REQUIRED);
7462 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7463 auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
7466 if (*DCArgPrivTy == PrivatizableType)
7472 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7473 <<
" cannot be privatized in the context of its parent ("
7475 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7478 <<
").\n[AAPrivatizablePtr] for which the argument "
7479 "privatization is not compatible.\n";
7487 auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
7491 return IsCompatiblePrivArgOfDirectCS(ACS);
7495 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7496 UsedAssumedInformation))
7497 return indicatePessimisticFixpoint();
7499 return ChangeStatus::UNCHANGED;
7505 identifyReplacementTypes(
Type *PrivType,
7506 SmallVectorImpl<Type *> &ReplacementTypes) {
7509 assert(PrivType &&
"Expected privatizable type!");
7513 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7514 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7516 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7517 PrivArrayType->getElementType());
7526 static void createInitialization(
Type *PrivType,
Value &
Base, Function &
F,
7528 assert(PrivType &&
"Expected privatizable type!");
7531 const DataLayout &
DL =
F.getDataLayout();
7535 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7536 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7539 new StoreInst(
F.getArg(ArgNo + u),
Ptr, IP);
7542 Type *PointeeTy = PrivArrayType->getElementType();
7543 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7544 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7546 new StoreInst(
F.getArg(ArgNo + u),
Ptr, IP);
7549 new StoreInst(
F.getArg(ArgNo), &
Base, IP);
7555 void createReplacementValues(Align Alignment,
Type *PrivType,
7557 SmallVectorImpl<Value *> &ReplacementValues) {
7559 assert(PrivType &&
"Expected privatizable type!");
7567 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7568 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7569 Type *PointeeTy = PrivStructType->getElementType(u);
7573 L->setAlignment(Alignment);
7577 Type *PointeeTy = PrivArrayType->getElementType();
7578 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7579 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7582 L->setAlignment(Alignment);
7587 L->setAlignment(Alignment);
7594 if (!PrivatizableType)
7595 return ChangeStatus::UNCHANGED;
7596 assert(*PrivatizableType &&
"Expected privatizable type!");
7602 bool UsedAssumedInformation =
false;
7603 if (!
A.checkForAllInstructions(
7604 [&](Instruction &
I) {
7605 CallInst &CI = cast<CallInst>(I);
7606 if (CI.isTailCall())
7607 TailCalls.push_back(&CI);
7610 *
this, {Instruction::Call}, UsedAssumedInformation))
7611 return ChangeStatus::UNCHANGED;
7613 Argument *Arg = getAssociatedArgument();
7616 const auto *AlignAA =
7623 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7625 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7627 const DataLayout &
DL = IP->getDataLayout();
7628 unsigned AS =
DL.getAllocaAddrSpace();
7629 Instruction *AI =
new AllocaInst(*PrivatizableType, AS,
7630 Arg->
getName() +
".priv", IP);
7631 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7632 ArgIt->getArgNo(), IP);
7635 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7639 for (CallInst *CI : TailCalls)
7640 CI->setTailCall(
false);
7647 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7648 AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
7651 createReplacementValues(
7652 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7653 *PrivatizableType, ACS,
7661 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7664 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7665 std::move(FnRepairCB),
7666 std::move(ACSRepairCB)))
7667 return ChangeStatus::CHANGED;
7668 return ChangeStatus::UNCHANGED;
7672 void trackStatistics()
const override {
7677struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7678 AAPrivatizablePtrFloating(
const IRPosition &IRP, Attributor &
A)
7679 : AAPrivatizablePtrImpl(IRP,
A) {}
7684 indicatePessimisticFixpoint();
7689 "updateImpl will not be called");
7693 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7696 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7703 return AI->getAllocatedType();
7705 auto *PrivArgAA =
A.getAAFor<AAPrivatizablePtr>(
7707 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7708 return PrivArgAA->getPrivatizableType();
7711 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7712 "alloca nor privatizable argument: "
7718 void trackStatistics()
const override {
7723struct AAPrivatizablePtrCallSiteArgument final
7724 :
public AAPrivatizablePtrFloating {
7725 AAPrivatizablePtrCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
7726 : AAPrivatizablePtrFloating(IRP,
A) {}
7730 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7731 indicateOptimisticFixpoint();
7736 PrivatizableType = identifyPrivatizableType(
A);
7737 if (!PrivatizableType)
7738 return ChangeStatus::UNCHANGED;
7739 if (!*PrivatizableType)
7740 return indicatePessimisticFixpoint();
7742 const IRPosition &IRP = getIRPosition();
7743 bool IsKnownNoCapture;
7745 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7746 if (!IsAssumedNoCapture) {
7747 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7748 return indicatePessimisticFixpoint();
7751 bool IsKnownNoAlias;
7753 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7754 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7755 return indicatePessimisticFixpoint();
7760 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7761 return indicatePessimisticFixpoint();
7764 return ChangeStatus::UNCHANGED;
7768 void trackStatistics()
const override {
7773struct AAPrivatizablePtrCallSiteReturned final
7774 :
public AAPrivatizablePtrFloating {
7775 AAPrivatizablePtrCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
7776 : AAPrivatizablePtrFloating(IRP,
A) {}
7781 indicatePessimisticFixpoint();
7785 void trackStatistics()
const override {
7790struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7791 AAPrivatizablePtrReturned(
const IRPosition &IRP, Attributor &
A)
7792 : AAPrivatizablePtrFloating(IRP,
A) {}
7797 indicatePessimisticFixpoint();
7801 void trackStatistics()
const override {
7811struct AAMemoryBehaviorImpl :
public AAMemoryBehavior {
7812 AAMemoryBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
7813 : AAMemoryBehavior(IRP,
A) {}
7817 intersectAssumedBits(BEST_STATE);
7818 getKnownStateFromValue(
A, getIRPosition(), getState());
7819 AAMemoryBehavior::initialize(
A);
7823 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
7824 BitIntegerState &State,
7825 bool IgnoreSubsumingPositions =
false) {
7827 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7829 switch (Attr.getKindAsEnum()) {
7830 case Attribute::ReadNone:
7833 case Attribute::ReadOnly:
7836 case Attribute::WriteOnly:
7845 if (!
I->mayReadFromMemory())
7847 if (!
I->mayWriteToMemory())
7853 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
7854 SmallVectorImpl<Attribute> &Attrs)
const override {
7857 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
7859 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
7860 else if (isAssumedWriteOnly())
7861 Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
7867 const IRPosition &IRP = getIRPosition();
7869 if (
A.hasAttr(IRP, Attribute::ReadNone,
7871 return ChangeStatus::UNCHANGED;
7880 return ChangeStatus::UNCHANGED;
7883 A.removeAttrs(IRP, AttrKinds);
7886 A.removeAttrs(IRP, Attribute::Writable);
7893 const std::string getAsStr(Attributor *
A)
const override {
7898 if (isAssumedWriteOnly())
7900 return "may-read/write";
7904 static const Attribute::AttrKind AttrKinds[3];
7908 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7911struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7912 AAMemoryBehaviorFloating(
const IRPosition &IRP, Attributor &
A)
7913 : AAMemoryBehaviorImpl(IRP,
A) {}
7919 void trackStatistics()
const override {
7924 else if (isAssumedWriteOnly())
7931 bool followUsersOfUseIn(Attributor &
A,
const Use &U,
7932 const Instruction *UserI);
7935 void analyzeUseIn(Attributor &
A,
const Use &U,
const Instruction *UserI);
7939struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7940 AAMemoryBehaviorArgument(
const IRPosition &IRP, Attributor &
A)
7941 : AAMemoryBehaviorFloating(IRP,
A) {}
7945 intersectAssumedBits(BEST_STATE);
7946 const IRPosition &IRP = getIRPosition();
7950 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7952 getKnownStateFromValue(
A, IRP, getState(),
7959 return ChangeStatus::UNCHANGED;
7963 if (
A.hasAttr(getIRPosition(),
7964 {Attribute::InAlloca, Attribute::Preallocated})) {
7965 removeKnownBits(NO_WRITES);
7966 removeAssumedBits(NO_WRITES);
7968 A.removeAttrs(getIRPosition(), AttrKinds);
7969 return AAMemoryBehaviorFloating::manifest(
A);
7973 void trackStatistics()
const override {
7978 else if (isAssumedWriteOnly())
7983struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7984 AAMemoryBehaviorCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
7985 : AAMemoryBehaviorArgument(IRP,
A) {}
7991 Argument *Arg = getAssociatedArgument();
7993 indicatePessimisticFixpoint();
7997 addKnownBits(NO_WRITES);
7998 removeKnownBits(NO_READS);
7999 removeAssumedBits(NO_READS);
8001 AAMemoryBehaviorArgument::initialize(
A);
8002 if (getAssociatedFunction()->isDeclaration())
8003 indicatePessimisticFixpoint();
8012 Argument *Arg = getAssociatedArgument();
8015 A.getAAFor<AAMemoryBehavior>(*
this, ArgPos, DepClassTy::REQUIRED);
8017 return indicatePessimisticFixpoint();
8022 void trackStatistics()
const override {
8027 else if (isAssumedWriteOnly())
8033struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8034 AAMemoryBehaviorCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
8035 : AAMemoryBehaviorFloating(IRP,
A) {}
8039 AAMemoryBehaviorImpl::initialize(
A);
8044 return ChangeStatus::UNCHANGED;
8048 void trackStatistics()
const override {}
8052struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8053 AAMemoryBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
8054 : AAMemoryBehaviorImpl(IRP,
A) {}
8070 else if (isAssumedWriteOnly())
8073 A.removeAttrs(getIRPosition(), AttrKinds);
8076 for (Argument &Arg :
F.args())
8078 return A.manifestAttrs(getIRPosition(),
8079 Attribute::getWithMemoryEffects(
F.getContext(), ME));
8083 void trackStatistics()
const override {
8088 else if (isAssumedWriteOnly())
8094struct AAMemoryBehaviorCallSite final
8095 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8096 AAMemoryBehaviorCallSite(
const IRPosition &IRP, Attributor &
A)
8097 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP,
A) {}
8108 else if (isAssumedWriteOnly())
8111 A.removeAttrs(getIRPosition(), AttrKinds);
8114 for (Use &U : CB.
args())
8116 Attribute::Writable);
8117 return A.manifestAttrs(
8118 getIRPosition(), Attribute::getWithMemoryEffects(CB.
getContext(), ME));
8122 void trackStatistics()
const override {
8127 else if (isAssumedWriteOnly())
8132ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &
A) {
8135 auto AssumedState = getAssumed();
8142 const auto *MemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
8144 if (MemBehaviorAA) {
8145 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8146 return !isAtFixpoint();
8151 if (
I.mayReadFromMemory())
8152 removeAssumedBits(NO_READS);
8153 if (
I.mayWriteToMemory())
8154 removeAssumedBits(NO_WRITES);
8155 return !isAtFixpoint();
8158 bool UsedAssumedInformation =
false;
8159 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8160 UsedAssumedInformation))
8161 return indicatePessimisticFixpoint();
8167ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &
A) {
8169 const IRPosition &IRP = getIRPosition();
8180 const auto *FnMemAA =
8183 FnMemAssumedState = FnMemAA->getAssumed();
8184 S.addKnownBits(FnMemAA->getKnown());
8185 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8191 auto AssumedState = S.getAssumed();
8197 bool IsKnownNoCapture;
8198 const AANoCapture *ArgNoCaptureAA =
nullptr;
8203 if (!IsAssumedNoCapture &&
8205 S.intersectAssumedBits(FnMemAssumedState);
8211 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8213 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8221 Follow = followUsersOfUseIn(
A, U, UserI);
8225 analyzeUseIn(
A, U, UserI);
8227 return !isAtFixpoint();
8230 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8231 return indicatePessimisticFixpoint();
8237bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &
A,
const Use &U,
8238 const Instruction *UserI) {
8256 if (
U.get()->getType()->isPointerTy()) {
8258 bool IsKnownNoCapture;
8267void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &
A,
const Use &U,
8268 const Instruction *UserI) {
8275 case Instruction::Load:
8277 removeAssumedBits(NO_READS);
8280 case Instruction::Store:
8285 removeAssumedBits(NO_WRITES);
8287 indicatePessimisticFixpoint();
8290 case Instruction::Call:
8291 case Instruction::CallBr:
8292 case Instruction::Invoke: {
8299 indicatePessimisticFixpoint();
8306 removeAssumedBits(NO_READS);
8313 if (
U.get()->getType()->isPointerTy())
8317 const auto *MemBehaviorAA =
8323 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8331 removeAssumedBits(NO_READS);
8333 removeAssumedBits(NO_WRITES);
8345 return "all memory";
8348 std::string S =
"memory:";
8354 S +=
"internal global,";
8356 S +=
"external global,";
8360 S +=
"inaccessible,";
8374 AccessKind2Accesses.fill(
nullptr);
8377 ~AAMemoryLocationImpl() {
8380 for (AccessSet *AS : AccessKind2Accesses)
8387 intersectAssumedBits(BEST_STATE);
8388 getKnownStateFromValue(
A, getIRPosition(), getState());
8389 AAMemoryLocation::initialize(
A);
8393 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
8394 BitIntegerState &State,
8395 bool IgnoreSubsumingPositions =
false) {
8404 bool UseArgMemOnly =
true;
8406 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8410 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8419 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8424 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8428 A.manifestAttrs(IRP,
8429 Attribute::getWithMemoryEffects(
8438 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8442 A.manifestAttrs(IRP,
8443 Attribute::getWithMemoryEffects(
8453 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
8454 SmallVectorImpl<Attribute> &Attrs)
const override {
8461 else if (isAssumedInaccessibleMemOnly())
8462 Attrs.push_back(Attribute::getWithMemoryEffects(
8464 else if (isAssumedArgMemOnly())
8467 else if (isAssumedInaccessibleOrArgMemOnly())
8468 Attrs.push_back(Attribute::getWithMemoryEffects(
8478 const IRPosition &IRP = getIRPosition();
8482 if (DeducedAttrs.
size() != 1)
8483 return ChangeStatus::UNCHANGED;
8486 return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(
8491 bool checkForAllAccessesToMemoryKind(
8493 MemoryLocationsKind)>
8495 MemoryLocationsKind RequestedMLK)
const override {
8496 if (!isValidState())
8499 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8500 if (AssumedMLK == NO_LOCATIONS)
8504 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8505 CurMLK *= 2, ++Idx) {
8506 if (CurMLK & RequestedMLK)
8509 if (
const AccessSet *
Accesses = AccessKind2Accesses[Idx])
8510 for (
const AccessInfo &AI : *
Accesses)
8511 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8524 MemoryLocationsKind KnownMLK = getKnown();
8526 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8527 if (!(CurMLK & KnownMLK))
8528 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr,
Changed,
8529 getAccessKindFromInst(
I));
8530 return AAMemoryLocation::indicatePessimisticFixpoint();
8550 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8554 return LHS.Ptr <
RHS.Ptr;
8555 if (
LHS.Kind !=
RHS.Kind)
8556 return LHS.Kind <
RHS.Kind;
8563 using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
8564 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8569 categorizeArgumentPointerLocations(Attributor &
A, CallBase &CB,
8570 AAMemoryLocation::StateType &AccessedLocs,
8575 categorizeAccessedLocations(Attributor &
A, Instruction &
I,
bool &
Changed);
8578 AccessKind getAccessKindFromInst(
const Instruction *
I) {
8581 AK =
I->mayReadFromMemory() ? READ :
NONE;
8590 void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
8591 MemoryLocationsKind MLK,
const Instruction *
I,
8600 if (MLK == NO_UNKOWN_MEM)
8602 State.removeAssumedBits(MLK);
8607 void categorizePtrValue(Attributor &
A,
const Instruction &
I,
const Value &
Ptr,
8608 AAMemoryLocation::StateType &State,
bool &
Changed,
8609 unsigned AccessAS = 0);
8615void AAMemoryLocationImpl::categorizePtrValue(
8616 Attributor &
A,
const Instruction &
I,
const Value &
Ptr,
8618 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8622 auto Pred = [&](
Value &Obj) {
8625 MemoryLocationsKind MLK = NO_LOCATIONS;
8644 MLK = NO_ARGUMENT_MEM;
8650 if (GVar->isConstant())
8653 if (GV->hasLocalLinkage())
8654 MLK = NO_GLOBAL_INTERNAL_MEM;
8656 MLK = NO_GLOBAL_EXTERNAL_MEM;
8664 bool IsKnownNoAlias;
8668 MLK = NO_MALLOCED_MEM;
8670 MLK = NO_UNKOWN_MEM;
8672 MLK = NO_UNKOWN_MEM;
8675 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8676 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8677 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8679 getAccessKindFromInst(&
I));
8684 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
8688 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8689 updateStateAndAccessesMap(
State, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8690 getAccessKindFromInst(&
I));
8695 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8699void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8702 for (
unsigned ArgNo = 0,
E = CB.
arg_size(); ArgNo <
E; ++ArgNo) {
8711 const auto *ArgOpMemLocationAA =
8714 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8719 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs,
Changed);
8724AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &
A, Instruction &
I,
8726 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8730 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8735 const auto *CBMemLocationAA =
A.getAAFor<AAMemoryLocation>(
8738 <<
" [" << CBMemLocationAA <<
"]\n");
8739 if (!CBMemLocationAA) {
8740 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8741 Changed, getAccessKindFromInst(&
I));
8742 return NO_UNKOWN_MEM;
8745 if (CBMemLocationAA->isAssumedReadNone())
8746 return NO_LOCATIONS;
8748 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8749 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8750 Changed, getAccessKindFromInst(&
I));
8751 return AccessedLocs.getAssumed();
8754 uint32_t CBAssumedNotAccessedLocs =
8755 CBMemLocationAA->getAssumedNotAccessedLocation();
8758 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8759 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8761 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8762 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8764 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr,
Changed,
8765 getAccessKindFromInst(&
I));
8770 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8771 if (HasGlobalAccesses) {
8774 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr,
Changed,
8775 getAccessKindFromInst(&
I));
8778 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8779 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8780 return AccessedLocs.getWorstState();
8784 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8785 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8788 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8790 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs,
Changed);
8793 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8794 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8796 return AccessedLocs.getAssumed();
8801 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8802 <<
I <<
" [" << *
Ptr <<
"]\n");
8804 Ptr->getType()->getPointerAddressSpace());
8805 return AccessedLocs.getAssumed();
8808 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8810 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8811 getAccessKindFromInst(&
I));
8812 return AccessedLocs.getAssumed();
8816struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8817 AAMemoryLocationFunction(
const IRPosition &IRP, Attributor &
A)
8818 : AAMemoryLocationImpl(IRP,
A) {}
8823 const auto *MemBehaviorAA =
8824 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
8827 return indicateOptimisticFixpoint();
8829 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8830 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8831 return ChangeStatus::UNCHANGED;
8835 auto AssumedState = getAssumed();
8839 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I,
Changed);
8840 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8841 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8842 removeAssumedBits(inverseLocation(MLK,
false,
false));
8845 return getAssumedNotAccessedLocation() != VALID_STATE;
8848 bool UsedAssumedInformation =
false;
8849 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8850 UsedAssumedInformation))
8851 return indicatePessimisticFixpoint();
8853 Changed |= AssumedState != getAssumed();
8854 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8858 void trackStatistics()
const override {
8861 else if (isAssumedArgMemOnly())
8863 else if (isAssumedInaccessibleMemOnly())
8865 else if (isAssumedInaccessibleOrArgMemOnly())
8871struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8872 AAMemoryLocationCallSite(
const IRPosition &IRP, Attributor &
A)
8873 : AAMemoryLocationImpl(IRP,
A) {}
8884 A.getAAFor<AAMemoryLocation>(*
this, FnPos, DepClassTy::REQUIRED);
8886 return indicatePessimisticFixpoint();
8890 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr,
Changed,
8891 getAccessKindFromInst(
I));
8894 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8895 return indicatePessimisticFixpoint();
8896 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8900 void trackStatistics()
const override {
8910struct AADenormalFPMathImpl :
public AADenormalFPMath {
8911 AADenormalFPMathImpl(
const IRPosition &IRP, Attributor &
A)
8912 : AADenormalFPMath(IRP,
A) {}
8914 const std::string getAsStr(Attributor *
A)
const override {
8915 std::string Str(
"AADenormalFPMath[");
8916 raw_string_ostream OS(Str);
8918 DenormalState Known = getKnown();
8919 if (Known.Mode.isValid())
8920 OS <<
"denormal-fp-math=" << Known.Mode;
8924 if (Known.ModeF32.isValid())
8925 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8931struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8932 AADenormalFPMathFunction(
const IRPosition &IRP, Attributor &
A)
8933 : AADenormalFPMathImpl(IRP,
A) {}
8937 DenormalMode
Mode =
F->getDenormalModeRaw();
8938 DenormalMode ModeF32 =
F->getDenormalModeF32Raw();
8945 Known = DenormalState{
Mode, ModeF32};
8953 auto CheckCallSite = [=, &Change, &
A](AbstractCallSite CS) {
8956 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8958 const auto *CallerInfo =
A.getAAFor<AADenormalFPMath>(
8964 CallerInfo->getState());
8968 bool AllCallSitesKnown =
true;
8969 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8970 return indicatePessimisticFixpoint();
8972 if (Change == ChangeStatus::CHANGED && isModeFixed())
8978 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8983 AttrToRemove.
push_back(
"denormal-fp-math");
8986 Attribute::get(Ctx,
"denormal-fp-math", Known.Mode.str()));
8989 if (Known.ModeF32 != Known.Mode) {
8991 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8993 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8996 auto &IRP = getIRPosition();
8999 return A.removeAttrs(IRP, AttrToRemove) |
9000 A.manifestAttrs(IRP, AttrToAdd,
true);
9003 void trackStatistics()
const override {
9012struct AAValueConstantRangeImpl : AAValueConstantRange {
9013 using StateType = IntegerRangeState;
9014 AAValueConstantRangeImpl(
const IRPosition &IRP, Attributor &
A)
9015 : AAValueConstantRange(IRP,
A) {}
9019 if (
A.hasSimplificationCallback(getIRPosition())) {
9020 indicatePessimisticFixpoint();
9025 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9028 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9032 const std::string getAsStr(Attributor *
A)
const override {
9034 llvm::raw_string_ostream OS(Str);
9036 getKnown().print(OS);
9038 getAssumed().print(OS);
9045 const SCEV *getSCEV(Attributor &
A,
const Instruction *
I =
nullptr)
const {
9046 if (!getAnchorScope())
9049 ScalarEvolution *SE =
9050 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9053 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
9059 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9068 ConstantRange getConstantRangeFromSCEV(Attributor &
A,
9069 const Instruction *
I =
nullptr)
const {
9070 if (!getAnchorScope())
9073 ScalarEvolution *SE =
9074 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9077 const SCEV *S = getSCEV(
A,
I);
9087 getConstantRangeFromLVI(Attributor &
A,
9088 const Instruction *CtxI =
nullptr)
const {
9089 if (!getAnchorScope())
9092 LazyValueInfo *LVI =
9093 A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
9108 bool isValidCtxInstructionForOutsideAnalysis(Attributor &
A,
9109 const Instruction *CtxI,
9110 bool AllowAACtxI)
const {
9111 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9123 InformationCache &InfoCache =
A.getInfoCache();
9124 const DominatorTree *DT =
9135 getKnownConstantRange(Attributor &
A,
9136 const Instruction *CtxI =
nullptr)
const override {
9137 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9141 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9142 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9143 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9148 getAssumedConstantRange(Attributor &
A,
9149 const Instruction *CtxI =
nullptr)
const override {
9154 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9156 return getAssumed();
9158 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9159 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9160 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9165 getMDNodeForConstantRange(
Type *Ty, LLVMContext &Ctx,
9166 const ConstantRange &AssumedConstantRange) {
9168 Ty, AssumedConstantRange.
getLower())),
9170 Ty, AssumedConstantRange.
getUpper()))};
9175 static bool isBetterRange(
const ConstantRange &Assumed,
9176 const Instruction &
I) {
9180 std::optional<ConstantRange> Known;
9184 }
else if (MDNode *KnownRanges =
I.getMetadata(LLVMContext::MD_range)) {
9190 if (KnownRanges->getNumOperands() > 2)
9193 ConstantInt *
Lower =
9195 ConstantInt *
Upper =
9198 Known.emplace(
Lower->getValue(),
Upper->getValue());
9200 return !Known || (*Known != Assumed && Known->contains(Assumed));
9205 setRangeMetadataIfisBetterRange(Instruction *
I,
9206 const ConstantRange &AssumedConstantRange) {
9207 if (isBetterRange(AssumedConstantRange, *
I)) {
9208 I->setMetadata(LLVMContext::MD_range,
9209 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9210 AssumedConstantRange));
9217 setRangeRetAttrIfisBetterRange(Attributor &
A,
const IRPosition &IRP,
9219 const ConstantRange &AssumedConstantRange) {
9220 if (isBetterRange(AssumedConstantRange, *
I)) {
9221 A.manifestAttrs(IRP,
9222 Attribute::get(
I->getContext(), Attribute::Range,
9223 AssumedConstantRange),
9233 ConstantRange AssumedConstantRange = getAssumedConstantRange(
A);
9236 auto &
V = getAssociatedValue();
9240 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9241 "not the context instruction");
9243 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9244 Changed = ChangeStatus::CHANGED;
9246 if (setRangeRetAttrIfisBetterRange(
A, getIRPosition(),
I,
9247 AssumedConstantRange))
9248 Changed = ChangeStatus::CHANGED;
9256struct AAValueConstantRangeArgument final
9257 : AAArgumentFromCallSiteArguments<
9258 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9260 using Base = AAArgumentFromCallSiteArguments<
9261 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9263 AAValueConstantRangeArgument(
const IRPosition &IRP, Attributor &
A)
9267 void trackStatistics()
const override {
9272struct AAValueConstantRangeReturned
9273 : AAReturnedFromReturnedValues<AAValueConstantRange,
9274 AAValueConstantRangeImpl,
9275 AAValueConstantRangeImpl::StateType,
9278 AAReturnedFromReturnedValues<AAValueConstantRange,
9279 AAValueConstantRangeImpl,
9280 AAValueConstantRangeImpl::StateType,
9282 AAValueConstantRangeReturned(
const IRPosition &IRP, Attributor &
A)
9287 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9288 indicatePessimisticFixpoint();
9292 void trackStatistics()
const override {
9297struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9298 AAValueConstantRangeFloating(
const IRPosition &IRP, Attributor &
A)
9299 : AAValueConstantRangeImpl(IRP,
A) {}
9303 AAValueConstantRangeImpl::initialize(
A);
9307 Value &
V = getAssociatedValue();
9310 unionAssumed(ConstantRange(
C->getValue()));
9311 indicateOptimisticFixpoint();
9317 unionAssumed(ConstantRange(APInt(
getBitWidth(), 0)));
9318 indicateOptimisticFixpoint();
9330 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9341 indicatePessimisticFixpoint();
9344 << getAssociatedValue() <<
"\n");
9347 bool calculateBinaryOperator(
9348 Attributor &
A, BinaryOperator *BinOp, IntegerRangeState &
T,
9349 const Instruction *CtxI,
9350 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9355 bool UsedAssumedInformation =
false;
9356 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9359 if (!SimplifiedLHS.has_value())
9361 if (!*SimplifiedLHS)
9363 LHS = *SimplifiedLHS;
9365 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9368 if (!SimplifiedRHS.has_value())
9370 if (!*SimplifiedRHS)
9372 RHS = *SimplifiedRHS;
9378 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9380 DepClassTy::REQUIRED);
9384 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9386 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9388 DepClassTy::REQUIRED);
9392 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9394 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9396 T.unionAssumed(AssumedRange);
9400 return T.isValidState();
9403 bool calculateCastInst(
9404 Attributor &
A, CastInst *CastI, IntegerRangeState &
T,
9405 const Instruction *CtxI,
9406 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9412 bool UsedAssumedInformation =
false;
9413 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9416 if (!SimplifiedOpV.has_value())
9418 if (!*SimplifiedOpV)
9420 OpV = *SimplifiedOpV;
9425 auto *OpAA =
A.getAAFor<AAValueConstantRange>(
9427 DepClassTy::REQUIRED);
9431 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9433 return T.isValidState();
9437 calculateCmpInst(Attributor &
A, CmpInst *CmpI, IntegerRangeState &
T,
9438 const Instruction *CtxI,
9439 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9444 bool UsedAssumedInformation =
false;
9445 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9448 if (!SimplifiedLHS.has_value())
9450 if (!*SimplifiedLHS)
9452 LHS = *SimplifiedLHS;
9454 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9457 if (!SimplifiedRHS.has_value())
9459 if (!*SimplifiedRHS)
9461 RHS = *SimplifiedRHS;
9467 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9469 DepClassTy::REQUIRED);
9473 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9475 DepClassTy::REQUIRED);
9479 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9480 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9483 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9486 bool MustTrue =
false, MustFalse =
false;
9488 auto AllowedRegion =
9491 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9497 assert((!MustTrue || !MustFalse) &&
9498 "Either MustTrue or MustFalse should be false!");
9501 T.unionAssumed(ConstantRange(APInt( 1, 1)));
9503 T.unionAssumed(ConstantRange(APInt( 1, 0)));
9505 T.unionAssumed(ConstantRange( 1,
true));
9507 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9508 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9509 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9513 return T.isValidState();
9525 bool UsedAssumedInformation =
false;
9526 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9529 if (!SimplifiedOpV.has_value())
9531 if (!*SimplifiedOpV)
9533 Value *VPtr = *SimplifiedOpV;
9536 const auto *AA =
A.getAAFor<AAValueConstantRange>(
9538 DepClassTy::REQUIRED);
9542 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9546 return T.isValidState();
9551 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9554 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9557 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9563 T.indicatePessimisticFixpoint();
9570 for (
const AAValueConstantRange *QueriedAA : QuerriedAAs) {
9571 if (QueriedAA !=
this)
9574 if (
T.getAssumed() == getState().getAssumed())
9576 T.indicatePessimisticFixpoint();
9579 return T.isValidState();
9582 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9583 return indicatePessimisticFixpoint();
9588 return ChangeStatus::UNCHANGED;
9589 if (++NumChanges > MaxNumChanges) {
9590 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9591 <<
" but only " << MaxNumChanges
9592 <<
" are allowed to avoid cyclic reasoning.");
9593 return indicatePessimisticFixpoint();
9595 return ChangeStatus::CHANGED;
9599 void trackStatistics()
const override {
9608 static constexpr int MaxNumChanges = 5;
9611struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9612 AAValueConstantRangeFunction(
const IRPosition &IRP, Attributor &
A)
9613 : AAValueConstantRangeImpl(IRP,
A) {}
9617 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9625struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9626 AAValueConstantRangeCallSite(
const IRPosition &IRP, Attributor &
A)
9627 : AAValueConstantRangeFunction(IRP,
A) {}
9633struct AAValueConstantRangeCallSiteReturned
9634 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9635 AAValueConstantRangeImpl::StateType,
9637 AAValueConstantRangeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
9638 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9639 AAValueConstantRangeImpl::StateType,
9646 if (std::optional<ConstantRange>
Range = CI->getRange())
9647 intersectKnown(*
Range);
9650 AAValueConstantRangeImpl::initialize(
A);
9654 void trackStatistics()
const override {
9658struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9659 AAValueConstantRangeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
9660 : AAValueConstantRangeFloating(IRP,
A) {}
9664 return ChangeStatus::UNCHANGED;
9668 void trackStatistics()
const override {
9677struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
9680 AAPotentialConstantValuesImpl(
const IRPosition &IRP, Attributor &
A)
9681 : AAPotentialConstantValues(IRP,
A) {}
9685 if (
A.hasSimplificationCallback(getIRPosition()))
9686 indicatePessimisticFixpoint();
9688 AAPotentialConstantValues::initialize(
A);
9691 bool fillSetWithConstantValues(Attributor &
A,
const IRPosition &IRP, SetTy &S,
9692 bool &ContainsUndef,
bool ForSelf) {
9694 bool UsedAssumedInformation =
false;
9696 UsedAssumedInformation)) {
9703 auto *PotentialValuesAA =
A.getAAFor<AAPotentialConstantValues>(
9704 *
this, IRP, DepClassTy::REQUIRED);
9705 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9707 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9708 S = PotentialValuesAA->getState().getAssumedSet();
9715 ContainsUndef =
false;
9716 for (
auto &It : Values) {
9718 ContainsUndef =
true;
9724 S.insert(CI->getValue());
9726 ContainsUndef &= S.empty();
9732 const std::string getAsStr(Attributor *
A)
const override {
9734 llvm::raw_string_ostream OS(Str);
9741 return indicatePessimisticFixpoint();
9745struct AAPotentialConstantValuesArgument final
9746 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9747 AAPotentialConstantValuesImpl,
9748 PotentialConstantIntValuesState> {
9749 using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9750 AAPotentialConstantValuesImpl,
9752 AAPotentialConstantValuesArgument(
const IRPosition &IRP, Attributor &
A)
9756 void trackStatistics()
const override {
9761struct AAPotentialConstantValuesReturned
9762 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9763 AAPotentialConstantValuesImpl> {
9764 using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
9765 AAPotentialConstantValuesImpl>;
9766 AAPotentialConstantValuesReturned(
const IRPosition &IRP, Attributor &
A)
9770 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9771 indicatePessimisticFixpoint();
9772 Base::initialize(
A);
9776 void trackStatistics()
const override {
9781struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9782 AAPotentialConstantValuesFloating(
const IRPosition &IRP, Attributor &
A)
9783 : AAPotentialConstantValuesImpl(IRP,
A) {}
9787 AAPotentialConstantValuesImpl::initialize(
A);
9791 Value &
V = getAssociatedValue();
9794 unionAssumed(
C->getValue());
9795 indicateOptimisticFixpoint();
9800 unionAssumedWithUndef();
9801 indicateOptimisticFixpoint();
9811 indicatePessimisticFixpoint();
9814 << getAssociatedValue() <<
"\n");
9817 static bool calculateICmpInst(
const ICmpInst *ICI,
const APInt &
LHS,
9822 static APInt calculateCastInst(
const CastInst *CI,
const APInt &Src,
9823 uint32_t ResultBitWidth) {
9828 case Instruction::Trunc:
9829 return Src.trunc(ResultBitWidth);
9830 case Instruction::SExt:
9831 return Src.sext(ResultBitWidth);
9832 case Instruction::ZExt:
9833 return Src.zext(ResultBitWidth);
9834 case Instruction::BitCast:
9839 static APInt calculateBinaryOperator(
const BinaryOperator *BinOp,
9840 const APInt &
LHS,
const APInt &
RHS,
9841 bool &SkipOperation,
bool &Unsupported) {
9848 switch (BinOpcode) {
9852 case Instruction::Add:
9854 case Instruction::Sub:
9856 case Instruction::Mul:
9858 case Instruction::UDiv:
9860 SkipOperation =
true;
9864 case Instruction::SDiv:
9866 SkipOperation =
true;
9870 case Instruction::URem:
9872 SkipOperation =
true;
9876 case Instruction::SRem:
9878 SkipOperation =
true;
9882 case Instruction::Shl:
9884 case Instruction::LShr:
9886 case Instruction::AShr:
9888 case Instruction::And:
9890 case Instruction::Or:
9892 case Instruction::Xor:
9897 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9898 const APInt &
LHS,
const APInt &
RHS) {
9899 bool SkipOperation =
false;
9902 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9907 unionAssumed(Result);
9908 return isValidState();
9911 ChangeStatus updateWithICmpInst(Attributor &
A, ICmpInst *ICI) {
9912 auto AssumedBefore = getAssumed();
9916 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9917 SetTy LHSAAPVS, RHSAAPVS;
9919 LHSContainsUndef,
false) ||
9921 RHSContainsUndef,
false))
9922 return indicatePessimisticFixpoint();
9925 bool MaybeTrue =
false, MaybeFalse =
false;
9927 if (LHSContainsUndef && RHSContainsUndef) {
9930 unionAssumedWithUndef();
9931 }
else if (LHSContainsUndef) {
9932 for (
const APInt &R : RHSAAPVS) {
9933 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9934 MaybeTrue |= CmpResult;
9935 MaybeFalse |= !CmpResult;
9936 if (MaybeTrue & MaybeFalse)
9937 return indicatePessimisticFixpoint();
9939 }
else if (RHSContainsUndef) {
9940 for (
const APInt &L : LHSAAPVS) {
9941 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9942 MaybeTrue |= CmpResult;
9943 MaybeFalse |= !CmpResult;
9944 if (MaybeTrue & MaybeFalse)
9945 return indicatePessimisticFixpoint();
9948 for (
const APInt &L : LHSAAPVS) {
9949 for (
const APInt &R : RHSAAPVS) {
9950 bool CmpResult = calculateICmpInst(ICI, L, R);
9951 MaybeTrue |= CmpResult;
9952 MaybeFalse |= !CmpResult;
9953 if (MaybeTrue & MaybeFalse)
9954 return indicatePessimisticFixpoint();
9959 unionAssumed(APInt( 1, 1));
9961 unionAssumed(APInt( 1, 0));
9962 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9963 : ChangeStatus::CHANGED;
9966 ChangeStatus updateWithSelectInst(Attributor &
A, SelectInst *SI) {
9967 auto AssumedBefore = getAssumed();
9971 bool UsedAssumedInformation =
false;
9972 std::optional<Constant *>
C =
A.getAssumedConstant(
9973 *
SI->getCondition(), *
this, UsedAssumedInformation);
9976 bool OnlyLeft =
false, OnlyRight =
false;
9977 if (
C && *
C && (*C)->isOneValue())
9979 else if (
C && *
C && (*C)->isZeroValue())
9982 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9983 SetTy LHSAAPVS, RHSAAPVS;
9986 LHSContainsUndef,
false))
9987 return indicatePessimisticFixpoint();
9991 RHSContainsUndef,
false))
9992 return indicatePessimisticFixpoint();
9994 if (OnlyLeft || OnlyRight) {
9996 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9997 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
10000 unionAssumedWithUndef();
10002 for (
const auto &It : *OpAA)
10006 }
else if (LHSContainsUndef && RHSContainsUndef) {
10008 unionAssumedWithUndef();
10010 for (
const auto &It : LHSAAPVS)
10012 for (
const auto &It : RHSAAPVS)
10015 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10016 : ChangeStatus::CHANGED;
10019 ChangeStatus updateWithCastInst(Attributor &
A, CastInst *CI) {
10020 auto AssumedBefore = getAssumed();
10022 return indicatePessimisticFixpoint();
10027 bool SrcContainsUndef =
false;
10030 SrcContainsUndef,
false))
10031 return indicatePessimisticFixpoint();
10033 if (SrcContainsUndef)
10034 unionAssumedWithUndef();
10036 for (
const APInt &S : SrcPVS) {
10037 APInt
T = calculateCastInst(CI, S, ResultBitWidth);
10041 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10042 : ChangeStatus::CHANGED;
10045 ChangeStatus updateWithBinaryOperator(Attributor &
A, BinaryOperator *BinOp) {
10046 auto AssumedBefore = getAssumed();
10050 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10051 SetTy LHSAAPVS, RHSAAPVS;
10053 LHSContainsUndef,
false) ||
10055 RHSContainsUndef,
false))
10056 return indicatePessimisticFixpoint();
10061 if (LHSContainsUndef && RHSContainsUndef) {
10062 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10063 return indicatePessimisticFixpoint();
10064 }
else if (LHSContainsUndef) {
10065 for (
const APInt &R : RHSAAPVS) {
10066 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10067 return indicatePessimisticFixpoint();
10069 }
else if (RHSContainsUndef) {
10070 for (
const APInt &L : LHSAAPVS) {
10071 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10072 return indicatePessimisticFixpoint();
10075 for (
const APInt &L : LHSAAPVS) {
10076 for (
const APInt &R : RHSAAPVS) {
10077 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10078 return indicatePessimisticFixpoint();
10082 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10083 : ChangeStatus::CHANGED;
10086 ChangeStatus updateWithInstruction(Attributor &
A, Instruction *Inst) {
10087 auto AssumedBefore = getAssumed();
10089 bool ContainsUndef;
10091 ContainsUndef,
true))
10092 return indicatePessimisticFixpoint();
10093 if (ContainsUndef) {
10094 unionAssumedWithUndef();
10096 for (
const auto &It : Incoming)
10099 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10100 : ChangeStatus::CHANGED;
10105 Value &
V = getAssociatedValue();
10109 return updateWithICmpInst(
A, ICI);
10112 return updateWithSelectInst(
A, SI);
10115 return updateWithCastInst(
A, CI);
10118 return updateWithBinaryOperator(
A, BinOp);
10121 return updateWithInstruction(
A,
I);
10123 return indicatePessimisticFixpoint();
10127 void trackStatistics()
const override {
10132struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10133 AAPotentialConstantValuesFunction(
const IRPosition &IRP, Attributor &
A)
10134 : AAPotentialConstantValuesImpl(IRP,
A) {}
10139 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10144 void trackStatistics()
const override {
10149struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10150 AAPotentialConstantValuesCallSite(
const IRPosition &IRP, Attributor &
A)
10151 : AAPotentialConstantValuesFunction(IRP,
A) {}
10154 void trackStatistics()
const override {
10159struct AAPotentialConstantValuesCallSiteReturned
10160 : AACalleeToCallSite<AAPotentialConstantValues,
10161 AAPotentialConstantValuesImpl> {
10162 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10164 : AACalleeToCallSite<AAPotentialConstantValues,
10165 AAPotentialConstantValuesImpl>(IRP,
A) {}
10168 void trackStatistics()
const override {
10173struct AAPotentialConstantValuesCallSiteArgument
10174 : AAPotentialConstantValuesFloating {
10175 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10177 : AAPotentialConstantValuesFloating(IRP,
A) {}
10181 AAPotentialConstantValuesImpl::initialize(
A);
10182 if (isAtFixpoint())
10185 Value &
V = getAssociatedValue();
10188 unionAssumed(
C->getValue());
10189 indicateOptimisticFixpoint();
10194 unionAssumedWithUndef();
10195 indicateOptimisticFixpoint();
10202 Value &
V = getAssociatedValue();
10203 auto AssumedBefore = getAssumed();
10204 auto *AA =
A.getAAFor<AAPotentialConstantValues>(
10207 return indicatePessimisticFixpoint();
10208 const auto &S = AA->getAssumed();
10210 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10211 : ChangeStatus::CHANGED;
10215 void trackStatistics()
const override {
10224 bool IgnoreSubsumingPositions) {
10225 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10226 "Unexpected attribute kind");
10227 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10228 Attribute::NoUndef))
10248 Value &V = getAssociatedValue();
10250 indicatePessimisticFixpoint();
10251 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10255 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10256 AANoUndef::StateType &State) {
10257 const Value *UseV =
U->get();
10258 const DominatorTree *DT =
nullptr;
10259 AssumptionCache *AC =
nullptr;
10260 InformationCache &InfoCache =
A.getInfoCache();
10261 if (Function *
F = getAnchorScope()) {
10266 bool TrackUse =
false;
10275 const std::string getAsStr(Attributor *
A)
const override {
10276 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10283 bool UsedAssumedInformation =
false;
10284 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10285 UsedAssumedInformation))
10286 return ChangeStatus::UNCHANGED;
10290 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10293 return ChangeStatus::UNCHANGED;
10294 return AANoUndef::manifest(
A);
10298struct AANoUndefFloating :
public AANoUndefImpl {
10299 AANoUndefFloating(
const IRPosition &IRP, Attributor &
A)
10300 : AANoUndefImpl(IRP,
A) {}
10304 AANoUndefImpl::initialize(
A);
10305 if (!getState().isAtFixpoint() && getAnchorScope() &&
10306 !getAnchorScope()->isDeclaration())
10307 if (Instruction *CtxI = getCtxI())
10308 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10313 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10314 bool IsKnownNoUndef;
10316 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10320 bool UsedAssumedInformation =
false;
10321 Value *AssociatedValue = &getAssociatedValue();
10323 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10328 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10336 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10337 return indicatePessimisticFixpoint();
10338 return ChangeStatus::UNCHANGED;
10341 for (
const auto &VAC : Values)
10343 return indicatePessimisticFixpoint();
10345 return ChangeStatus::UNCHANGED;
10352struct AANoUndefReturned final
10353 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10354 AANoUndefReturned(
const IRPosition &IRP, Attributor &
A)
10355 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10361struct AANoUndefArgument final
10362 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10363 AANoUndefArgument(
const IRPosition &IRP, Attributor &
A)
10364 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10370struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10371 AANoUndefCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10372 : AANoUndefFloating(IRP,
A) {}
10378struct AANoUndefCallSiteReturned final
10379 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10380 AANoUndefCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10381 : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10389struct AANoFPClassImpl : AANoFPClass {
10390 AANoFPClassImpl(
const IRPosition &IRP, Attributor &
A) : AANoFPClass(IRP,
A) {}
10393 const IRPosition &IRP = getIRPosition();
10397 indicateOptimisticFixpoint();
10402 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10403 for (
const auto &Attr : Attrs) {
10407 const DataLayout &
DL =
A.getDataLayout();
10413 if (Instruction *CtxI = getCtxI())
10414 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10418 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10419 AANoFPClass::StateType &State) {
10430 if (
auto *NoFPAA =
A.getAAFor<AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10431 State.addKnownBits(NoFPAA->getState().getKnown());
10435 const std::string getAsStr(Attributor *
A)
const override {
10436 std::string
Result =
"nofpclass";
10437 raw_string_ostream OS(Result);
10438 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10442 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
10443 SmallVectorImpl<Attribute> &Attrs)
const override {
10444 Attrs.emplace_back(Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
10448struct AANoFPClassFloating :
public AANoFPClassImpl {
10449 AANoFPClassFloating(
const IRPosition &IRP, Attributor &
A)
10450 : AANoFPClassImpl(IRP,
A) {}
10455 bool UsedAssumedInformation =
false;
10456 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10458 Values.
push_back({getAssociatedValue(), getCtxI()});
10464 DepClassTy::REQUIRED);
10465 if (!AA ||
this == AA) {
10466 T.indicatePessimisticFixpoint();
10468 const AANoFPClass::StateType &S =
10469 static_cast<const AANoFPClass::StateType &
>(AA->
getState());
10472 return T.isValidState();
10475 for (
const auto &VAC : Values)
10476 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10477 return indicatePessimisticFixpoint();
10483 void trackStatistics()
const override {
10488struct AANoFPClassReturned final
10489 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10490 AANoFPClassImpl::StateType, false,
10491 Attribute::None, false> {
10492 AANoFPClassReturned(
const IRPosition &IRP, Attributor &
A)
10493 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10494 AANoFPClassImpl::StateType,
false,
10498 void trackStatistics()
const override {
10503struct AANoFPClassArgument final
10504 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10505 AANoFPClassArgument(
const IRPosition &IRP, Attributor &
A)
10506 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10512struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10513 AANoFPClassCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10514 : AANoFPClassFloating(IRP,
A) {}
10517 void trackStatistics()
const override {
10522struct AANoFPClassCallSiteReturned final
10523 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10524 AANoFPClassCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10525 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10528 void trackStatistics()
const override {
10533struct AACallEdgesImpl :
public AACallEdges {
10534 AACallEdgesImpl(
const IRPosition &IRP, Attributor &
A) : AACallEdges(IRP,
A) {}
10536 const SetVector<Function *> &getOptimisticEdges()
const override {
10537 return CalledFunctions;
10540 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10542 bool hasNonAsmUnknownCallee()
const override {
10543 return HasUnknownCalleeNonAsm;
10546 const std::string getAsStr(Attributor *
A)
const override {
10547 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10548 std::to_string(CalledFunctions.size()) +
"]";
10551 void trackStatistics()
const override {}
10554 void addCalledFunction(Function *Fn,
ChangeStatus &Change) {
10555 if (CalledFunctions.insert(Fn)) {
10556 Change = ChangeStatus::CHANGED;
10562 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10563 if (!HasUnknownCallee)
10564 Change = ChangeStatus::CHANGED;
10565 if (NonAsm && !HasUnknownCalleeNonAsm)
10566 Change = ChangeStatus::CHANGED;
10567 HasUnknownCalleeNonAsm |= NonAsm;
10568 HasUnknownCallee =
true;
10573 SetVector<Function *> CalledFunctions;
10576 bool HasUnknownCallee =
false;
10579 bool HasUnknownCalleeNonAsm =
false;
10582struct AACallEdgesCallSite :
public AACallEdgesImpl {
10583 AACallEdgesCallSite(
const IRPosition &IRP, Attributor &
A)
10584 : AACallEdgesImpl(IRP,
A) {}
10591 addCalledFunction(Fn, Change);
10593 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10594 setHasUnknownCallee(
true, Change);
10605 VisitValue(*V, CtxI);
10609 bool UsedAssumedInformation =
false;
10615 for (
auto &VAC : Values)
10616 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10622 if (
IA->hasSideEffects() &&
10625 setHasUnknownCallee(
false, Change);
10631 if (
auto *IndirectCallAA =
A.getAAFor<AAIndirectCallInfo>(
10632 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10633 if (IndirectCallAA->foreachCallee(
10634 [&](Function *Fn) { return VisitValue(*Fn, CB); }))
10643 for (
const Use *U : CallbackUses)
10644 ProcessCalledOperand(
U->get(), CB);
10650struct AACallEdgesFunction :
public AACallEdgesImpl {
10651 AACallEdgesFunction(
const IRPosition &IRP, Attributor &
A)
10652 : AACallEdgesImpl(IRP,
A) {}
10661 auto *CBEdges =
A.getAAFor<AACallEdges>(
10665 if (CBEdges->hasNonAsmUnknownCallee())
10666 setHasUnknownCallee(
true, Change);
10667 if (CBEdges->hasUnknownCallee())
10668 setHasUnknownCallee(
false, Change);
10670 for (Function *
F : CBEdges->getOptimisticEdges())
10671 addCalledFunction(
F, Change);
10677 bool UsedAssumedInformation =
false;
10678 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10679 UsedAssumedInformation,
10683 setHasUnknownCallee(
true, Change);
10692struct AAInterFnReachabilityFunction
10693 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10694 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10695 AAInterFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
10698 bool instructionCanReach(
10699 Attributor &
A,
const Instruction &From,
const Function &To,
10702 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10704 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
10705 typename RQITy::Reachable
Result;
10706 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10707 return NonConstThis->isReachableImpl(
A, StackRQI,
10709 return Result == RQITy::Reachable::Yes;
10713 bool IsTemporaryRQI)
override {
10715 &RQI.From->getFunction()->getEntryBlock().front();
10716 if (EntryI != RQI.From &&
10717 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10718 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10721 auto CheckReachableCallBase = [&](CallBase *CB) {
10722 auto *CBEdges =
A.getAAFor<AACallEdges>(
10724 if (!CBEdges || !CBEdges->getState().isValidState())
10727 if (CBEdges->hasUnknownCallee())
10730 for (Function *Fn : CBEdges->getOptimisticEdges()) {
10741 if (Fn == getAnchorScope()) {
10742 if (EntryI == RQI.From)
10747 const AAInterFnReachability *InterFnReachability =
10749 DepClassTy::OPTIONAL);
10752 if (!InterFnReachability ||
10760 const auto *IntraFnReachability =
A.getAAFor<AAIntraFnReachability>(
10762 DepClassTy::OPTIONAL);
10770 return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
10771 A, *RQI.From, CBInst, RQI.ExclusionSet);
10774 bool UsedExclusionSet =
true;
10775 bool UsedAssumedInformation =
false;
10776 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10777 UsedAssumedInformation,
10779 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10782 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10786 void trackStatistics()
const override {}
10790template <
typename AAType>
10791static std::optional<Constant *>
10794 if (!Ty.isIntegerTy())
10802 std::optional<Constant *> COpt =
AA->getAssumedConstant(
A);
10804 if (!COpt.has_value()) {
10806 return std::nullopt;
10808 if (
auto *
C = *COpt) {
10819 std::optional<Value *> V;
10820 for (
auto &It : Values) {
10822 if (V.has_value() && !*V)
10825 if (!V.has_value())
10839 if (
A.hasSimplificationCallback(getIRPosition())) {
10840 indicatePessimisticFixpoint();
10843 Value *Stripped = getAssociatedValue().stripPointerCasts();
10845 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10847 indicateOptimisticFixpoint();
10850 AAPotentialValues::initialize(
A);
10854 const std::string getAsStr(Attributor *
A)
const override {
10856 llvm::raw_string_ostream OS(Str);
10861 template <
typename AAType>
10862 static std::optional<Value *> askOtherAA(Attributor &
A,
10863 const AbstractAttribute &AA,
10864 const IRPosition &IRP,
Type &Ty) {
10869 return std::nullopt;
10876 virtual void addValue(Attributor &
A, StateType &State,
Value &V,
10878 Function *AnchorScope)
const {
10882 for (
const auto &U : CB->
args()) {
10892 Type &Ty = *getAssociatedType();
10893 std::optional<Value *> SimpleV =
10894 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10895 if (SimpleV.has_value() && !*SimpleV) {
10896 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
10897 *
this, ValIRP, DepClassTy::OPTIONAL);
10898 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10899 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10900 State.unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10901 if (PotentialConstantsAA->undefIsContained())
10906 if (!SimpleV.has_value())
10918 State.unionAssumed({{*VPtr, CtxI}, S});
10924 AA::ValueAndContext
I;
10928 return II.I ==
I &&
II.S == S;
10931 return std::tie(
I, S) < std::tie(
II.I,
II.S);
10935 bool recurseForValue(Attributor &
A,
const IRPosition &IRP,
AA::ValueScope S) {
10936 SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
10941 bool UsedAssumedInformation =
false;
10943 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10944 UsedAssumedInformation))
10947 for (
auto &It : Values)
10948 ValueScopeMap[It] += CS;
10950 for (
auto &It : ValueScopeMap)
10951 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10957 void giveUpOnIntraprocedural(Attributor &
A) {
10958 auto NewS = StateType::getBestState(getState());
10959 for (
const auto &It : getAssumedSet()) {
10962 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10965 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10973 getState() = StateType::getBestState(getState());
10974 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10975 AAPotentialValues::indicateOptimisticFixpoint();
10976 return ChangeStatus::CHANGED;
10981 return indicatePessimisticFixpoint();
10989 if (!getAssumedSimplifiedValues(
A, Values, S))
10991 Value &OldV = getAssociatedValue();
10994 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10995 if (!NewV || NewV == &OldV)
11000 if (
A.changeAfterManifest(getIRPosition(), *NewV))
11001 return ChangeStatus::CHANGED;
11003 return ChangeStatus::UNCHANGED;
11006 bool getAssumedSimplifiedValues(
11007 Attributor &
A, SmallVectorImpl<AA::ValueAndContext> &Values,
11008 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
11009 if (!isValidState())
11011 bool UsedAssumedInformation =
false;
11012 for (
const auto &It : getAssumedSet())
11013 if (It.second & S) {
11014 if (RecurseForSelectAndPHI && (
isa<PHINode>(It.first.getValue()) ||
11016 if (
A.getAssumedSimplifiedValues(
11018 this, Values, S, UsedAssumedInformation))
11023 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11028struct AAPotentialValuesFloating : AAPotentialValuesImpl {
11029 AAPotentialValuesFloating(
const IRPosition &IRP, Attributor &
A)
11030 : AAPotentialValuesImpl(IRP,
A) {}
11034 auto AssumedBefore = getAssumed();
11036 genericValueTraversal(
A, &getAssociatedValue());
11038 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11039 : ChangeStatus::CHANGED;
11043 struct LivenessInfo {
11044 const AAIsDead *LivenessAA =
nullptr;
11045 bool AnyDead =
false;
11055 SmallVectorImpl<ItemInfo> &Worklist) {
11058 bool UsedAssumedInformation =
false;
11060 auto GetSimplifiedValues = [&](
Value &
V,
11062 if (!
A.getAssumedSimplifiedValues(
11066 Values.
push_back(AA::ValueAndContext{
V,
II.I.getCtxI()});
11068 return Values.
empty();
11070 if (GetSimplifiedValues(*
LHS, LHSValues))
11072 if (GetSimplifiedValues(*
RHS, RHSValues))
11077 InformationCache &InfoCache =
A.getInfoCache();
11084 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11089 const DataLayout &
DL =
A.getDataLayout();
11090 SimplifyQuery Q(
DL, TLI, DT, AC, CmpI);
11092 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11095 nullptr,
II.S, getAnchorScope());
11101 if (&LHSV == &RHSV &&
11103 Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
11105 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11112 if (TypedLHS && TypedRHS) {
11114 if (NewV && NewV != &Cmp) {
11115 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11127 if (!LHSIsNull && !RHSIsNull)
11133 assert((LHSIsNull || RHSIsNull) &&
11134 "Expected nullptr versus non-nullptr comparison at this point");
11137 unsigned PtrIdx = LHSIsNull;
11138 bool IsKnownNonNull;
11141 DepClassTy::REQUIRED, IsKnownNonNull);
11142 if (!IsAssumedNonNull)
11148 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11153 for (
auto &LHSValue : LHSValues)
11154 for (
auto &RHSValue : RHSValues)
11155 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11160 bool handleSelectInst(Attributor &
A, SelectInst &SI, ItemInfo
II,
11161 SmallVectorImpl<ItemInfo> &Worklist) {
11163 bool UsedAssumedInformation =
false;
11165 std::optional<Constant *>
C =
11166 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11167 bool NoValueYet = !
C.has_value();
11175 }
else if (&SI == &getAssociatedValue()) {
11180 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11182 if (!SimpleV.has_value())
11185 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11193 bool handleLoadInst(Attributor &
A, LoadInst &LI, ItemInfo
II,
11194 SmallVectorImpl<ItemInfo> &Worklist) {
11195 SmallSetVector<Value *, 4> PotentialCopies;
11196 SmallSetVector<Instruction *, 4> PotentialValueOrigins;
11197 bool UsedAssumedInformation =
false;
11199 PotentialValueOrigins, *
this,
11200 UsedAssumedInformation,
11202 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11203 "loaded values for load instruction "
11211 InformationCache &InfoCache =
A.getInfoCache();
11213 if (!
llvm::all_of(PotentialValueOrigins, [&](Instruction *
I) {
11217 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11219 UsedAssumedInformation,
11221 return A.isAssumedDead(*
I,
this,
nullptr,
11222 UsedAssumedInformation,
11225 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11226 "and we cannot delete all the stores: "
11237 bool AllLocal = ScopeIsLocal;
11242 if (!DynamicallyUnique) {
11243 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11244 "values are dynamically unique: "
11249 for (
auto *PotentialCopy : PotentialCopies) {
11251 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11256 if (!AllLocal && ScopeIsLocal)
11261 bool handlePHINode(
11262 Attributor &
A, PHINode &
PHI, ItemInfo
II,
11263 SmallVectorImpl<ItemInfo> &Worklist,
11264 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11265 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11266 LivenessInfo &LI = LivenessAAs[&
F];
11267 if (!LI.LivenessAA)
11273 if (&
PHI == &getAssociatedValue()) {
11274 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11276 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
11277 *
PHI.getFunction());
11281 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11283 if (LI.LivenessAA &&
11284 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11303 bool UsedAssumedInformation =
false;
11304 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11306 if (!SimpleV.has_value())
11310 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11317 bool handleGenericInst(Attributor &
A, Instruction &
I, ItemInfo
II,
11318 SmallVectorImpl<ItemInfo> &Worklist) {
11319 bool SomeSimplified =
false;
11320 bool UsedAssumedInformation =
false;
11322 SmallVector<Value *, 8> NewOps(
I.getNumOperands());
11325 const auto &SimplifiedOp =
A.getAssumedSimplified(
11330 if (!SimplifiedOp.has_value())
11334 NewOps[Idx] = *SimplifiedOp;
11338 SomeSimplified |= (NewOps[Idx] !=
Op);
11344 if (!SomeSimplified)
11347 InformationCache &InfoCache =
A.getInfoCache();
11351 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11354 const DataLayout &
DL =
I.getDataLayout();
11355 SimplifyQuery Q(
DL, TLI, DT, AC, &
I);
11357 if (!NewV || NewV == &
I)
11360 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11367 Attributor &
A, Instruction &
I, ItemInfo
II,
11368 SmallVectorImpl<ItemInfo> &Worklist,
11369 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11372 CI->getPredicate(),
II, Worklist);
11374 switch (
I.getOpcode()) {
11375 case Instruction::Select:
11377 case Instruction::PHI:
11379 case Instruction::Load:
11382 return handleGenericInst(
A,
I,
II, Worklist);
11387 void genericValueTraversal(Attributor &
A,
Value *InitialV) {
11388 SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;
11390 SmallSet<ItemInfo, 16> Visited;
11409 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11410 << Iteration <<
"!\n");
11411 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11417 Value *NewV =
nullptr;
11418 if (
V->getType()->isPointerTy()) {
11424 for (Argument &Arg :
Callee->args())
11431 if (NewV && NewV != V) {
11432 Worklist.
push_back({{*NewV, CtxI}, S});
11446 if (V == InitialV && CtxI == getCtxI()) {
11447 indicatePessimisticFixpoint();
11451 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11452 }
while (!Worklist.
empty());
11456 for (
auto &It : LivenessAAs)
11457 if (It.second.AnyDead)
11458 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11462 void trackStatistics()
const override {
11467struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11468 using Base = AAPotentialValuesImpl;
11469 AAPotentialValuesArgument(
const IRPosition &IRP, Attributor &
A)
11476 indicatePessimisticFixpoint();
11481 auto AssumedBefore = getAssumed();
11483 unsigned ArgNo = getCalleeArgNo();
11485 bool UsedAssumedInformation =
false;
11487 auto CallSitePred = [&](AbstractCallSite ACS) {
11489 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11492 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11494 UsedAssumedInformation))
11497 return isValidState();
11500 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11502 UsedAssumedInformation))
11503 return indicatePessimisticFixpoint();
11505 Function *Fn = getAssociatedFunction();
11506 bool AnyNonLocal =
false;
11507 for (
auto &It : Values) {
11509 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11514 return indicatePessimisticFixpoint();
11518 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11524 AnyNonLocal =
true;
11526 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11528 giveUpOnIntraprocedural(
A);
11530 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11531 : ChangeStatus::CHANGED;
11535 void trackStatistics()
const override {
11540struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11541 using Base = AAPotentialValuesFloating;
11542 AAPotentialValuesReturned(
const IRPosition &IRP, Attributor &
A)
11548 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11549 indicatePessimisticFixpoint();
11553 for (Argument &Arg :
F->args())
11556 ReturnedArg = &Arg;
11559 if (!
A.isFunctionIPOAmendable(*
F) ||
11560 A.hasSimplificationCallback(getIRPosition())) {
11562 indicatePessimisticFixpoint();
11564 indicateOptimisticFixpoint();
11570 auto AssumedBefore = getAssumed();
11571 bool UsedAssumedInformation =
false;
11574 Function *AnchorScope = getAnchorScope();
11580 UsedAssumedInformation,
11586 bool AllInterAreIntra =
false;
11589 llvm::all_of(Values, [&](
const AA::ValueAndContext &VAC) {
11593 for (
const AA::ValueAndContext &VAC : Values) {
11594 addValue(
A, getState(), *
VAC.getValue(),
11595 VAC.getCtxI() ?
VAC.getCtxI() : CtxI,
11598 if (AllInterAreIntra)
11605 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11608 bool AddValues =
true;
11611 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11615 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11618 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11619 UsedAssumedInformation,
11621 return indicatePessimisticFixpoint();
11624 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11625 : ChangeStatus::CHANGED;
11630 return ChangeStatus::UNCHANGED;
11632 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11634 return ChangeStatus::UNCHANGED;
11635 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11637 return ChangeStatus::UNCHANGED;
11642 "Number of function with unique return");
11645 {Attribute::get(Arg->
getContext(), Attribute::Returned)});
11650 Value *RetOp = RetI.getOperand(0);
11654 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11655 Changed = ChangeStatus::CHANGED;
11658 bool UsedAssumedInformation =
false;
11659 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11660 UsedAssumedInformation,
11666 return AAPotentialValues::indicatePessimisticFixpoint();
11670 void trackStatistics()
const override{
11677struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11678 AAPotentialValuesFunction(
const IRPosition &IRP, Attributor &
A)
11679 : AAPotentialValuesImpl(IRP,
A) {}
11688 void trackStatistics()
const override {
11693struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11694 AAPotentialValuesCallSite(
const IRPosition &IRP, Attributor &
A)
11695 : AAPotentialValuesFunction(IRP,
A) {}
11698 void trackStatistics()
const override {
11703struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11704 AAPotentialValuesCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
11705 : AAPotentialValuesImpl(IRP,
A) {}
11709 auto AssumedBefore = getAssumed();
11713 return indicatePessimisticFixpoint();
11715 bool UsedAssumedInformation =
false;
11719 UsedAssumedInformation))
11720 return indicatePessimisticFixpoint();
11727 Values, S, UsedAssumedInformation))
11730 for (
auto &It : Values) {
11731 Value *
V = It.getValue();
11732 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11733 V, *CB, *
this, UsedAssumedInformation);
11734 if (!CallerV.has_value()) {
11738 V = *CallerV ? *CallerV :
V;
11744 giveUpOnIntraprocedural(
A);
11747 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11752 return indicatePessimisticFixpoint();
11754 return indicatePessimisticFixpoint();
11755 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11756 : ChangeStatus::CHANGED;
11760 return AAPotentialValues::indicatePessimisticFixpoint();
11764 void trackStatistics()
const override {
11769struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11770 AAPotentialValuesCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
11771 : AAPotentialValuesFloating(IRP,
A) {}
11774 void trackStatistics()
const override {
11782struct AAAssumptionInfoImpl :
public AAAssumptionInfo {
11783 AAAssumptionInfoImpl(
const IRPosition &IRP, Attributor &
A,
11784 const DenseSet<StringRef> &Known)
11785 : AAAssumptionInfo(IRP,
A, Known) {}
11790 if (getKnown().isUniversal())
11791 return ChangeStatus::UNCHANGED;
11793 const IRPosition &IRP = getIRPosition();
11795 getAssumed().getSet().
end());
11797 return A.manifestAttrs(IRP,
11804 bool hasAssumption(
const StringRef Assumption)
const override {
11805 return isValidState() && setContains(Assumption);
11809 const std::string getAsStr(Attributor *
A)
const override {
11810 const SetContents &Known = getKnown();
11811 const SetContents &Assumed = getAssumed();
11815 const std::string KnownStr =
llvm::join(Set,
",");
11817 std::string AssumedStr =
"Universal";
11818 if (!Assumed.isUniversal()) {
11819 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11822 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11837struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11838 AAAssumptionInfoFunction(
const IRPosition &IRP, Attributor &
A)
11839 : AAAssumptionInfoImpl(IRP,
A,
11846 auto CallSitePred = [&](AbstractCallSite ACS) {
11847 const auto *AssumptionAA =
A.getAAFor<AAAssumptionInfo>(
11849 DepClassTy::REQUIRED);
11853 Changed |= getIntersection(AssumptionAA->getAssumed());
11854 return !getAssumed().empty() || !getKnown().empty();
11857 bool UsedAssumedInformation =
false;
11862 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11863 UsedAssumedInformation))
11864 return indicatePessimisticFixpoint();
11866 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11869 void trackStatistics()
const override {}
11873struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11875 AAAssumptionInfoCallSite(
const IRPosition &IRP, Attributor &
A)
11876 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11881 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11887 auto *AssumptionAA =
11888 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11890 return indicatePessimisticFixpoint();
11891 bool Changed = getIntersection(AssumptionAA->getAssumed());
11892 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11896 void trackStatistics()
const override {}
11901 DenseSet<StringRef> getInitialAssumptions(
const IRPosition &IRP) {
11908 return Assumptions;
11923struct AAUnderlyingObjectsImpl
11929 const std::string getAsStr(
Attributor *
A)
const override {
11930 if (!isValidState())
11931 return "<invalid>";
11934 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
11935 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
11937 if (!InterAssumedUnderlyingObjects.empty()) {
11938 OS <<
"inter objects:\n";
11939 for (
auto *Obj : InterAssumedUnderlyingObjects)
11940 OS << *Obj <<
'\n';
11942 if (!IntraAssumedUnderlyingObjects.empty()) {
11943 OS <<
"intra objects:\n";
11944 for (
auto *Obj : IntraAssumedUnderlyingObjects)
11945 OS << *Obj <<
'\n';
11951 void trackStatistics()
const override {}
11955 auto &
Ptr = getAssociatedValue();
11957 bool UsedAssumedInformation =
false;
11958 auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
11960 SmallPtrSet<Value *, 8> SeenObjects;
11964 Scope, UsedAssumedInformation))
11969 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11970 auto &
VAC = Values[
I];
11971 auto *Obj =
VAC.getValue();
11973 if (!SeenObjects.
insert(UO ? UO : Obj).second)
11975 if (UO && UO != Obj) {
11981 const auto *OtherAA =
A.getAAFor<AAUnderlyingObjects>(
11983 auto Pred = [&](
Value &
V) {
11991 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11993 "The forall call should not return false at this position");
11999 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
12000 UsedAssumedInformation);
12006 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
12008 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
12009 Scope, UsedAssumedInformation);
12023 if (!UsedAssumedInformation)
12024 indicateOptimisticFixpoint();
12025 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12028 bool forallUnderlyingObjects(
12029 function_ref<
bool(
Value &)> Pred,
12031 if (!isValidState())
12032 return Pred(getAssociatedValue());
12035 ? IntraAssumedUnderlyingObjects
12036 : InterAssumedUnderlyingObjects;
12037 for (
Value *Obj : AssumedUnderlyingObjects)
12047 bool handleIndirect(Attributor &
A,
Value &V,
12048 SmallSetVector<Value *, 8> &UnderlyingObjects,
12051 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
12053 auto Pred = [&](
Value &
V) {
12057 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12059 "The forall call should not return false at this position");
12065 SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;
12067 SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;
12070struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12071 AAUnderlyingObjectsFloating(
const IRPosition &IRP, Attributor &
A)
12072 : AAUnderlyingObjectsImpl(IRP,
A) {}
12075struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12076 AAUnderlyingObjectsArgument(
const IRPosition &IRP, Attributor &
A)
12077 : AAUnderlyingObjectsImpl(IRP,
A) {}
12080struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12081 AAUnderlyingObjectsCallSite(
const IRPosition &IRP, Attributor &
A)
12082 : AAUnderlyingObjectsImpl(IRP,
A) {}
12085struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12086 AAUnderlyingObjectsCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12087 : AAUnderlyingObjectsImpl(IRP,
A) {}
12090struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12091 AAUnderlyingObjectsReturned(
const IRPosition &IRP, Attributor &
A)
12092 : AAUnderlyingObjectsImpl(IRP,
A) {}
12095struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12096 AAUnderlyingObjectsCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12097 : AAUnderlyingObjectsImpl(IRP,
A) {}
12100struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12101 AAUnderlyingObjectsFunction(
const IRPosition &IRP, Attributor &
A)
12102 : AAUnderlyingObjectsImpl(IRP,
A) {}
12108struct AAGlobalValueInfoFloating :
public AAGlobalValueInfo {
12109 AAGlobalValueInfoFloating(
const IRPosition &IRP, Attributor &
A)
12110 : AAGlobalValueInfo(IRP,
A) {}
12115 bool checkUse(Attributor &
A,
const Use &U,
bool &Follow,
12116 SmallVectorImpl<const Value *> &Worklist) {
12123 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12124 << *UInst <<
"\n");
12127 int Idx = &
Cmp->getOperandUse(0) == &
U;
12130 return U == &getAnchorValue();
12135 auto CallSitePred = [&](AbstractCallSite ACS) {
12136 Worklist.
push_back(ACS.getInstruction());
12139 bool UsedAssumedInformation =
false;
12141 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12143 UsedAssumedInformation))
12161 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12170 unsigned NumUsesBefore =
Uses.size();
12172 SmallPtrSet<const Value *, 8> Visited;
12176 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12184 return checkUse(
A, U, Follow, Worklist);
12186 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12187 Uses.insert(&OldU);
12191 while (!Worklist.
empty()) {
12193 if (!Visited.
insert(V).second)
12195 if (!
A.checkForAllUses(UsePred, *
this, *V,
12197 DepClassTy::OPTIONAL,
12198 true, EquivalentUseCB)) {
12199 return indicatePessimisticFixpoint();
12203 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12204 : ChangeStatus::CHANGED;
12207 bool isPotentialUse(
const Use &U)
const override {
12208 return !isValidState() ||
Uses.contains(&U);
12213 return ChangeStatus::UNCHANGED;
12217 const std::string getAsStr(Attributor *
A)
const override {
12218 return "[" + std::to_string(
Uses.size()) +
" uses]";
12221 void trackStatistics()
const override {
12227 SmallPtrSet<const Use *, 8>
Uses;
12233struct AAIndirectCallInfoCallSite :
public AAIndirectCallInfo {
12234 AAIndirectCallInfoCallSite(
const IRPosition &IRP, Attributor &
A)
12235 : AAIndirectCallInfo(IRP,
A) {}
12239 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12240 if (!MD && !
A.isClosedWorldModule())
12244 for (
const auto &
Op : MD->operands())
12246 PotentialCallees.insert(Callee);
12247 }
else if (
A.isClosedWorldModule()) {
12249 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12250 PotentialCallees.insert_range(IndirectlyCallableFunctions);
12253 if (PotentialCallees.empty())
12254 indicateOptimisticFixpoint();
12262 SmallSetVector<Function *, 4> AssumedCalleesNow;
12263 bool AllCalleesKnownNow = AllCalleesKnown;
12265 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12266 bool &UsedAssumedInformation) {
12267 const auto *GIAA =
A.getAAFor<AAGlobalValueInfo>(
12269 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12271 UsedAssumedInformation = !GIAA->isAtFixpoint();
12275 auto AddPotentialCallees = [&]() {
12276 for (
auto *PotentialCallee : PotentialCallees) {
12277 bool UsedAssumedInformation =
false;
12278 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12279 AssumedCalleesNow.
insert(PotentialCallee);
12285 bool UsedAssumedInformation =
false;
12288 AA::ValueScope::AnyScope,
12289 UsedAssumedInformation)) {
12290 if (PotentialCallees.empty())
12291 return indicatePessimisticFixpoint();
12292 AddPotentialCallees();
12297 auto CheckPotentialCallee = [&](
Function &Fn) {
12298 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12301 auto &CachedResult = FilterResults[&Fn];
12302 if (CachedResult.has_value())
12303 return CachedResult.value();
12305 bool UsedAssumedInformation =
false;
12306 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12307 if (!UsedAssumedInformation)
12308 CachedResult =
false;
12317 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12318 bool IsKnown =
false;
12321 DepClassTy::OPTIONAL, IsKnown)) {
12323 CachedResult =
false;
12328 CachedResult =
true;
12334 for (
auto &VAC : Values) {
12338 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12342 if (CheckPotentialCallee(*VACFn))
12343 AssumedCalleesNow.
insert(VACFn);
12346 if (!PotentialCallees.empty()) {
12347 AddPotentialCallees();
12350 AllCalleesKnownNow =
false;
12353 if (AssumedCalleesNow == AssumedCallees &&
12354 AllCalleesKnown == AllCalleesKnownNow)
12355 return ChangeStatus::UNCHANGED;
12357 std::swap(AssumedCallees, AssumedCalleesNow);
12358 AllCalleesKnown = AllCalleesKnownNow;
12359 return ChangeStatus::CHANGED;
12365 if (!AllCalleesKnown && AssumedCallees.empty())
12366 return ChangeStatus::UNCHANGED;
12369 bool UsedAssumedInformation =
false;
12370 if (
A.isAssumedDead(*CB,
this,
nullptr,
12371 UsedAssumedInformation))
12372 return ChangeStatus::UNCHANGED;
12376 if (
FP->getType()->getPointerAddressSpace())
12377 FP =
new AddrSpaceCastInst(
FP, PointerType::get(
FP->getContext(), 0),
12387 if (AssumedCallees.empty()) {
12388 assert(AllCalleesKnown &&
12389 "Expected all callees to be known if there are none.");
12390 A.changeToUnreachableAfterManifest(CB);
12391 return ChangeStatus::CHANGED;
12395 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12396 auto *NewCallee = AssumedCallees.front();
12399 NumIndirectCallsPromoted++;
12400 return ChangeStatus::CHANGED;
12407 A.deleteAfterManifest(*CB);
12408 return ChangeStatus::CHANGED;
12418 bool SpecializedForAnyCallees =
false;
12419 bool SpecializedForAllCallees = AllCalleesKnown;
12420 ICmpInst *LastCmp =
nullptr;
12423 for (Function *NewCallee : AssumedCallees) {
12424 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12425 AssumedCallees.size())) {
12426 SkippedAssumedCallees.
push_back(NewCallee);
12427 SpecializedForAllCallees =
false;
12430 SpecializedForAnyCallees =
true;
12436 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12437 A.registerManifestAddedBasicBlock(*IP->getParent());
12443 A.registerManifestAddedBasicBlock(*ElseBB);
12445 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12450 CastInst *RetBC =
nullptr;
12451 CallInst *NewCall =
nullptr;
12456 NumIndirectCallsPromoted++;
12464 auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
12465 if (!AllCalleesKnown)
12466 return ChangeStatus::UNCHANGED;
12467 MDBuilder MDB(IndirectCB.getContext());
12468 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12469 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12470 return ChangeStatus::CHANGED;
12473 if (!SpecializedForAnyCallees)
12474 return AttachCalleeMetadata(*CB);
12477 if (SpecializedForAllCallees) {
12480 new UnreachableInst(IP->getContext(), IP);
12481 IP->eraseFromParent();
12484 CBClone->setName(CB->
getName());
12485 CBClone->insertBefore(*IP->getParent(), IP);
12486 NewCalls.
push_back({CBClone,
nullptr});
12487 AttachCalleeMetadata(*CBClone);
12494 CB->
getParent()->getFirstInsertionPt());
12495 for (
auto &It : NewCalls) {
12496 CallBase *NewCall = It.first;
12497 Instruction *CallRet = It.second ? It.second : It.first;
12509 A.deleteAfterManifest(*CB);
12510 Changed = ChangeStatus::CHANGED;
12516 const std::string getAsStr(Attributor *
A)
const override {
12517 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12518 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12522 void trackStatistics()
const override {
12523 if (AllCalleesKnown) {
12525 Eliminated, CallSites,
12526 "Number of indirect call sites eliminated via specialization")
12529 "Number of indirect call sites specialized")
12533 bool foreachCallee(function_ref<
bool(Function *)> CB)
const override {
12534 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12539 DenseMap<Function *, std::optional<bool>> FilterResults;
12543 SmallSetVector<Function *, 4> PotentialCallees;
12547 SmallSetVector<Function *, 4> AssumedCallees;
12551 bool AllCalleesKnown =
true;
12558struct AAInvariantLoadPointerImpl
12559 :
public StateWrapper<BitIntegerState<uint8_t, 15>,
12560 AAInvariantLoadPointer> {
12564 IS_NOALIAS = 1 << 0,
12567 IS_NOEFFECT = 1 << 1,
12569 IS_LOCALLY_INVARIANT = 1 << 2,
12571 IS_LOCALLY_CONSTRAINED = 1 << 3,
12573 IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
12574 IS_LOCALLY_CONSTRAINED,
12576 static_assert(getBestState() == IS_BEST_STATE,
"Unexpected best state");
12579 StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;
12583 AAInvariantLoadPointerImpl(
const IRPosition &IRP, Attributor &
A)
12586 bool isKnownInvariant()
const final {
12587 return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
12590 bool isKnownLocallyInvariant()
const final {
12591 if (isKnown(IS_LOCALLY_INVARIANT))
12593 return isKnown(IS_NOALIAS | IS_NOEFFECT);
12596 bool isAssumedInvariant()
const final {
12597 return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
12600 bool isAssumedLocallyInvariant()
const final {
12601 if (isAssumed(IS_LOCALLY_INVARIANT))
12603 return isAssumed(IS_NOALIAS | IS_NOEFFECT);
12610 if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
12611 return indicatePessimisticFixpoint();
12615 Changed |= updateLocalInvariance(
A);
12621 if (!isKnownInvariant())
12622 return ChangeStatus::UNCHANGED;
12625 const Value *
Ptr = &getAssociatedValue();
12626 const auto TagInvariantLoads = [&](
const Use &
U,
bool &) {
12627 if (
U.get() !=
Ptr)
12635 if (!
A.isRunOn(
I->getFunction()))
12638 if (
I->hasMetadata(LLVMContext::MD_invariant_load))
12642 LI->setMetadata(LLVMContext::MD_invariant_load,
12644 Changed = ChangeStatus::CHANGED;
12649 (void)
A.checkForAllUses(TagInvariantLoads, *
this, *
Ptr);
12654 const std::string getAsStr(Attributor *)
const override {
12655 if (isKnownInvariant())
12656 return "load-invariant pointer";
12657 return "non-invariant pointer";
12661 void trackStatistics()
const override {}
12665 bool requiresNoAlias()
const {
12666 switch (getPositionKind()) {
12672 case IRP_CALL_SITE:
12674 case IRP_CALL_SITE_RETURNED: {
12679 case IRP_ARGUMENT: {
12680 const Function *
F = getAssociatedFunction();
12681 assert(
F &&
"no associated function for argument");
12687 bool isExternal()
const {
12688 const Function *
F = getAssociatedFunction();
12692 getPositionKind() != IRP_CALL_SITE_RETURNED;
12696 if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
12697 return ChangeStatus::UNCHANGED;
12700 if (
const auto *ANoAlias =
A.getOrCreateAAFor<AANoAlias>(
12701 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12702 if (ANoAlias->isKnownNoAlias()) {
12703 addKnownBits(IS_NOALIAS);
12704 return ChangeStatus::CHANGED;
12707 if (!ANoAlias->isAssumedNoAlias()) {
12708 removeAssumedBits(IS_NOALIAS);
12709 return ChangeStatus::CHANGED;
12712 return ChangeStatus::UNCHANGED;
12717 if (
const Argument *Arg = getAssociatedArgument()) {
12719 addKnownBits(IS_NOALIAS);
12720 return ChangeStatus::UNCHANGED;
12725 removeAssumedBits(IS_NOALIAS);
12726 return ChangeStatus::CHANGED;
12729 return ChangeStatus::UNCHANGED;
12733 if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
12734 return ChangeStatus::UNCHANGED;
12736 if (!getAssociatedFunction())
12737 return indicatePessimisticFixpoint();
12740 return indicatePessimisticFixpoint();
12742 const auto HasNoEffectLoads = [&](
const Use &
U,
bool &) {
12744 return !LI || !LI->mayHaveSideEffects();
12746 if (!
A.checkForAllUses(HasNoEffectLoads, *
this, getAssociatedValue()))
12747 return indicatePessimisticFixpoint();
12749 if (
const auto *AMemoryBehavior =
A.getOrCreateAAFor<AAMemoryBehavior>(
12750 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12753 if (!AMemoryBehavior->isAssumedReadOnly())
12754 return indicatePessimisticFixpoint();
12756 if (AMemoryBehavior->isKnownReadOnly()) {
12757 addKnownBits(IS_NOEFFECT);
12758 return ChangeStatus::UNCHANGED;
12761 return ChangeStatus::UNCHANGED;
12764 if (
const Argument *Arg = getAssociatedArgument()) {
12766 addKnownBits(IS_NOEFFECT);
12767 return ChangeStatus::UNCHANGED;
12772 return indicatePessimisticFixpoint();
12775 return ChangeStatus::UNCHANGED;
12779 if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
12780 return ChangeStatus::UNCHANGED;
12783 const auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
12784 getIRPosition(),
this, DepClassTy::REQUIRED);
12786 return ChangeStatus::UNCHANGED;
12788 bool UsedAssumedInformation =
false;
12789 const auto IsLocallyInvariantLoadIfPointer = [&](
const Value &
V) {
12790 if (!
V.getType()->isPointerTy())
12792 const auto *IsInvariantLoadPointer =
12794 DepClassTy::REQUIRED);
12796 if (!IsInvariantLoadPointer)
12799 if (IsInvariantLoadPointer->isKnownLocallyInvariant())
12801 if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
12804 UsedAssumedInformation =
true;
12807 if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
12808 return indicatePessimisticFixpoint();
12814 if (!IsLocallyInvariantLoadIfPointer(*Arg))
12815 return indicatePessimisticFixpoint();
12820 if (!UsedAssumedInformation) {
12822 addKnownBits(IS_LOCALLY_INVARIANT);
12823 return ChangeStatus::CHANGED;
12826 return ChangeStatus::UNCHANGED;
12830struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
12831 AAInvariantLoadPointerFloating(
const IRPosition &IRP, Attributor &
A)
12832 : AAInvariantLoadPointerImpl(IRP,
A) {}
12835struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
12836 AAInvariantLoadPointerReturned(
const IRPosition &IRP, Attributor &
A)
12837 : AAInvariantLoadPointerImpl(IRP,
A) {}
12840 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12844struct AAInvariantLoadPointerCallSiteReturned final
12845 : AAInvariantLoadPointerImpl {
12846 AAInvariantLoadPointerCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12847 : AAInvariantLoadPointerImpl(IRP,
A) {}
12850 const Function *
F = getAssociatedFunction();
12851 assert(
F &&
"no associated function for return from call");
12853 if (!
F->isDeclaration() && !
F->isIntrinsic())
12854 return AAInvariantLoadPointerImpl::initialize(
A);
12859 return AAInvariantLoadPointerImpl::initialize(
A);
12861 if (
F->onlyReadsMemory() &&
F->hasNoSync())
12862 return AAInvariantLoadPointerImpl::initialize(
A);
12866 indicatePessimisticFixpoint();
12870struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
12871 AAInvariantLoadPointerArgument(
const IRPosition &IRP, Attributor &
A)
12872 : AAInvariantLoadPointerImpl(IRP,
A) {}
12875 const Function *
F = getAssociatedFunction();
12876 assert(
F &&
"no associated function for argument");
12879 addKnownBits(IS_LOCALLY_CONSTRAINED);
12883 if (!
F->hasLocalLinkage())
12884 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12888struct AAInvariantLoadPointerCallSiteArgument final
12889 : AAInvariantLoadPointerImpl {
12890 AAInvariantLoadPointerCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12891 : AAInvariantLoadPointerImpl(IRP,
A) {}
12898template <
typename InstType>
12899static bool makeChange(Attributor &
A, InstType *MemInst,
const Use &U,
12900 Value *OriginalValue, PointerType *NewPtrTy,
12901 bool UseOriginalValue) {
12902 if (
U.getOperandNo() != InstType::getPointerOperandIndex())
12905 if (MemInst->isVolatile()) {
12906 auto *
TTI =
A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
12907 *MemInst->getFunction());
12908 unsigned NewAS = NewPtrTy->getPointerAddressSpace();
12913 if (UseOriginalValue) {
12914 A.changeUseAfterManifest(
const_cast<Use &
>(U), *OriginalValue);
12918 Instruction *CastInst =
new AddrSpaceCastInst(OriginalValue, NewPtrTy);
12920 A.changeUseAfterManifest(
const_cast<Use &
>(U), *CastInst);
12924struct AAAddressSpaceImpl :
public AAAddressSpace {
12925 AAAddressSpaceImpl(
const IRPosition &IRP, Attributor &
A)
12926 : AAAddressSpace(IRP,
A) {}
12929 assert(isValidState() &&
"the AA is invalid");
12930 return AssumedAddressSpace;
12935 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12936 "Associated value is not a pointer");
12938 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
12939 indicatePessimisticFixpoint();
12943 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12944 unsigned AS = getAssociatedType()->getPointerAddressSpace();
12945 if (AS != FlatAS) {
12946 [[maybe_unused]]
bool R = takeAddressSpace(AS);
12947 assert(R &&
"The take should happen");
12948 indicateOptimisticFixpoint();
12953 uint32_t OldAddressSpace = AssumedAddressSpace;
12954 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12956 auto CheckAddressSpace = [&](
Value &Obj) {
12962 unsigned ObjAS = Obj.getType()->getPointerAddressSpace();
12963 if (ObjAS != FlatAS)
12964 return takeAddressSpace(ObjAS);
12978 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
12980 if (AssumedAS != ~0U)
12981 return takeAddressSpace(AssumedAS);
12985 return takeAddressSpace(FlatAS);
12988 auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(),
this,
12989 DepClassTy::REQUIRED);
12990 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
12991 return indicatePessimisticFixpoint();
12993 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12994 : ChangeStatus::CHANGED;
13001 if (NewAS == InvalidAddressSpace ||
13003 return ChangeStatus::UNCHANGED;
13005 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13007 Value *AssociatedValue = &getAssociatedValue();
13008 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
13011 PointerType::get(getAssociatedType()->
getContext(), NewAS);
13012 bool UseOriginalValue =
13017 auto Pred = [&](
const Use &
U,
bool &) {
13018 if (
U.get() != AssociatedValue)
13029 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13032 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13035 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
13038 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
13045 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
13048 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13052 const std::string getAsStr(Attributor *
A)
const override {
13053 if (!isValidState())
13054 return "addrspace(<invalid>)";
13055 return "addrspace(" +
13056 (AssumedAddressSpace == InvalidAddressSpace
13058 : std::to_string(AssumedAddressSpace)) +
13063 uint32_t AssumedAddressSpace = InvalidAddressSpace;
13065 bool takeAddressSpace(uint32_t AS) {
13066 if (AssumedAddressSpace == InvalidAddressSpace) {
13067 AssumedAddressSpace = AS;
13070 return AssumedAddressSpace == AS;
13073 static Value *peelAddrspacecast(
Value *V,
unsigned FlatAS) {
13075 assert(
I->getSrcAddressSpace() != FlatAS &&
13076 "there should not be flat AS -> non-flat AS");
13077 return I->getPointerOperand();
13080 if (
C->getOpcode() == Instruction::AddrSpaceCast) {
13081 assert(
C->getOperand(0)->getType()->getPointerAddressSpace() !=
13083 "there should not be flat AS -> non-flat AS X");
13084 return C->getOperand(0);
13090struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
13091 AAAddressSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13092 : AAAddressSpaceImpl(IRP,
A) {}
13094 void trackStatistics()
const override {
13099struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
13100 AAAddressSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13101 : AAAddressSpaceImpl(IRP,
A) {}
13107 (void)indicatePessimisticFixpoint();
13110 void trackStatistics()
const override {
13115struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
13116 AAAddressSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13117 : AAAddressSpaceImpl(IRP,
A) {}
13119 void trackStatistics()
const override {
13124struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
13125 AAAddressSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13126 : AAAddressSpaceImpl(IRP,
A) {}
13131struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
13132 AAAddressSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13133 : AAAddressSpaceImpl(IRP,
A) {}
13139 (void)indicatePessimisticFixpoint();
13142 void trackStatistics()
const override {
13157struct AANoAliasAddrSpaceImpl :
public AANoAliasAddrSpace {
13158 AANoAliasAddrSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13159 : AANoAliasAddrSpace(IRP,
A) {}
13162 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13163 "Associated value is not a pointer");
13167 std::optional<unsigned> FlatAS =
A.getInfoCache().getFlatAddressSpace();
13168 if (!FlatAS.has_value()) {
13169 indicatePessimisticFixpoint();
13175 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13176 if (AS != *FlatAS) {
13178 indicateOptimisticFixpoint();
13183 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13184 uint32_t OldAssumed = getAssumed();
13186 auto CheckAddressSpace = [&](
Value &Obj) {
13190 unsigned AS = Obj.getType()->getPointerAddressSpace();
13194 removeAS(Obj.getType()->getPointerAddressSpace());
13198 const AAUnderlyingObjects *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
13199 getIRPosition(),
this, DepClassTy::REQUIRED);
13201 return indicatePessimisticFixpoint();
13203 return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
13204 : ChangeStatus::CHANGED;
13209 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13211 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13212 if (AS != FlatAS ||
Map.empty())
13213 return ChangeStatus::UNCHANGED;
13215 LLVMContext &Ctx = getAssociatedValue().getContext();
13216 MDNode *NoAliasASNode =
nullptr;
13217 MDBuilder MDB(Ctx);
13219 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13222 unsigned Upper =
I.stop();
13223 unsigned Lower =
I.start();
13224 if (!NoAliasASNode) {
13225 NoAliasASNode = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13228 MDNode *ASRange = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13232 Value *AssociatedValue = &getAssociatedValue();
13235 auto AddNoAliasAttr = [&](
const Use &
U,
bool &) {
13236 if (
U.get() != AssociatedValue)
13239 if (!Inst || Inst->
hasMetadata(LLVMContext::MD_noalias_addrspace))
13246 Inst->
setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
13250 (void)
A.checkForAllUses(AddNoAliasAttr, *
this, *AssociatedValue,
13252 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13256 const std::string getAsStr(Attributor *
A)
const override {
13257 if (!isValidState())
13258 return "<invalid>";
13260 raw_string_ostream OS(Str);
13261 OS <<
"CanNotBeAddrSpace(";
13262 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13263 unsigned Upper =
I.stop();
13264 unsigned Lower =
I.start();
13265 OS <<
' ' <<
'[' <<
Upper <<
',' <<
Lower + 1 <<
')';
13272 void removeAS(
unsigned AS) {
13273 RangeMap::iterator
I =
Map.find(AS);
13275 if (
I !=
Map.end()) {
13276 unsigned Upper =
I.stop();
13277 unsigned Lower =
I.start();
13281 if (AS != ~((
unsigned)0) && AS + 1 <=
Upper)
13283 if (AS != 0 &&
Lower <= AS - 1)
13288 void resetASRanges(Attributor &
A) {
13290 Map.insert(0,
A.getInfoCache().getMaxAddrSpace(),
true);
13294struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
13295 AANoAliasAddrSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13296 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13298 void trackStatistics()
const override {
13303struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
13304 AANoAliasAddrSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13305 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13307 void trackStatistics()
const override {
13312struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
13313 AANoAliasAddrSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13314 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13316 void trackStatistics()
const override {
13321struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
13322 AANoAliasAddrSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13323 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13325 void trackStatistics()
const override {
13330struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
13331 AANoAliasAddrSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13332 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13334 void trackStatistics()
const override {
13341struct AAAllocationInfoImpl :
public AAAllocationInfo {
13342 AAAllocationInfoImpl(
const IRPosition &IRP, Attributor &
A)
13343 : AAAllocationInfo(IRP,
A) {}
13345 std::optional<TypeSize> getAllocatedSize()
const override {
13346 assert(isValidState() &&
"the AA is invalid");
13347 return AssumedAllocatedSize;
13350 std::optional<TypeSize> findInitialAllocationSize(Instruction *
I,
13351 const DataLayout &
DL) {
13354 switch (
I->getOpcode()) {
13355 case Instruction::Alloca: {
13360 return std::nullopt;
13366 const IRPosition &IRP = getIRPosition();
13371 return indicatePessimisticFixpoint();
13373 bool IsKnownNoCapture;
13375 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13376 return indicatePessimisticFixpoint();
13378 const AAPointerInfo *PI =
13379 A.getOrCreateAAFor<AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
13382 return indicatePessimisticFixpoint();
13385 return indicatePessimisticFixpoint();
13387 const DataLayout &
DL =
A.getDataLayout();
13388 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
13391 if (!AllocationSize)
13392 return indicatePessimisticFixpoint();
13396 if (*AllocationSize == 0)
13397 return indicatePessimisticFixpoint();
13403 return indicatePessimisticFixpoint();
13405 if (BinSize == 0) {
13406 auto NewAllocationSize = std::make_optional<TypeSize>(0,
false);
13407 if (!changeAllocationSize(NewAllocationSize))
13408 return ChangeStatus::UNCHANGED;
13409 return ChangeStatus::CHANGED;
13413 const auto &It = PI->
begin();
13416 if (It->first.Offset != 0)
13417 return indicatePessimisticFixpoint();
13419 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13421 if (SizeOfBin >= *AllocationSize)
13422 return indicatePessimisticFixpoint();
13424 auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8,
false);
13426 if (!changeAllocationSize(NewAllocationSize))
13427 return ChangeStatus::UNCHANGED;
13429 return ChangeStatus::CHANGED;
13435 assert(isValidState() &&
13436 "Manifest should only be called if the state is valid.");
13440 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13442 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13444 switch (
I->getOpcode()) {
13446 case Instruction::Alloca: {
13450 Type *CharType = Type::getInt8Ty(
I->getContext());
13452 auto *NumBytesToValue =
13453 ConstantInt::get(
I->getContext(), APInt(32, NumBytesToAllocate));
13456 insertPt = std::next(insertPt);
13457 AllocaInst *NewAllocaInst =
13462 return ChangeStatus::CHANGED;
13470 return ChangeStatus::UNCHANGED;
13474 const std::string getAsStr(Attributor *
A)
const override {
13475 if (!isValidState())
13476 return "allocationinfo(<invalid>)";
13477 return "allocationinfo(" +
13478 (AssumedAllocatedSize == HasNoAllocationSize
13480 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13485 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13489 bool changeAllocationSize(std::optional<TypeSize>
Size) {
13490 if (AssumedAllocatedSize == HasNoAllocationSize ||
13491 AssumedAllocatedSize !=
Size) {
13492 AssumedAllocatedSize =
Size;
13499struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13500 AAAllocationInfoFloating(
const IRPosition &IRP, Attributor &
A)
13501 : AAAllocationInfoImpl(IRP,
A) {}
13503 void trackStatistics()
const override {
13508struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13509 AAAllocationInfoReturned(
const IRPosition &IRP, Attributor &
A)
13510 : AAAllocationInfoImpl(IRP,
A) {}
13516 (void)indicatePessimisticFixpoint();
13519 void trackStatistics()
const override {
13524struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13525 AAAllocationInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13526 : AAAllocationInfoImpl(IRP,
A) {}
13528 void trackStatistics()
const override {
13533struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13534 AAAllocationInfoArgument(
const IRPosition &IRP, Attributor &
A)
13535 : AAAllocationInfoImpl(IRP,
A) {}
13537 void trackStatistics()
const override {
13542struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13543 AAAllocationInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13544 : AAAllocationInfoImpl(IRP,
A) {}
13549 (void)indicatePessimisticFixpoint();
13552 void trackStatistics()
const override {
13601#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13602 case IRPosition::PK: \
13603 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13605#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13606 case IRPosition::PK: \
13607 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13611#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13612 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13613 CLASS *AA = nullptr; \
13614 switch (IRP.getPositionKind()) { \
13615 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13616 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13617 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13618 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13619 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13620 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13621 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13622 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13627#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13628 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13629 CLASS *AA = nullptr; \
13630 switch (IRP.getPositionKind()) { \
13631 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13632 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13633 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13634 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13635 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13636 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13637 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13638 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13643#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13644 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13645 CLASS *AA = nullptr; \
13646 switch (IRP.getPositionKind()) { \
13647 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13649 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13655#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13656 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13657 CLASS *AA = nullptr; \
13658 switch (IRP.getPositionKind()) { \
13659 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13660 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13661 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13662 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13663 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13664 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13665 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13666 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13671#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13672 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13673 CLASS *AA = nullptr; \
13674 switch (IRP.getPositionKind()) { \
13675 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13676 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13677 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13678 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13679 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13680 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13681 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13682 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13687#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13688 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13689 CLASS *AA = nullptr; \
13690 switch (IRP.getPositionKind()) { \
13691 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13692 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13693 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13694 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13695 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13696 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13697 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13698 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13750#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13751#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13752#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13753#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13754#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13755#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13756#undef SWITCH_PK_CREATE
13757#undef SWITCH_PK_INV
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Development, "development", "for training")))
dot regions Print regions of function to dot true view regions View regions of function(with no function bodies)"
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
LLVM_ABI LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
FunctionAddr VTableAddr Value
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
BumpPtrAllocatorImpl BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
@ Success
The lock was released successfully.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
GenericCycleInfo< SSAContext > CycleInfo
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves it's defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this possition can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everythign but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
static unsigned MaxPotentialValues
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
Type * Ty
The type of the original value.