54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
// Tag for LLVM_DEBUG output from this file; enables `-debug-only=attributor`.
#define DEBUG_TYPE "attributor"
86 cl::desc(
"Manifest Attributor internal string attributes."),
99 cl::desc(
"Maximum number of potential values to be "
100 "tracked for each position."),
105 "attributor-max-potential-values-iterations",
cl::Hidden,
107 "Maximum number of iterations we keep dismantling potential values."),
// Pass-wide counters, emitted with `-stats`.
// Total number of abstract attributes (AAs) the Attributor instantiated.
STATISTIC(NumAAs, "Number of abstract attributes created");
// Indirect call sites rewritten to direct calls based on callee information.
STATISTIC(NumIndirectCallsPromoted, "Number of indirect calls promoted");
// Helper macros for per-attribute statistics. The pattern is:
//   STATS_DECLTRACK_<POSITION>_ATTR(attr) -> declares (once) and increments a
//   STATISTIC named NumIR<Position>_<attr> with a human-readable message.
// BUILD_STAT_MSG_IR_ATTR builds the message string via preprocessor
// stringification; BUILD_STAT_NAME pastes the counter identifier.
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
// Declare the STATISTIC counter itself.
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
// Bump the previously declared counter.
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
// Declare-and-increment in one statement block (usable inside a function).
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
// Position-specific convenience wrappers: argument, call-site argument,
// function, call site, function return, call-site return, floating value.
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
157#define PIPE_OPERATOR(CLASS) \
158 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
159 return OS << static_cast<const AbstractAttribute &>(AA); \
216 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
219 auto *BB =
I->getParent();
225 return !HeaderOnly || BB ==
C->getHeader();
236 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
241 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
245 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
248 if (!isa<StructType>(Ty))
261 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
271 bool AllowVolatile) {
272 if (!AllowVolatile &&
I->isVolatile())
275 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
276 return LI->getPointerOperand();
279 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
280 return SI->getPointerOperand();
283 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
284 return CXI->getPointerOperand();
287 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
288 return RMWI->getPointerOperand();
310 bool GetMinOffset,
bool AllowNonInbounds,
311 bool UseAssumed =
false) {
313 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
318 UseAssumed ? DepClassTy::OPTIONAL
320 if (!ValueConstantRangeAA)
343 const Value *
Ptr, int64_t &BytesOffset,
345 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
348 true, AllowNonInbounds);
356template <
typename AAType,
typename StateType =
typename AAType::StateType,
358 bool RecurseForSelectAndPHI =
true>
360 Attributor &
A,
const AAType &QueryingAA, StateType &S,
362 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
363 << QueryingAA <<
" into " << S <<
"\n");
365 assert((QueryingAA.getIRPosition().getPositionKind() ==
367 QueryingAA.getIRPosition().getPositionKind() ==
369 "Can only clamp returned value states for a function returned or call "
370 "site returned position!");
374 std::optional<StateType>
T;
377 auto CheckReturnValue = [&](
Value &RV) ->
bool {
382 return AA::hasAssumedIRAttr<IRAttributeKind>(
383 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
387 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
391 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
392 const StateType &AAS = AA->getState();
394 T = StateType::getBestState(AAS);
396 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
398 return T->isValidState();
401 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
402 AA::ValueScope::Intraprocedural,
403 RecurseForSelectAndPHI))
404 S.indicatePessimisticFixpoint();
411template <
typename AAType,
typename BaseType,
412 typename StateType =
typename BaseType::StateType,
413 bool PropagateCallBaseContext =
false,
415 bool RecurseForSelectAndPHI =
true>
416struct AAReturnedFromReturnedValues :
public BaseType {
422 StateType S(StateType::getBestState(this->getState()));
424 RecurseForSelectAndPHI>(
426 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
429 return clampStateAndIndicateChange<StateType>(this->getState(), S);
435template <
typename AAType,
typename StateType =
typename AAType::StateType,
437static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
439 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
440 << QueryingAA <<
" into " << S <<
"\n");
442 assert(QueryingAA.getIRPosition().getPositionKind() ==
444 "Can only clamp call site argument states for an argument position!");
448 std::optional<StateType>
T;
451 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
463 return AA::hasAssumedIRAttr<IRAttributeKind>(
464 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
468 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
471 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
472 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
474 const StateType &AAS = AA->getState();
476 T = StateType::getBestState(AAS);
478 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
480 return T->isValidState();
483 bool UsedAssumedInformation =
false;
484 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
485 UsedAssumedInformation))
486 S.indicatePessimisticFixpoint();
493template <
typename AAType,
typename BaseType,
494 typename StateType =
typename AAType::StateType,
496bool getArgumentStateFromCallBaseContext(
Attributor &
A,
500 "Expected an 'argument' position !");
506 assert(ArgNo >= 0 &&
"Invalid Arg No!");
512 return AA::hasAssumedIRAttr<IRAttributeKind>(
513 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
517 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
520 const StateType &CBArgumentState =
521 static_cast<const StateType &
>(AA->getState());
523 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
524 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
528 State ^= CBArgumentState;
533template <
typename AAType,
typename BaseType,
534 typename StateType =
typename AAType::StateType,
535 bool BridgeCallBaseContext =
false,
537struct AAArgumentFromCallSiteArguments :
public BaseType {
543 StateType S = StateType::getBestState(this->getState());
545 if (BridgeCallBaseContext) {
547 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
549 A, *
this, this->getIRPosition(), S);
551 return clampStateAndIndicateChange<StateType>(this->getState(), S);
553 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
558 return clampStateAndIndicateChange<StateType>(this->getState(), S);
563template <
typename AAType,
typename BaseType,
564 typename StateType =
typename BaseType::StateType,
565 bool IntroduceCallBaseContext =
false,
567struct AACalleeToCallSite :
public BaseType {
572 auto IRPKind = this->getIRPosition().getPositionKind();
575 "Can only wrap function returned positions for call site "
576 "returned positions!");
577 auto &S = this->getState();
579 CallBase &CB = cast<CallBase>(this->getAnchorValue());
580 if (IntroduceCallBaseContext)
581 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
586 for (
const Function *Callee : Callees) {
590 IntroduceCallBaseContext ? &CB :
nullptr)
592 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
596 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
597 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
603 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
607 if (S.isAtFixpoint())
608 return S.isValidState();
612 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
613 return S.indicatePessimisticFixpoint();
619template <
class AAType,
typename StateType =
typename AAType::StateType>
620static void followUsesInContext(AAType &AA,
Attributor &
A,
625 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
626 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
628 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
630 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
648 const Value &Val = AA.getIRPosition().getAssociatedValue();
649 if (isa<ConstantData>(Val))
653 A.getInfoCache().getMustBeExecutedContextExplorer();
659 for (
const Use &U : Val.
uses())
662 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
664 if (S.isAtFixpoint())
669 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
670 if (Br->isConditional())
709 StateType ParentState;
713 ParentState.indicateOptimisticFixpoint();
715 for (
const BasicBlock *BB : Br->successors()) {
716 StateType ChildState;
718 size_t BeforeSize =
Uses.size();
719 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
722 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
725 ParentState &= ChildState;
738namespace PointerInfo {
799 R.indicatePessimisticFixpoint();
892 template <
typename F>
899 if (!
Range.mayOverlap(ItRange))
901 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
902 for (
auto Index : It.getSecond()) {
912 template <
typename F>
923 for (
unsigned Index : LocalList->getSecond()) {
926 if (
Range.offsetAndSizeAreUnknown())
942 RemoteI = RemoteI ? RemoteI : &
I;
946 bool AccExists =
false;
948 for (
auto Index : LocalList) {
950 if (
A.getLocalInst() == &
I) {
959 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
961 for (
auto Key : ToAdd) {
970 "New Access should have been at AccIndex");
971 LocalList.push_back(AccIndex);
980 auto Before = Current;
982 if (Current == Before)
985 auto &ExistingRanges = Before.getRanges();
986 auto &NewRanges = Current.getRanges();
993 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
1000 "Expected bin to actually contain the Access.");
1001 Bin.erase(AccIndex);
1022struct AAPointerInfoImpl
1023 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1028 const std::string getAsStr(
Attributor *
A)
const override {
1029 return std::string(
"PointerInfo ") +
1030 (isValidState() ? (std::string(
"#") +
1031 std::to_string(OffsetBins.size()) +
" bins")
1036 [](int64_t O) {
return std::to_string(O); }),
1044 return AAPointerInfo::manifest(
A);
1047 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1048 virtual const_bin_iterator
end()
const override {
return State::end(); }
1049 virtual int64_t numOffsetBins()
const override {
1050 return State::numOffsetBins();
1052 virtual bool reachesReturn()
const override {
1053 return !ReturnedOffsets.isUnassigned();
1055 virtual void addReturnedOffsetsTo(OffsetInfo &OI)
const override {
1056 if (ReturnedOffsets.isUnknown()) {
1061 OffsetInfo MergedOI;
1062 for (
auto Offset : ReturnedOffsets) {
1063 OffsetInfo TmpOI = OI;
1065 MergedOI.merge(TmpOI);
1067 OI = std::move(MergedOI);
1070 ChangeStatus setReachesReturn(
const OffsetInfo &ReachedReturnedOffsets) {
1071 if (ReturnedOffsets.isUnknown())
1072 return ChangeStatus::UNCHANGED;
1073 if (ReachedReturnedOffsets.isUnknown()) {
1074 ReturnedOffsets.setUnknown();
1075 return ChangeStatus::CHANGED;
1077 if (ReturnedOffsets.merge(ReachedReturnedOffsets))
1078 return ChangeStatus::CHANGED;
1079 return ChangeStatus::UNCHANGED;
1082 bool forallInterferingAccesses(
1086 return State::forallInterferingAccesses(
Range, CB);
1089 bool forallInterferingAccesses(
1091 bool FindInterferingWrites,
bool FindInterferingReads,
1095 HasBeenWrittenTo =
false;
1102 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1107 bool AllInSameNoSyncFn = IsAssumedNoSync;
1108 bool InstIsExecutedByInitialThreadOnly =
1109 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1116 bool InstIsExecutedInAlignedRegion =
1117 FindInterferingReads && ExecDomainAA &&
1118 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1120 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1121 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1124 bool IsThreadLocalObj =
1133 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1134 if (IsThreadLocalObj || AllInSameNoSyncFn)
1136 const auto *FnExecDomainAA =
1137 I.getFunction() == &
Scope
1142 if (!FnExecDomainAA)
1144 if (InstIsExecutedInAlignedRegion ||
1145 (FindInterferingWrites &&
1146 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1147 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1150 if (InstIsExecutedByInitialThreadOnly &&
1151 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1152 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1161 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1162 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1163 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1164 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1168 bool IsKnownNoRecurse;
1169 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1176 bool InstInKernel =
A.getInfoCache().isKernel(Scope);
1177 bool ObjHasKernelLifetime =
false;
1178 const bool UseDominanceReasoning =
1179 FindInterferingWrites && IsKnownNoRecurse;
1190 case AA::GPUAddressSpace::Shared:
1191 case AA::GPUAddressSpace::Constant:
1192 case AA::GPUAddressSpace::Local:
1204 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1206 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1210 ObjHasKernelLifetime =
A.getInfoCache().isKernel(*AIFn);
1211 bool IsKnownNoRecurse;
1212 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1214 IsKnownNoRecurse)) {
1215 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1217 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1220 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1221 if (ObjHasKernelLifetime)
1222 IsLiveInCalleeCB = [&
A](
const Function &Fn) {
1223 return !
A.getInfoCache().isKernel(Fn);
1231 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1233 bool AccInSameScope = AccScope == &
Scope;
1237 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1238 A.getInfoCache().isKernel(*AccScope))
1241 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1242 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1243 ExclusionSet.
insert(Acc.getRemoteInst());
1246 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1247 (!FindInterferingReads || !Acc.isRead()))
1250 bool Dominates = FindInterferingWrites && DT && Exact &&
1251 Acc.isMustAccess() && AccInSameScope &&
1254 DominatingWrites.
insert(&Acc);
1258 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1260 InterferingAccesses.
push_back({&Acc, Exact});
1263 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1266 HasBeenWrittenTo = !DominatingWrites.
empty();
1270 for (
const Access *Acc : DominatingWrites) {
1271 if (!LeastDominatingWriteInst) {
1272 LeastDominatingWriteInst = Acc->getRemoteInst();
1273 }
else if (DT->
dominates(LeastDominatingWriteInst,
1274 Acc->getRemoteInst())) {
1275 LeastDominatingWriteInst = Acc->getRemoteInst();
1280 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1281 if (SkipCB && SkipCB(Acc))
1283 if (!CanIgnoreThreading(Acc))
1289 bool ReadChecked = !FindInterferingReads;
1290 bool WriteChecked = !FindInterferingWrites;
1296 &ExclusionSet, IsLiveInCalleeCB))
1301 if (!WriteChecked) {
1303 &ExclusionSet, IsLiveInCalleeCB))
1304 WriteChecked =
true;
1318 if (!WriteChecked && HasBeenWrittenTo &&
1319 Acc.getRemoteInst()->getFunction() != &
Scope) {
1323 if (FnReachabilityAA) {
1329 if (!FnReachabilityAA->instructionCanReach(
1330 A, *LeastDominatingWriteInst,
1331 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1332 WriteChecked =
true;
1339 if (ReadChecked && WriteChecked)
1342 if (!DT || !UseDominanceReasoning)
1344 if (!DominatingWrites.count(&Acc))
1346 return LeastDominatingWriteInst != Acc.getRemoteInst();
1351 for (
auto &It : InterferingAccesses) {
1352 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1353 !CanSkipAccess(*It.first, It.second)) {
1354 if (!UserCB(*It.first, It.second))
1364 using namespace AA::PointerInfo;
1366 return indicatePessimisticFixpoint();
1369 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1370 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1371 Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);
1374 const auto &State = OtherAAImpl.getState();
1375 for (
const auto &It : State) {
1376 for (
auto Index : It.getSecond()) {
1377 const auto &RAcc = State.getAccess(
Index);
1378 if (IsByval && !RAcc.isRead())
1380 bool UsedAssumedInformation =
false;
1382 auto Content =
A.translateArgumentToCallSiteContent(
1383 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1384 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1385 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1387 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1388 RAcc.getType(), RAcc.getRemoteInst());
1395 const OffsetInfo &Offsets,
CallBase &CB,
1397 using namespace AA::PointerInfo;
1399 return indicatePessimisticFixpoint();
1401 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1405 const auto &State = OtherAAImpl.getState();
1406 for (
const auto &It : State) {
1407 for (
auto Index : It.getSecond()) {
1408 const auto &RAcc = State.getAccess(
Index);
1409 if (!IsMustAcc && RAcc.isAssumption())
1411 for (
auto Offset : Offsets) {
1415 if (!NewRanges.isUnknown()) {
1416 NewRanges.addToAllOffsets(
Offset);
1421 Changed |= addAccess(
A, NewRanges, CB, RAcc.getContent(), AK,
1422 RAcc.getType(), RAcc.getRemoteInst());
1431 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1435 for (
auto &It : OffsetBins) {
1436 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1437 <<
"] : " << It.getSecond().size() <<
"\n";
1438 for (
auto AccIndex : It.getSecond()) {
1439 auto &Acc = AccessList[AccIndex];
1440 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1441 if (Acc.getLocalInst() != Acc.getRemoteInst())
1442 O <<
" --> " << *Acc.getRemoteInst()
1444 if (!Acc.isWrittenValueYetUndetermined()) {
1445 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1446 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1448 else if (Acc.getWrittenValue())
1449 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1451 O <<
" - c: <unknown>\n";
1458struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1461 : AAPointerInfoImpl(IRP,
A) {}
1468 using namespace AA::PointerInfo;
1471 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1480 if (!VT || VT->getElementCount().isScalable() ||
1482 (*Content)->getType() != VT ||
1483 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1494 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1495 auto *ConstContent = cast<Constant>(*
Content);
1499 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1501 ConstContent, ConstantInt::get(Int32Ty, i));
1504 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1508 for (
auto &ElementOffset : ElementOffsets)
1509 ElementOffset += ElementSize;
1523 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1527 void trackStatistics()
const override {
1528 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1532bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1535 const OffsetInfo &PtrOI,
1537 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1541 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1542 "Don't look for constant values if the offset has already been "
1543 "determined to be unknown.");
1545 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1551 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1555 Union.addToAll(ConstantOffset.getSExtValue());
1560 for (
const auto &VI : VariableOffsets) {
1563 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1569 if (PotentialConstantsAA->undefIsContained())
1577 if (AssumedSet.empty())
1581 for (
const auto &ConstOffset : AssumedSet) {
1582 auto CopyPerOffset =
Union;
1583 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1584 VI.second.getZExtValue());
1585 Product.merge(CopyPerOffset);
1590 UsrOI = std::move(Union);
1595 using namespace AA::PointerInfo;
1598 Value &AssociatedValue = getAssociatedValue();
1601 OffsetInfoMap[&AssociatedValue].
insert(0);
1603 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1614 "CurPtr does not exist in the map!");
1616 auto &UsrOI = OffsetInfoMap[Usr];
1617 auto &PtrOI = OffsetInfoMap[CurPtr];
1618 assert(!PtrOI.isUnassigned() &&
1619 "Cannot pass through if the input Ptr was not visited!");
1625 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1627 User *Usr =
U.getUser();
1628 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1631 "The current pointer offset should have been seeded!");
1632 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1633 "Current pointer should be assigned");
1637 return HandlePassthroughUser(Usr, CurPtr, Follow);
1638 if (!isa<GEPOperator>(CE)) {
1639 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1644 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1647 auto &UsrOI = OffsetInfoMap[Usr];
1648 auto &PtrOI = OffsetInfoMap[CurPtr];
1650 if (UsrOI.isUnknown())
1653 if (PtrOI.isUnknown()) {
1659 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1662 if (isa<PtrToIntInst>(Usr))
1664 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr))
1665 return HandlePassthroughUser(Usr, CurPtr, Follow);
1669 if (
auto *RI = dyn_cast<ReturnInst>(Usr)) {
1670 if (RI->getFunction() == getAssociatedFunction()) {
1671 auto &PtrOI = OffsetInfoMap[CurPtr];
1672 Changed |= setReachesReturn(PtrOI);
1681 if (
auto *
PHI = dyn_cast<PHINode>(Usr)) {
1685 auto &UsrOI = PhiIt->second;
1686 auto &PtrOI = OffsetInfoMap[CurPtr];
1690 if (PtrOI.isUnknown()) {
1691 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1692 << *CurPtr <<
" in " << *
PHI <<
"\n");
1693 Follow = !UsrOI.isUnknown();
1699 if (UsrOI == PtrOI) {
1700 assert(!PtrOI.isUnassigned() &&
1701 "Cannot assign if the current Ptr was not visited!");
1702 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1712 auto It = OffsetInfoMap.
find(CurPtrBase);
1713 if (It == OffsetInfoMap.
end()) {
1714 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1715 << *CurPtr <<
" in " << *
PHI
1716 <<
" (base: " << *CurPtrBase <<
")\n");
1731 *
PHI->getFunction());
1733 auto BaseOI = It->getSecond();
1734 BaseOI.addToAll(
Offset.getZExtValue());
1735 if (IsFirstPHIUser || BaseOI == UsrOI) {
1736 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1737 <<
" in " << *Usr <<
"\n");
1738 return HandlePassthroughUser(Usr, CurPtr, Follow);
1742 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1743 << *CurPtr <<
" in " << *
PHI <<
"\n");
1754 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1762 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1763 OffsetInfoMap[CurPtr].Offsets, Changed,
1768 if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
1769 return II->isAssumeLikeIntrinsic();
1780 }
while (FromI && FromI != ToI);
1786 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1789 if (IntrI.getParent() == BB) {
1790 if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
1796 if ((*PredIt) != BB)
1801 if (SuccBB == IntrBB)
1803 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1807 if (IsImpactedInRange(LoadI->getNextNode(), BB->
getTerminator()))
1809 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1815 std::pair<Value *, IntrinsicInst *> Assumption;
1816 for (
const Use &LoadU : LoadI->
uses()) {
1817 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1818 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1820 for (
const Use &CmpU : CmpI->
uses()) {
1821 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1822 if (!IsValidAssume(*IntrI))
1824 int Idx = CmpI->getOperandUse(0) == LoadU;
1825 Assumption = {CmpI->getOperand(
Idx), IntrI};
1830 if (Assumption.first)
1835 if (!Assumption.first || !Assumption.second)
1839 << *Assumption.second <<
": " << *LoadI
1840 <<
" == " << *Assumption.first <<
"\n");
1841 bool UsedAssumedInformation =
false;
1842 std::optional<Value *>
Content =
nullptr;
1843 if (Assumption.first)
1845 A.getAssumedSimplified(*Assumption.first, *
this,
1847 return handleAccess(
1848 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1849 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1854 for (
auto *OtherOp : OtherOps) {
1855 if (OtherOp == CurPtr) {
1858 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1870 bool UsedAssumedInformation =
false;
1871 std::optional<Value *>
Content =
nullptr;
1875 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1879 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1880 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1881 *StoreI->getValueOperand()->getType(),
1882 {StoreI->getValueOperand()}, AccessKind::AK_W);
1883 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1884 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1885 {RMWI->getValOperand()}, AccessKind::AK_RW);
1886 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1887 return HandleStoreLike(
1888 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1889 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1892 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1896 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1907 Changed = translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
1910 if (!CSArgPI->reachesReturn())
1911 return isValidState();
1914 if (!Callee ||
Callee->arg_size() <= ArgNo)
1916 bool UsedAssumedInformation =
false;
1917 auto ReturnedValue =
A.getAssumedSimplified(
1921 dyn_cast_or_null<Argument>(ReturnedValue.value_or(
nullptr));
1922 auto *Arg =
Callee->getArg(ArgNo);
1923 if (ReturnedArg && Arg != ReturnedArg)
1925 bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
1930 OffsetInfo OI = OffsetInfoMap[CurPtr];
1931 CSArgPI->addReturnedOffsetsTo(OI);
1933 translateAndAddState(
A, *CSRetPI, OI, *CB, IsRetMustAcc) | Changed;
1934 return isValidState();
1936 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1941 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1944 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1945 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1946 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1947 if (OffsetInfoMap.
count(NewU)) {
1949 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1950 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1951 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1955 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1958 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1960 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1962 true, EquivalentUseCB)) {
1963 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1964 return indicatePessimisticFixpoint();
1968 dbgs() <<
"Accesses by bin after update:\n";
1975struct AAPointerInfoReturned final : AAPointerInfoImpl {
1977 : AAPointerInfoImpl(IRP,
A) {}
1981 return indicatePessimisticFixpoint();
1985 void trackStatistics()
const override {
1986 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1990struct AAPointerInfoArgument final : AAPointerInfoFloating {
1992 : AAPointerInfoFloating(IRP,
A) {}
1995 void trackStatistics()
const override {
1996 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2000struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
2002 : AAPointerInfoFloating(IRP,
A) {}
2006 using namespace AA::PointerInfo;
2010 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
2012 if (
auto Length =
MI->getLengthInBytes())
2013 LengthVal =
Length->getSExtValue();
2014 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
2017 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
2019 return indicatePessimisticFixpoint();
2022 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
2024 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
2027 dbgs() <<
"Accesses by bin after update:\n";
2038 Argument *Arg = getAssociatedArgument();
2043 if (ArgAA && ArgAA->getState().isValidState())
2044 return translateAndAddStateFromCallee(
A, *ArgAA,
2045 *cast<CallBase>(getCtxI()));
2047 return indicatePessimisticFixpoint();
2050 bool IsKnownNoCapture;
2051 if (!AA::hasAssumedIRAttr<Attribute::Captures>(
2052 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2053 return indicatePessimisticFixpoint();
2055 bool IsKnown =
false;
2057 return ChangeStatus::UNCHANGED;
2060 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2066 void trackStatistics()
const override {
2067 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2071struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2073 : AAPointerInfoFloating(IRP,
A) {}
2076 void trackStatistics()
const override {
2077 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2091 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2092 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2096 const std::string getAsStr(
Attributor *
A)
const override {
2097 return getAssumed() ?
"nounwind" :
"may-unwind";
2103 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2104 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2105 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2108 if (!
I.mayThrow(
true))
2111 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2112 bool IsKnownNoUnwind;
2113 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2120 bool UsedAssumedInformation =
false;
2121 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2122 UsedAssumedInformation))
2123 return indicatePessimisticFixpoint();
2125 return ChangeStatus::UNCHANGED;
2129struct AANoUnwindFunction final :
public AANoUnwindImpl {
2131 : AANoUnwindImpl(IRP,
A) {}
2138struct AANoUnwindCallSite final
2139 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2141 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2152 case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
2153 case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
2154 case Intrinsic::nvvm_barrier0_and:
2155 case Intrinsic::nvvm_barrier0_or:
2156 case Intrinsic::nvvm_barrier0_popc:
2158 case Intrinsic::amdgcn_s_barrier:
2159 if (ExecutedAligned)
2172 if (
auto *FI = dyn_cast<FenceInst>(
I))
2175 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2182 switch (
I->getOpcode()) {
2183 case Instruction::AtomicRMW:
2184 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2186 case Instruction::Store:
2187 Ordering = cast<StoreInst>(
I)->getOrdering();
2189 case Instruction::Load:
2190 Ordering = cast<LoadInst>(
I)->getOrdering();
2194 "New atomic operations need to be known in the attributor.");
2205 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2206 return !
MI->isVolatile();
2217 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2218 DepClassTy::NONE, IsKnown));
2222 const std::string getAsStr(
Attributor *
A)
const override {
2223 return getAssumed() ?
"nosync" :
"may-sync";
2239 if (
I.mayReadOrWriteMemory())
2244 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2253 bool UsedAssumedInformation =
false;
2254 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2255 UsedAssumedInformation) ||
2256 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2257 UsedAssumedInformation))
2258 return indicatePessimisticFixpoint();
2263struct AANoSyncFunction final :
public AANoSyncImpl {
2265 : AANoSyncImpl(IRP,
A) {}
2272struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2274 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2284struct AANoFreeImpl :
public AANoFree {
2290 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2291 DepClassTy::NONE, IsKnown));
2299 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2301 DepClassTy::REQUIRED, IsKnown);
2304 bool UsedAssumedInformation =
false;
2305 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2306 UsedAssumedInformation))
2307 return indicatePessimisticFixpoint();
2308 return ChangeStatus::UNCHANGED;
2312 const std::string getAsStr(
Attributor *
A)
const override {
2313 return getAssumed() ?
"nofree" :
"may-free";
2317struct AANoFreeFunction final :
public AANoFreeImpl {
2319 : AANoFreeImpl(IRP,
A) {}
2326struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2328 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2335struct AANoFreeFloating : AANoFreeImpl {
2337 : AANoFreeImpl(IRP,
A) {}
2347 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2349 DepClassTy::OPTIONAL, IsKnown))
2350 return ChangeStatus::UNCHANGED;
2352 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2353 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2355 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2363 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2365 DepClassTy::REQUIRED, IsKnown);
2368 if (isa<GetElementPtrInst>(UserI) || isa<PHINode>(UserI) ||
2369 isa<SelectInst>(UserI)) {
2373 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI))
2376 if (isa<ReturnInst>(UserI) && getIRPosition().isArgumentPosition())
2382 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2383 return indicatePessimisticFixpoint();
2385 return ChangeStatus::UNCHANGED;
2390struct AANoFreeArgument final : AANoFreeFloating {
2392 : AANoFreeFloating(IRP,
A) {}
2399struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2401 : AANoFreeFloating(IRP,
A) {}
2409 Argument *Arg = getAssociatedArgument();
2411 return indicatePessimisticFixpoint();
2414 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2415 DepClassTy::REQUIRED, IsKnown))
2416 return ChangeStatus::UNCHANGED;
2417 return indicatePessimisticFixpoint();
2425struct AANoFreeReturned final : AANoFreeFloating {
2427 : AANoFreeFloating(IRP,
A) {
2442 void trackStatistics()
const override {}
2446struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2448 : AANoFreeFloating(IRP,
A) {}
2451 return ChangeStatus::UNCHANGED;
2462 bool IgnoreSubsumingPositions) {
2464 AttrKinds.
push_back(Attribute::NonNull);
2467 AttrKinds.
push_back(Attribute::Dereferenceable);
2468 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2475 if (!Fn->isDeclaration()) {
2485 bool UsedAssumedInformation =
false;
2486 if (!
A.checkForAllInstructions(
2488 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2492 UsedAssumedInformation,
false,
true))
2504 Attribute::NonNull)});
2509static int64_t getKnownNonNullAndDerefBytesForUse(
2511 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2514 const Value *UseV =
U->get();
2521 if (isa<CastInst>(
I)) {
2526 if (isa<GetElementPtrInst>(
I)) {
2536 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2539 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2556 bool IsKnownNonNull;
2557 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2559 IsNonNull |= IsKnownNonNull;
2566 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2567 Loc->Size.isScalable() ||
I->isVolatile())
2573 if (
Base &&
Base == &AssociatedValue) {
2574 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2576 return std::max(int64_t(0), DerefBytes);
2583 int64_t DerefBytes = Loc->Size.getValue();
2585 return std::max(int64_t(0), DerefBytes);
2596 Value &
V = *getAssociatedValue().stripPointerCasts();
2597 if (isa<ConstantPointerNull>(V)) {
2598 indicatePessimisticFixpoint();
2603 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2609 bool IsNonNull =
false;
2610 bool TrackUse =
false;
2611 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2612 IsNonNull, TrackUse);
2613 State.setKnown(IsNonNull);
2618 const std::string getAsStr(
Attributor *
A)
const override {
2619 return getAssumed() ?
"nonnull" :
"may-null";
2624struct AANonNullFloating :
public AANonNullImpl {
2626 : AANonNullImpl(IRP,
A) {}
2631 bool IsKnownNonNull;
2632 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2633 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2637 bool UsedAssumedInformation =
false;
2638 Value *AssociatedValue = &getAssociatedValue();
2640 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2645 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2649 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2651 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2652 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2655 return ChangeStatus::UNCHANGED;
2656 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2657 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2659 DepClassTy::OPTIONAL, IsKnown) &&
2660 AA::hasAssumedIRAttr<Attribute::NonNull>(
2662 DepClassTy::OPTIONAL, IsKnown))
2663 return ChangeStatus::UNCHANGED;
2670 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2671 return indicatePessimisticFixpoint();
2672 return ChangeStatus::UNCHANGED;
2675 for (
const auto &VAC : Values)
2677 return indicatePessimisticFixpoint();
2679 return ChangeStatus::UNCHANGED;
2687struct AANonNullReturned final
2688 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2689 false, AANonNull::IRAttributeKind, false> {
2696 const std::string getAsStr(
Attributor *
A)
const override {
2697 return getAssumed() ?
"nonnull" :
"may-null";
2705struct AANonNullArgument final
2706 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2708 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2714struct AANonNullCallSiteArgument final : AANonNullFloating {
2716 : AANonNullFloating(IRP,
A) {}
2723struct AANonNullCallSiteReturned final
2724 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2726 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2742 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2743 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2748 const std::string getAsStr(
Attributor *
A)
const override {
2749 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2753struct AAMustProgressFunction final : AAMustProgressImpl {
2755 : AAMustProgressImpl(IRP,
A) {}
2760 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2761 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2763 return indicateOptimisticFixpoint();
2764 return ChangeStatus::UNCHANGED;
2769 bool IsKnownMustProgress;
2770 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2771 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2775 bool AllCallSitesKnown =
true;
2776 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2779 return indicatePessimisticFixpoint();
2781 return ChangeStatus::UNCHANGED;
2785 void trackStatistics()
const override {
2791struct AAMustProgressCallSite final : AAMustProgressImpl {
2793 : AAMustProgressImpl(IRP,
A) {}
2802 bool IsKnownMustProgress;
2803 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2804 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2805 return indicatePessimisticFixpoint();
2806 return ChangeStatus::UNCHANGED;
2810 void trackStatistics()
const override {
2825 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2826 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2831 const std::string getAsStr(
Attributor *
A)
const override {
2832 return getAssumed() ?
"norecurse" :
"may-recurse";
2836struct AANoRecurseFunction final : AANoRecurseImpl {
2838 : AANoRecurseImpl(IRP,
A) {}
2845 bool IsKnownNoRecurse;
2846 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2849 DepClassTy::NONE, IsKnownNoRecurse))
2851 return IsKnownNoRecurse;
2853 bool UsedAssumedInformation =
false;
2854 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2855 UsedAssumedInformation)) {
2861 if (!UsedAssumedInformation)
2862 indicateOptimisticFixpoint();
2863 return ChangeStatus::UNCHANGED;
2868 DepClassTy::REQUIRED);
2869 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2870 return indicatePessimisticFixpoint();
2871 return ChangeStatus::UNCHANGED;
2878struct AANoRecurseCallSite final
2879 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2881 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2896 const std::string getAsStr(
Attributor *
A)
const override {
2897 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2901struct AANonConvergentFunction final : AANonConvergentImpl {
2903 : AANonConvergentImpl(IRP,
A) {}
2909 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2910 CallBase &CB = cast<CallBase>(Inst);
2912 if (!Callee ||
Callee->isIntrinsic()) {
2915 if (
Callee->isDeclaration()) {
2916 return !
Callee->hasFnAttribute(Attribute::Convergent);
2923 bool UsedAssumedInformation =
false;
2924 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2925 UsedAssumedInformation)) {
2926 return indicatePessimisticFixpoint();
2928 return ChangeStatus::UNCHANGED;
2932 if (isKnownNotConvergent() &&
2933 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2934 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2935 return ChangeStatus::CHANGED;
2937 return ChangeStatus::UNCHANGED;
2954 const size_t UBPrevSize = KnownUBInsts.size();
2955 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2959 if (
I.isVolatile() &&
I.mayWriteToMemory())
2963 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2972 "Expected pointer operand of memory accessing instruction");
2976 std::optional<Value *> SimplifiedPtrOp =
2977 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2978 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2980 const Value *PtrOpVal = *SimplifiedPtrOp;
2985 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2986 AssumedNoUBInsts.insert(&
I);
2998 AssumedNoUBInsts.insert(&
I);
3000 KnownUBInsts.insert(&
I);
3009 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3013 auto *BrInst = cast<BranchInst>(&
I);
3016 if (BrInst->isUnconditional())
3021 std::optional<Value *> SimplifiedCond =
3022 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3023 if (!SimplifiedCond || !*SimplifiedCond)
3025 AssumedNoUBInsts.insert(&
I);
3033 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3042 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3048 if (idx >=
Callee->arg_size())
3060 bool IsKnownNoUndef;
3061 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3062 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3063 if (!IsKnownNoUndef)
3065 bool UsedAssumedInformation =
false;
3066 std::optional<Value *> SimplifiedVal =
3069 if (UsedAssumedInformation)
3071 if (SimplifiedVal && !*SimplifiedVal)
3073 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3074 KnownUBInsts.insert(&
I);
3078 !isa<ConstantPointerNull>(**SimplifiedVal))
3080 bool IsKnownNonNull;
3081 AA::hasAssumedIRAttr<Attribute::NonNull>(
3082 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3084 KnownUBInsts.insert(&
I);
3090 auto &RI = cast<ReturnInst>(
I);
3093 std::optional<Value *> SimplifiedRetValue =
3094 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3095 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3112 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3113 bool IsKnownNonNull;
3114 AA::hasAssumedIRAttr<Attribute::NonNull>(
3118 KnownUBInsts.insert(&
I);
3124 bool UsedAssumedInformation =
false;
3125 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3126 {Instruction::Load, Instruction::Store,
3127 Instruction::AtomicCmpXchg,
3128 Instruction::AtomicRMW},
3129 UsedAssumedInformation,
3131 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3132 UsedAssumedInformation,
3134 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3135 UsedAssumedInformation);
3139 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3141 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3142 bool IsKnownNoUndef;
3143 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3144 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3146 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3147 {Instruction::Ret}, UsedAssumedInformation,
3152 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3153 UBPrevSize != KnownUBInsts.size())
3154 return ChangeStatus::CHANGED;
3155 return ChangeStatus::UNCHANGED;
3159 return KnownUBInsts.count(
I);
3162 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3169 switch (
I->getOpcode()) {
3170 case Instruction::Load:
3171 case Instruction::Store:
3172 case Instruction::AtomicCmpXchg:
3173 case Instruction::AtomicRMW:
3174 return !AssumedNoUBInsts.count(
I);
3175 case Instruction::Br: {
3176 auto *BrInst = cast<BranchInst>(
I);
3177 if (BrInst->isUnconditional())
3179 return !AssumedNoUBInsts.count(
I);
3188 if (KnownUBInsts.empty())
3189 return ChangeStatus::UNCHANGED;
3191 A.changeToUnreachableAfterManifest(
I);
3192 return ChangeStatus::CHANGED;
3196 const std::string getAsStr(
Attributor *
A)
const override {
3197 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3242 bool UsedAssumedInformation =
false;
3243 std::optional<Value *> SimplifiedV =
3246 if (!UsedAssumedInformation) {
3251 KnownUBInsts.insert(
I);
3252 return std::nullopt;
3258 if (isa<UndefValue>(V)) {
3259 KnownUBInsts.insert(
I);
3260 return std::nullopt;
3266struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3268 : AAUndefinedBehaviorImpl(IRP,
A) {}
3271 void trackStatistics()
const override {
3273 "Number of instructions known to have UB");
3275 KnownUBInsts.size();
3296 if (SCCI.hasCycle())
3306 for (
auto *L : LI->getLoopsInPreorder()) {
3320 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3321 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3326 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3327 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3332 return IsKnown || !KnownOnly;
3338 if (isImpliedByMustprogressAndReadonly(
A,
false))
3339 return ChangeStatus::UNCHANGED;
3344 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3345 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3351 bool IsKnownNoRecurse;
3352 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3353 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3356 bool UsedAssumedInformation =
false;
3357 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3358 UsedAssumedInformation))
3359 return indicatePessimisticFixpoint();
3361 return ChangeStatus::UNCHANGED;
3365 const std::string getAsStr(
Attributor *
A)
const override {
3366 return getAssumed() ?
"willreturn" :
"may-noreturn";
3370struct AAWillReturnFunction final : AAWillReturnImpl {
3372 : AAWillReturnImpl(IRP,
A) {}
3376 AAWillReturnImpl::initialize(
A);
3379 assert(
F &&
"Did expect an anchor function");
3380 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3381 indicatePessimisticFixpoint();
3389struct AAWillReturnCallSite final
3390 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3392 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3396 if (isImpliedByMustprogressAndReadonly(
A,
false))
3397 return ChangeStatus::UNCHANGED;
3399 return AACalleeToCallSite::updateImpl(
A);
3421 const ToTy *To =
nullptr;
3431 assert(Hash == 0 &&
"Computed hash twice!");
3435 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3436 InstSetDMI::getHashValue(ExclusionSet));
3446 :
From(&
From), To(&To), ExclusionSet(ES) {
3448 if (!ES || ES->
empty()) {
3449 ExclusionSet =
nullptr;
3450 }
else if (MakeUnique) {
3451 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3456 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3469 return &TombstoneKey;
3476 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3478 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3482#define DefineKeys(ToTy) \
3484 ReachabilityQueryInfo<ToTy> \
3485 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3486 ReachabilityQueryInfo<ToTy>( \
3487 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3488 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3490 ReachabilityQueryInfo<ToTy> \
3491 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3492 ReachabilityQueryInfo<ToTy>( \
3493 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3494 DenseMapInfo<const ToTy *>::getTombstoneKey());
3503template <
typename BaseTy,
typename ToTy>
3504struct CachedReachabilityAA :
public BaseTy {
3510 bool isQueryAA()
const override {
return true; }
3515 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3516 RQITy *RQI = QueryVector[
u];
3517 if (RQI->Result == RQITy::Reachable::No &&
3519 Changed = ChangeStatus::CHANGED;
3525 bool IsTemporaryRQI) = 0;
3528 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3533 QueryCache.erase(&RQI);
3539 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3540 RQITy PlainRQI(RQI.From, RQI.To);
3541 if (!QueryCache.count(&PlainRQI)) {
3542 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3544 QueryVector.push_back(RQIPtr);
3545 QueryCache.insert(RQIPtr);
3550 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3551 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3552 "Did not expect empty set!");
3553 RQITy *RQIPtr =
new (
A.Allocator)
3554 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3555 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3557 assert(!QueryCache.count(RQIPtr));
3558 QueryVector.push_back(RQIPtr);
3559 QueryCache.insert(RQIPtr);
3562 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3563 A.registerForUpdate(*
this);
3564 return Result == RQITy::Reachable::Yes;
3567 const std::string getAsStr(
Attributor *
A)
const override {
3569 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3572 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3573 typename RQITy::Reachable &
Result) {
3574 if (!this->getState().isValidState()) {
3575 Result = RQITy::Reachable::Yes;
3581 if (StackRQI.ExclusionSet) {
3582 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3583 auto It = QueryCache.find(&PlainRQI);
3584 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3585 Result = RQITy::Reachable::No;
3590 auto It = QueryCache.find(&StackRQI);
3591 if (It != QueryCache.end()) {
3598 QueryCache.insert(&StackRQI);
3607struct AAIntraFnReachabilityFunction final
3608 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3609 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3616 bool isAssumedReachable(
3619 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3623 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3624 typename RQITy::Reachable
Result;
3625 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3626 return NonConstThis->isReachableImpl(
A, StackRQI,
3628 return Result == RQITy::Reachable::Yes;
3635 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3638 [&](
const auto &DeadEdge) {
3639 return LivenessAA->isEdgeDead(DeadEdge.first,
3643 return LivenessAA->isAssumedDead(BB);
3645 return ChangeStatus::UNCHANGED;
3649 return Base::updateImpl(
A);
3653 bool IsTemporaryRQI)
override {
3655 bool UsedExclusionSet =
false;
3660 while (IP && IP != &To) {
3661 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3662 UsedExclusionSet =
true;
3673 "Not an intra-procedural query!");
3677 if (FromBB == ToBB &&
3678 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3679 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3684 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3685 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3690 if (RQI.ExclusionSet)
3691 for (
auto *
I : *RQI.ExclusionSet)
3692 if (
I->getFunction() == Fn)
3693 ExclusionBlocks.
insert(
I->getParent());
3696 if (ExclusionBlocks.
count(FromBB) &&
3699 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3702 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3703 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3704 DeadBlocks.insert(ToBB);
3705 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3714 while (!Worklist.
empty()) {
3716 if (!Visited.
insert(BB).second)
3719 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3720 LocalDeadEdges.
insert({BB, SuccBB});
3725 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3728 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3731 if (ExclusionBlocks.
count(SuccBB)) {
3732 UsedExclusionSet =
true;
3739 DeadEdges.insert_range(LocalDeadEdges);
3740 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3745 void trackStatistics()
const override {}
3765 bool IgnoreSubsumingPositions) {
3766 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3767 "Unexpected attribute kind");
3770 if (isa<AllocaInst>(Val))
3773 IgnoreSubsumingPositions =
true;
3776 if (isa<UndefValue>(Val))
3779 if (isa<ConstantPointerNull>(Val) &&
3784 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3785 IgnoreSubsumingPositions, Attribute::NoAlias))
3795 "Noalias is a pointer attribute");
3798 const std::string getAsStr(
Attributor *
A)
const override {
3799 return getAssumed() ?
"noalias" :
"may-alias";
3804struct AANoAliasFloating final : AANoAliasImpl {
3806 : AANoAliasImpl(IRP,
A) {}
3811 return indicatePessimisticFixpoint();
3815 void trackStatistics()
const override {
3821struct AANoAliasArgument final
3822 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3823 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3835 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3837 DepClassTy::OPTIONAL, IsKnownNoSycn))
3838 return Base::updateImpl(
A);
3843 return Base::updateImpl(
A);
3847 bool UsedAssumedInformation =
false;
3848 if (
A.checkForAllCallSites(
3850 true, UsedAssumedInformation))
3851 return Base::updateImpl(
A);
3859 return indicatePessimisticFixpoint();
3866struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3868 : AANoAliasImpl(IRP,
A) {}
3874 const CallBase &CB,
unsigned OtherArgNo) {
3876 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3888 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3889 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3896 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3898 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3899 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3905 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3909 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3911 "callsite arguments: "
3912 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3913 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3918 bool isKnownNoAliasDueToNoAliasPreservation(
3932 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3943 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3948 bool IsKnownNoCapture;
3949 if (AA::hasAssumedIRAttr<Attribute::Captures>(
3951 DepClassTy::OPTIONAL, IsKnownNoCapture))
3957 A, *UserI, *getCtxI(), *
this,
nullptr,
3958 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3973 LLVM_DEBUG(
dbgs() <<
"[AANoAliasCSArg] Unknown user: " << *UserI <<
"\n");
3977 bool IsKnownNoCapture;
3979 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
3980 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3981 if (!IsAssumedNoCapture &&
3983 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3985 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3986 <<
" cannot be noalias as it is potentially captured\n");
3991 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3996 const auto &CB = cast<CallBase>(getAnchorValue());
3997 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3998 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4008 auto *MemBehaviorAA =
4011 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4012 return ChangeStatus::UNCHANGED;
4015 bool IsKnownNoAlias;
4017 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4018 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4020 <<
" is not no-alias at the definition\n");
4021 return indicatePessimisticFixpoint();
4025 if (MemBehaviorAA &&
4026 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4028 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4029 return ChangeStatus::UNCHANGED;
4032 return indicatePessimisticFixpoint();
4040struct AANoAliasReturned final : AANoAliasImpl {
4042 : AANoAliasImpl(IRP,
A) {}
4047 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4048 if (
Constant *
C = dyn_cast<Constant>(&RV))
4049 if (
C->isNullValue() || isa<UndefValue>(
C))
4054 if (!isa<CallBase>(&RV))
4058 bool IsKnownNoAlias;
4059 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4060 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4063 bool IsKnownNoCapture;
4065 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
4066 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4068 return IsAssumedNoCapture ||
4072 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4073 return indicatePessimisticFixpoint();
4075 return ChangeStatus::UNCHANGED;
4083struct AANoAliasCallSiteReturned final
4084 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4086 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4096struct AAIsDeadValueImpl :
public AAIsDead {
4100 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4103 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4106 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4109 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4112 bool isAssumedDead(
const Instruction *
I)
const override {
4113 return I == getCtxI() && isAssumedDead();
4117 bool isKnownDead(
const Instruction *
I)
const override {
4118 return isAssumedDead(
I) && isKnownDead();
4122 const std::string getAsStr(
Attributor *
A)
const override {
4123 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4129 if (
V.getType()->isVoidTy() ||
V.use_empty())
4133 if (!isa<Constant>(V)) {
4134 if (
auto *
I = dyn_cast<Instruction>(&V))
4135 if (!
A.isRunOn(*
I->getFunction()))
4137 bool UsedAssumedInformation =
false;
4138 std::optional<Constant *>
C =
4139 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4144 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4149 return A.checkForAllUses(UsePred, *
this, V,
false,
4150 DepClassTy::REQUIRED,
4159 auto *CB = dyn_cast<CallBase>(
I);
4160 if (!CB || isa<IntrinsicInst>(CB))
4165 bool IsKnownNoUnwind;
4166 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4167 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4175struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4177 : AAIsDeadValueImpl(IRP,
A) {}
4181 AAIsDeadValueImpl::initialize(
A);
4183 if (isa<UndefValue>(getAssociatedValue())) {
4184 indicatePessimisticFixpoint();
4188 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4189 if (!isAssumedSideEffectFree(
A,
I)) {
4190 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4191 indicatePessimisticFixpoint();
4193 removeAssumedBits(HAS_NO_EFFECT);
4200 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4202 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4209 if (
SI.isVolatile())
4215 bool UsedAssumedInformation =
false;
4216 if (!AssumeOnlyInst) {
4217 PotentialCopies.clear();
4219 UsedAssumedInformation)) {
4222 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4226 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4227 <<
" potential copies.\n");
4232 UsedAssumedInformation))
4234 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4236 auto &UserI = cast<Instruction>(*U.getUser());
4237 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4239 AssumeOnlyInst->insert(&UserI);
4242 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4248 <<
" is assumed live!\n");
4254 const std::string getAsStr(
Attributor *
A)
const override {
4255 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4256 if (isa_and_nonnull<StoreInst>(
I))
4258 return "assumed-dead-store";
4259 if (isa_and_nonnull<FenceInst>(
I))
4261 return "assumed-dead-fence";
4262 return AAIsDeadValueImpl::getAsStr(
A);
4267 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4268 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4269 if (!isDeadStore(
A, *SI))
4270 return indicatePessimisticFixpoint();
4271 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4272 if (!isDeadFence(
A, *FI))
4273 return indicatePessimisticFixpoint();
4275 if (!isAssumedSideEffectFree(
A,
I))
4276 return indicatePessimisticFixpoint();
4277 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4278 return indicatePessimisticFixpoint();
4283 bool isRemovableStore()
const override {
4284 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4289 Value &
V = getAssociatedValue();
4290 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4295 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4297 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4300 A.deleteAfterManifest(*
I);
4301 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4303 for (
auto *Usr : AOI->
users())
4304 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4305 A.deleteAfterManifest(*AOI);
4309 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4311 A.deleteAfterManifest(*FI);
4314 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4315 A.deleteAfterManifest(*
I);
4323 void trackStatistics()
const override {
4332struct AAIsDeadArgument :
public AAIsDeadFloating {
4334 : AAIsDeadFloating(IRP,
A) {}
4338 Argument &Arg = *getAssociatedArgument();
4339 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4340 if (
A.registerFunctionSignatureRewrite(
4344 return ChangeStatus::CHANGED;
4346 return ChangeStatus::UNCHANGED;
4353struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4355 : AAIsDeadValueImpl(IRP,
A) {}
4359 AAIsDeadValueImpl::initialize(
A);
4360 if (isa<UndefValue>(getAssociatedValue()))
4361 indicatePessimisticFixpoint();
4370 Argument *Arg = getAssociatedArgument();
4372 return indicatePessimisticFixpoint();
4374 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4376 return indicatePessimisticFixpoint();
4382 CallBase &CB = cast<CallBase>(getAnchorValue());
4384 assert(!isa<UndefValue>(
U.get()) &&
4385 "Expected undef values to be filtered out!");
4387 if (
A.changeUseAfterManifest(U, UV))
4388 return ChangeStatus::CHANGED;
4389 return ChangeStatus::UNCHANGED;
4396struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4398 : AAIsDeadFloating(IRP,
A) {}
4401 bool isAssumedDead()
const override {
4402 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4407 AAIsDeadFloating::initialize(
A);
4408 if (isa<UndefValue>(getAssociatedValue())) {
4409 indicatePessimisticFixpoint();
4414 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4420 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4421 IsAssumedSideEffectFree =
false;
4422 Changed = ChangeStatus::CHANGED;
4424 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4425 return indicatePessimisticFixpoint();
4430 void trackStatistics()
const override {
4431 if (IsAssumedSideEffectFree)
4438 const std::string getAsStr(
Attributor *
A)
const override {
4439 return isAssumedDead()
4441 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4445 bool IsAssumedSideEffectFree =
true;
4448struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4450 : AAIsDeadValueImpl(IRP,
A) {}
4455 bool UsedAssumedInformation =
false;
4456 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4457 {Instruction::Ret}, UsedAssumedInformation);
4460 if (ACS.isCallbackCall() || !ACS.getInstruction())
4462 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4465 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4466 UsedAssumedInformation))
4467 return indicatePessimisticFixpoint();
4469 return ChangeStatus::UNCHANGED;
4475 bool AnyChange =
false;
4483 bool UsedAssumedInformation =
false;
4484 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4485 UsedAssumedInformation);
4486 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4493struct AAIsDeadFunction :
public AAIsDead {
4499 assert(
F &&
"Did expect an anchor function");
4500 if (!isAssumedDeadInternalFunction(
A)) {
4501 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4502 assumeLive(
A,
F->getEntryBlock());
4506 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4507 if (!getAnchorScope()->hasLocalLinkage())
4509 bool UsedAssumedInformation =
false;
4511 true, UsedAssumedInformation);
4515 const std::string getAsStr(
Attributor *
A)
const override {
4516 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4517 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4518 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4519 std::to_string(KnownDeadEnds.size()) +
"]";
4524 assert(getState().isValidState() &&
4525 "Attempted to manifest an invalid state!");
4530 if (AssumedLiveBlocks.empty()) {
4531 A.deleteAfterManifest(
F);
4532 return ChangeStatus::CHANGED;
4538 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4540 KnownDeadEnds.set_union(ToBeExploredFrom);
4541 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4542 auto *CB = dyn_cast<CallBase>(DeadEndI);
4545 bool IsKnownNoReturn;
4546 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4549 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4552 if (
auto *
II = dyn_cast<InvokeInst>(DeadEndI))
4553 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4555 A.changeToUnreachableAfterManifest(
4556 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4557 HasChanged = ChangeStatus::CHANGED;
4562 if (!AssumedLiveBlocks.count(&BB)) {
4563 A.deleteAfterManifest(BB);
4565 HasChanged = ChangeStatus::CHANGED;
4575 assert(
From->getParent() == getAnchorScope() &&
4577 "Used AAIsDead of the wrong function");
4578 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4582 void trackStatistics()
const override {}
4585 bool isAssumedDead()
const override {
return false; }
4588 bool isKnownDead()
const override {
return false; }
4591 bool isAssumedDead(
const BasicBlock *BB)
const override {
4593 "BB must be in the same anchor scope function.");
4597 return !AssumedLiveBlocks.count(BB);
4601 bool isKnownDead(
const BasicBlock *BB)
const override {
4602 return getKnown() && isAssumedDead(BB);
4606 bool isAssumedDead(
const Instruction *
I)
const override {
4607 assert(
I->getParent()->getParent() == getAnchorScope() &&
4608 "Instruction must be in the same anchor scope function.");
4615 if (!AssumedLiveBlocks.count(
I->getParent()))
4621 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4629 bool isKnownDead(
const Instruction *
I)
const override {
4630 return getKnown() && isAssumedDead(
I);
4636 if (!AssumedLiveBlocks.insert(&BB).second)
4644 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4646 if (
F->hasLocalLinkage())
4647 A.markLiveInternalFunction(*
F);
4671 bool IsKnownNoReturn;
4672 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4674 return !IsKnownNoReturn;
4686 bool UsedAssumedInformation =
4687 identifyAliveSuccessors(
A, cast<CallBase>(
II), AA, AliveSuccessors);
4692 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4693 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4697 bool IsKnownNoUnwind;
4698 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4700 UsedAssumedInformation |= !IsKnownNoUnwind;
4702 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4705 return UsedAssumedInformation;
4712 bool UsedAssumedInformation =
false;
4716 std::optional<Constant *>
C =
4717 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4718 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4720 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4722 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4727 UsedAssumedInformation =
false;
4730 return UsedAssumedInformation;
4737 bool UsedAssumedInformation =
false;
4741 UsedAssumedInformation)) {
4748 if (Values.
empty() ||
4749 (Values.
size() == 1 &&
4750 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4752 return UsedAssumedInformation;
4755 Type &Ty = *
SI.getCondition()->getType();
4757 auto CheckForConstantInt = [&](
Value *
V) {
4758 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4766 return CheckForConstantInt(
VAC.getValue());
4770 return UsedAssumedInformation;
4773 unsigned MatchedCases = 0;
4774 for (
const auto &CaseIt :
SI.cases()) {
4775 if (
Constants.count(CaseIt.getCaseValue())) {
4777 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4784 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4785 return UsedAssumedInformation;
4791 if (AssumedLiveBlocks.empty()) {
4792 if (isAssumedDeadInternalFunction(
A))
4796 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4797 assumeLive(
A,
F->getEntryBlock());
4801 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4802 << getAnchorScope()->
size() <<
"] BBs and "
4803 << ToBeExploredFrom.size() <<
" exploration points and "
4804 << KnownDeadEnds.size() <<
" known dead ends\n");
4809 ToBeExploredFrom.end());
4810 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4813 while (!Worklist.
empty()) {
4819 while (!
I->isTerminator() && !isa<CallBase>(
I))
4820 I =
I->getNextNode();
4822 AliveSuccessors.
clear();
4824 bool UsedAssumedInformation =
false;
4825 switch (
I->getOpcode()) {
4829 "Expected non-terminators to be handled already!");
4833 case Instruction::Call:
4834 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4835 *
this, AliveSuccessors);
4837 case Instruction::Invoke:
4838 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4839 *
this, AliveSuccessors);
4841 case Instruction::Br:
4842 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4843 *
this, AliveSuccessors);
4845 case Instruction::Switch:
4846 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4847 *
this, AliveSuccessors);
4851 if (UsedAssumedInformation) {
4852 NewToBeExploredFrom.insert(
I);
4853 }
else if (AliveSuccessors.
empty() ||
4854 (
I->isTerminator() &&
4855 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4856 if (KnownDeadEnds.insert(
I))
4861 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4862 << UsedAssumedInformation <<
"\n");
4864 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4865 if (!
I->isTerminator()) {
4866 assert(AliveSuccessors.size() == 1 &&
4867 "Non-terminator expected to have a single successor!");
4871 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4872 if (AssumedLiveEdges.insert(Edge).second)
4874 if (assumeLive(
A, *AliveSuccessor->getParent()))
4881 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4883 return !ToBeExploredFrom.count(I);
4886 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4895 if (ToBeExploredFrom.empty() &&
4896 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4898 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4900 return indicatePessimisticFixpoint();
4905struct AAIsDeadCallSite final : AAIsDeadFunction {
4907 : AAIsDeadFunction(IRP,
A) {}
4916 "supported for call sites yet!");
4921 return indicatePessimisticFixpoint();
4925 void trackStatistics()
const override {}
4939 Value &
V = *getAssociatedValue().stripPointerCasts();
4941 A.getAttrs(getIRPosition(),
4942 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4945 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4948 bool IsKnownNonNull;
4949 AA::hasAssumedIRAttr<Attribute::NonNull>(
4950 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4952 bool CanBeNull, CanBeFreed;
4953 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4954 A.getDataLayout(), CanBeNull, CanBeFreed));
4957 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4962 StateType &getState()
override {
return *
this; }
4963 const StateType &getState()
const override {
return *
this; }
4969 const Value *UseV =
U->get();
4974 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4979 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4980 if (
Base &&
Base == &getAssociatedValue())
4981 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4987 bool IsNonNull =
false;
4988 bool TrackUse =
false;
4989 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4990 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4991 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4992 <<
" for instruction " << *
I <<
"\n");
4994 addAccessedBytesForUse(
A, U,
I, State);
4995 State.takeKnownDerefBytesMaximum(DerefBytes);
5002 bool IsKnownNonNull;
5003 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5004 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5005 if (IsAssumedNonNull &&
5006 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5007 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5008 return ChangeStatus::CHANGED;
5016 bool IsKnownNonNull;
5017 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5018 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5019 if (IsAssumedNonNull)
5021 Ctx, getAssumedDereferenceableBytes()));
5024 Ctx, getAssumedDereferenceableBytes()));
5028 const std::string getAsStr(
Attributor *
A)
const override {
5029 if (!getAssumedDereferenceableBytes())
5030 return "unknown-dereferenceable";
5031 bool IsKnownNonNull;
5032 bool IsAssumedNonNull =
false;
5034 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5035 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5036 return std::string(
"dereferenceable") +
5037 (IsAssumedNonNull ?
"" :
"_or_null") +
5038 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5039 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5040 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5041 (!
A ?
" [non-null is unknown]" :
"");
5046struct AADereferenceableFloating : AADereferenceableImpl {
5048 : AADereferenceableImpl(IRP,
A) {}
5053 bool UsedAssumedInformation =
false;
5055 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5057 Values.
push_back({getAssociatedValue(), getCtxI()});
5060 Stripped = Values.
size() != 1 ||
5061 Values.
front().getValue() != &getAssociatedValue();
5067 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5069 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5077 int64_t DerefBytes = 0;
5078 if (!AA || (!Stripped &&
this == AA)) {
5081 bool CanBeNull, CanBeFreed;
5083 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5084 T.GlobalState.indicatePessimisticFixpoint();
5087 DerefBytes =
DS.DerefBytesState.getAssumed();
5088 T.GlobalState &=
DS.GlobalState;
5094 int64_t OffsetSExt =
Offset.getSExtValue();
5098 T.takeAssumedDerefBytesMinimum(
5099 std::max(int64_t(0), DerefBytes - OffsetSExt));
5104 T.takeKnownDerefBytesMaximum(
5105 std::max(int64_t(0), DerefBytes - OffsetSExt));
5106 T.indicatePessimisticFixpoint();
5107 }
else if (OffsetSExt > 0) {
5113 T.indicatePessimisticFixpoint();
5117 return T.isValidState();
5120 for (
const auto &VAC : Values)
5121 if (!VisitValueCB(*
VAC.getValue()))
5122 return indicatePessimisticFixpoint();
5128 void trackStatistics()
const override {
5134struct AADereferenceableReturned final
5135 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5137 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5142 void trackStatistics()
const override {
5148struct AADereferenceableArgument final
5149 : AAArgumentFromCallSiteArguments<AADereferenceable,
5150 AADereferenceableImpl> {
5152 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5157 void trackStatistics()
const override {
5163struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5165 : AADereferenceableFloating(IRP,
A) {}
5168 void trackStatistics()
const override {
5174struct AADereferenceableCallSiteReturned final
5175 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5176 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5181 void trackStatistics()
const override {
5191 Value &AssociatedValue,
const Use *U,
5195 if (isa<CastInst>(
I)) {
5197 TrackUse = !isa<PtrToIntInst>(
I);
5200 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5201 if (
GEP->hasAllConstantIndices())
5207 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5221 const Value *UseV =
U->get();
5222 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5223 if (
SI->getPointerOperand() == UseV)
5224 MA =
SI->getAlign();
5225 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5226 if (LI->getPointerOperand() == UseV)
5227 MA = LI->getAlign();
5228 }
else if (
auto *AI = dyn_cast<AtomicRMWInst>(
I)) {
5229 if (AI->getPointerOperand() == UseV)
5230 MA = AI->getAlign();
5231 }
else if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
5232 if (AI->getPointerOperand() == UseV)
5233 MA = AI->getAlign();
5239 unsigned Alignment = MA->value();
5243 if (
Base == &AssociatedValue) {
5262 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5264 takeKnownMaximum(Attr.getValueAsInt());
5266 Value &
V = *getAssociatedValue().stripPointerCasts();
5267 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5270 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5278 Value &AssociatedValue = getAssociatedValue();
5279 if (isa<ConstantData>(AssociatedValue))
5280 return ChangeStatus::UNCHANGED;
5282 for (
const Use &U : AssociatedValue.
uses()) {
5283 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5284 if (
SI->getPointerOperand() == &AssociatedValue)
5285 if (
SI->getAlign() < getAssumedAlign()) {
5287 "Number of times alignment added to a store");
5288 SI->setAlignment(getAssumedAlign());
5289 InstrChanged = ChangeStatus::CHANGED;
5291 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5292 if (LI->getPointerOperand() == &AssociatedValue)
5293 if (LI->getAlign() < getAssumedAlign()) {
5294 LI->setAlignment(getAssumedAlign());
5296 "Number of times alignment added to a load");
5297 InstrChanged = ChangeStatus::CHANGED;
5299 }
else if (
auto *RMW = dyn_cast<AtomicRMWInst>(
U.getUser())) {
5300 if (RMW->getPointerOperand() == &AssociatedValue) {
5301 if (RMW->getAlign() < getAssumedAlign()) {
5303 "Number of times alignment added to atomicrmw");
5305 RMW->setAlignment(getAssumedAlign());
5306 InstrChanged = ChangeStatus::CHANGED;
5309 }
else if (
auto *CAS = dyn_cast<AtomicCmpXchgInst>(
U.getUser())) {
5310 if (CAS->getPointerOperand() == &AssociatedValue) {
5311 if (CAS->getAlign() < getAssumedAlign()) {
5313 "Number of times alignment added to cmpxchg");
5314 CAS->setAlignment(getAssumedAlign());
5315 InstrChanged = ChangeStatus::CHANGED;
5323 Align InheritAlign =
5324 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5325 if (InheritAlign >= getAssumedAlign())
5326 return InstrChanged;
5327 return Changed | InstrChanged;
5337 if (getAssumedAlign() > 1)
5345 bool TrackUse =
false;
5347 unsigned int KnownAlign =
5348 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5349 State.takeKnownMaximum(KnownAlign);
5355 const std::string getAsStr(
Attributor *
A)
const override {
5356 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5357 std::to_string(getAssumedAlign().
value()) +
">";
5362struct AAAlignFloating : AAAlignImpl {
5370 bool UsedAssumedInformation =
false;
5372 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5374 Values.
push_back({getAssociatedValue(), getCtxI()});
5377 Stripped = Values.
size() != 1 ||
5378 Values.
front().getValue() != &getAssociatedValue();
5382 auto VisitValueCB = [&](
Value &
V) ->
bool {
5383 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5386 DepClassTy::REQUIRED);
5387 if (!AA || (!Stripped &&
this == AA)) {
5389 unsigned Alignment = 1;
5402 Alignment =
V.getPointerAlignment(
DL).value();
5405 T.takeKnownMaximum(Alignment);
5406 T.indicatePessimisticFixpoint();
5412 return T.isValidState();
5415 for (
const auto &VAC : Values) {
5416 if (!VisitValueCB(*
VAC.getValue()))
5417 return indicatePessimisticFixpoint();
5430struct AAAlignReturned final
5431 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5432 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5440struct AAAlignArgument final
5441 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5442 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5450 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5451 return ChangeStatus::UNCHANGED;
5452 return Base::manifest(
A);
5459struct AAAlignCallSiteArgument final : AAAlignFloating {
5461 : AAAlignFloating(IRP,
A) {}
5468 if (
Argument *Arg = getAssociatedArgument())
5469 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5470 return ChangeStatus::UNCHANGED;
5472 Align InheritAlign =
5473 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5474 if (InheritAlign >= getAssumedAlign())
5475 Changed = ChangeStatus::UNCHANGED;
5482 if (
Argument *Arg = getAssociatedArgument()) {
5485 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5488 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5498struct AAAlignCallSiteReturned final
5499 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5500 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5517 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5518 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5523 const std::string getAsStr(
Attributor *
A)
const override {
5524 return getAssumed() ?
"noreturn" :
"may-return";
5529 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5530 bool UsedAssumedInformation =
false;
5531 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5532 {(unsigned)Instruction::Ret},
5533 UsedAssumedInformation))
5534 return indicatePessimisticFixpoint();
5535 return ChangeStatus::UNCHANGED;
5539struct AANoReturnFunction final : AANoReturnImpl {
5541 : AANoReturnImpl(IRP,
A) {}
5548struct AANoReturnCallSite final
5549 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5551 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5568 Value &
V = getAssociatedValue();
5569 if (
auto *
C = dyn_cast<Constant>(&V)) {
5570 if (
C->isThreadDependent())
5571 indicatePessimisticFixpoint();
5573 indicateOptimisticFixpoint();
5576 if (
auto *CB = dyn_cast<CallBase>(&V))
5579 indicateOptimisticFixpoint();
5582 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5587 indicatePessimisticFixpoint();
5597 Value &
V = getAssociatedValue();
5599 if (
auto *
I = dyn_cast<Instruction>(&V))
5600 Scope =
I->getFunction();
5601 if (
auto *
A = dyn_cast<Argument>(&V)) {
5603 if (!
Scope->hasLocalLinkage())
5607 return indicateOptimisticFixpoint();
5609 bool IsKnownNoRecurse;
5610 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5615 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5616 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5617 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5618 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5622 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5623 (isa<StoreInst>(UserI) &&
5624 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5626 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5630 if (!Callee || !
Callee->hasLocalLinkage())
5636 DepClassTy::OPTIONAL);
5637 if (!ArgInstanceInfoAA ||
5638 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5643 A, *CB, *Scope, *
this,
nullptr,
5651 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5652 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5653 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5661 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5662 DepClassTy::OPTIONAL,
5663 true, EquivalentUseCB))
5664 return indicatePessimisticFixpoint();
5670 const std::string getAsStr(
Attributor *
A)
const override {
5671 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5675 void trackStatistics()
const override {}
5679struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5681 : AAInstanceInfoImpl(IRP,
A) {}
5685struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5687 : AAInstanceInfoFloating(IRP,
A) {}
5691struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5693 : AAInstanceInfoImpl(IRP,
A) {}
5701 Argument *Arg = getAssociatedArgument();
5703 return indicatePessimisticFixpoint();
5708 return indicatePessimisticFixpoint();
5714struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5716 : AAInstanceInfoImpl(IRP,
A) {
5732struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5734 : AAInstanceInfoFloating(IRP,
A) {}
5741 bool IgnoreSubsumingPositions) {
5742 assert(ImpliedAttributeKind == Attribute::Captures &&
5743 "Unexpected attribute kind");
5746 return V.use_empty();
5752 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5753 V.getType()->getPointerAddressSpace() == 0)) {
5758 A.getAttrs(IRP, {Attribute::Captures}, Attrs,
5768 {Attribute::Captures, Attribute::ByVal}, Attrs,
5784 determineFunctionCaptureCapabilities(IRP, *
F, State);
5785 if (State.isKnown(NO_CAPTURE)) {
5805 bool ReadOnly =
F.onlyReadsMemory();
5806 bool NoThrow =
F.doesNotThrow();
5807 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5808 if (ReadOnly && NoThrow && IsVoidReturn) {
5821 if (NoThrow && IsVoidReturn)
5826 if (!NoThrow || ArgNo < 0 ||
5827 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5830 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5831 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5832 if (U ==
unsigned(ArgNo))
5850 assert(!AA::hasAssumedIRAttr<Attribute::Captures>(
5851 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5861 if (!isAssumedNoCaptureMaybeReturned())
5864 if (isArgumentPosition()) {
5865 if (isAssumedNoCapture())
5873 const std::string getAsStr(
Attributor *
A)
const override {
5874 if (isKnownNoCapture())
5875 return "known not-captured";
5876 if (isAssumedNoCapture())
5877 return "assumed not-captured";
5878 if (isKnownNoCaptureMaybeReturned())
5879 return "known not-captured-maybe-returned";
5880 if (isAssumedNoCaptureMaybeReturned())
5881 return "assumed not-captured-maybe-returned";
5882 return "assumed-captured";
5890 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5894 if (isa<PtrToIntInst>(UInst)) {
5896 return isCapturedIn(State,
true,
true,
5902 if (isa<StoreInst>(UInst))
5903 return isCapturedIn(State,
true,
true,
5907 if (isa<ReturnInst>(UInst)) {
5909 return isCapturedIn(State,
false,
false,
5911 return isCapturedIn(State,
true,
true,
5917 auto *CB = dyn_cast<CallBase>(UInst);
5919 return isCapturedIn(State,
true,
true,
5926 bool IsKnownNoCapture;
5928 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
5929 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5931 if (IsAssumedNoCapture)
5932 return isCapturedIn(State,
false,
false,
5936 return isCapturedIn(State,
false,
false,
5941 return isCapturedIn(State,
true,
true,
5949 bool CapturedInInt,
bool CapturedInRet) {
5950 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5951 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5967 return indicatePessimisticFixpoint();
5974 return indicatePessimisticFixpoint();
5982 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5984 addKnownBits(NOT_CAPTURED_IN_MEM);
5991 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5995 UsedAssumedInformation))
5997 bool SeenConstant =
false;
5999 if (isa<Constant>(
VAC.getValue())) {
6002 SeenConstant =
true;
6003 }
else if (!isa<Argument>(
VAC.getValue()) ||
6004 VAC.getValue() == getAssociatedArgument())
6010 bool IsKnownNoUnwind;
6011 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
6013 bool IsVoidTy =
F->getReturnType()->isVoidTy();
6014 bool UsedAssumedInformation =
false;
6015 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
6016 T.addKnownBits(NOT_CAPTURED_IN_RET);
6017 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
6019 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
6020 addKnownBits(NOT_CAPTURED_IN_RET);
6021 if (isKnown(NOT_CAPTURED_IN_MEM))
6022 return indicateOptimisticFixpoint();
6027 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6036 return checkUse(
A,
T, U, Follow);
6039 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6040 return indicatePessimisticFixpoint();
6043 auto Assumed = S.getAssumed();
6044 S.intersectAssumedBits(
T.getAssumed());
6045 if (!isAssumedNoCaptureMaybeReturned())
6046 return indicatePessimisticFixpoint();
6052struct AANoCaptureArgument final : AANoCaptureImpl {
6054 : AANoCaptureImpl(IRP,
A) {}
6061struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6063 : AANoCaptureImpl(IRP,
A) {}
6071 Argument *Arg = getAssociatedArgument();
6073 return indicatePessimisticFixpoint();
6075 bool IsKnownNoCapture;
6077 if (AA::hasAssumedIRAttr<Attribute::Captures>(
6078 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6080 return ChangeStatus::UNCHANGED;
6082 return indicatePessimisticFixpoint();
6087 void trackStatistics()
const override {
6093struct AANoCaptureFloating final : AANoCaptureImpl {
6095 : AANoCaptureImpl(IRP,
A) {}
6098 void trackStatistics()
const override {
6104struct AANoCaptureReturned final : AANoCaptureImpl {
6106 : AANoCaptureImpl(IRP,
A) {
6121 void trackStatistics()
const override {}
6125struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6127 : AANoCaptureImpl(IRP,
A) {}
6133 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6137 void trackStatistics()
const override {
6148 SimplifiedAssociatedValue,
Other, Ty);
6149 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6153 if (SimplifiedAssociatedValue)
6154 dbgs() <<
"[ValueSimplify] is assumed to be "
6155 << **SimplifiedAssociatedValue <<
"\n";
6157 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6169 if (getAssociatedValue().
getType()->isVoidTy())
6170 indicatePessimisticFixpoint();
6171 if (
A.hasSimplificationCallback(getIRPosition()))
6172 indicatePessimisticFixpoint();
6176 const std::string getAsStr(
Attributor *
A)
const override {
6178 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6179 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6180 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6182 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6187 void trackStatistics()
const override {}
6190 std::optional<Value *>
6191 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6192 return SimplifiedAssociatedValue;
6203 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6205 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6218 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6219 if (
Check && (
I.mayReadFromMemory() ||
6224 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6226 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6252 if (
const auto &NewV = VMap.
lookup(&V))
6254 bool UsedAssumedInformation =
false;
6255 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6257 if (!SimpleV.has_value())
6261 EffectiveV = *SimpleV;
6262 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6266 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6267 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6268 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6269 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6276 Value *NewV = SimplifiedAssociatedValue
6277 ? *SimplifiedAssociatedValue
6279 if (NewV && NewV != &getAssociatedValue()) {
6283 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6285 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6294 const IRPosition &IRP,
bool Simplify =
true) {
6295 bool UsedAssumedInformation =
false;
6298 QueryingValueSimplified =
A.getAssumedSimplified(
6300 return unionAssumed(QueryingValueSimplified);
6304 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6305 if (!getAssociatedValue().
getType()->isIntegerTy())
6310 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6314 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6317 SimplifiedAssociatedValue = std::nullopt;
6318 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6321 if (
auto *
C = *COpt) {
6322 SimplifiedAssociatedValue =
C;
6323 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6329 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6330 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6332 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6340 for (
auto &U : getAssociatedValue().
uses()) {
6344 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6345 IP =
PHI->getIncomingBlock(U)->getTerminator();
6346 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6348 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6349 if (
A.changeUseAfterManifest(U, *NewV))
6350 Changed = ChangeStatus::CHANGED;
6354 return Changed | AAValueSimplify::manifest(
A);
6359 SimplifiedAssociatedValue = &getAssociatedValue();
6360 return AAValueSimplify::indicatePessimisticFixpoint();
6364struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6366 : AAValueSimplifyImpl(IRP,
A) {}
6369 AAValueSimplifyImpl::initialize(
A);
6370 if (
A.hasAttr(getIRPosition(),
6371 {Attribute::InAlloca, Attribute::Preallocated,
6372 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6374 indicatePessimisticFixpoint();
6381 Argument *Arg = getAssociatedArgument();
6387 return indicatePessimisticFixpoint();
6390 auto Before = SimplifiedAssociatedValue;
6404 bool UsedAssumedInformation =
false;
6405 std::optional<Constant *> SimpleArgOp =
6406 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6413 return unionAssumed(*SimpleArgOp);
6418 bool UsedAssumedInformation =
false;
6419 if (hasCallBaseContext() &&
6420 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6424 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6425 UsedAssumedInformation);
6428 if (!askSimplifiedValueForOtherAAs(
A))
6429 return indicatePessimisticFixpoint();
6432 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6433 : ChangeStatus ::CHANGED;
6437 void trackStatistics()
const override {
6442struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6444 : AAValueSimplifyImpl(IRP,
A) {}
6447 std::optional<Value *>
6448 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6449 if (!isValidState())
6451 return SimplifiedAssociatedValue;
6456 auto Before = SimplifiedAssociatedValue;
6459 auto &RI = cast<ReturnInst>(
I);
6460 return checkAndUpdate(
6465 bool UsedAssumedInformation =
false;
6466 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6467 UsedAssumedInformation))
6468 if (!askSimplifiedValueForOtherAAs(
A))
6469 return indicatePessimisticFixpoint();
6472 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6473 : ChangeStatus ::CHANGED;
6479 return ChangeStatus::UNCHANGED;
6483 void trackStatistics()
const override {
6488struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6490 : AAValueSimplifyImpl(IRP,
A) {}
6494 AAValueSimplifyImpl::initialize(
A);
6495 Value &
V = getAnchorValue();
6498 if (isa<Constant>(V))
6499 indicatePessimisticFixpoint();
6504 auto Before = SimplifiedAssociatedValue;
6505 if (!askSimplifiedValueForOtherAAs(
A))
6506 return indicatePessimisticFixpoint();
6509 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6510 : ChangeStatus ::CHANGED;
6514 void trackStatistics()
const override {
6519struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6521 : AAValueSimplifyImpl(IRP,
A) {}
6525 SimplifiedAssociatedValue =
nullptr;
6526 indicateOptimisticFixpoint();
6531 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6534 void trackStatistics()
const override {
6539struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6541 : AAValueSimplifyFunction(IRP,
A) {}
6543 void trackStatistics()
const override {
6548struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6550 : AAValueSimplifyImpl(IRP,
A) {}
6553 AAValueSimplifyImpl::initialize(
A);
6554 Function *Fn = getAssociatedFunction();
6555 assert(Fn &&
"Did expect an associted function");
6561 checkAndUpdate(
A, *
this, IRP))
6562 indicateOptimisticFixpoint();
6564 indicatePessimisticFixpoint();
6572 return indicatePessimisticFixpoint();
6575 void trackStatistics()
const override {
6580struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6582 : AAValueSimplifyFloating(IRP,
A) {}
6590 if (FloatAA && FloatAA->getState().isValidState())
6593 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6594 Use &
U = cast<CallBase>(&getAnchorValue())
6595 ->getArgOperandUse(getCallSiteArgNo());
6596 if (
A.changeUseAfterManifest(U, *NewV))
6597 Changed = ChangeStatus::CHANGED;
6600 return Changed | AAValueSimplify::manifest(
A);
6603 void trackStatistics()
const override {
6613 struct AllocationInfo {
6625 }
Status = STACK_DUE_TO_USE;
6629 bool HasPotentiallyFreeingUnknownUses =
false;
6633 bool MoveAllocaIntoEntry =
true;
6639 struct DeallocationInfo {
6647 bool MightFreeUnknownObjects =
false;
6656 ~AAHeapToStackFunction() {
6659 for (
auto &It : AllocationInfos)
6660 It.second->~AllocationInfo();
6661 for (
auto &It : DeallocationInfos)
6662 It.second->~DeallocationInfo();
6666 AAHeapToStack::initialize(
A);
6669 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6676 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6685 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6686 AllocationInfos[CB] = AI;
6688 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6694 bool UsedAssumedInformation =
false;
6695 bool Success =
A.checkForAllCallLikeInstructions(
6696 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6700 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6704 bool &) -> std::optional<Value *> {
return nullptr; };
6705 for (
const auto &It : AllocationInfos)
6708 for (
const auto &It : DeallocationInfos)
6713 const std::string getAsStr(
Attributor *
A)
const override {
6714 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6715 for (
const auto &It : AllocationInfos) {
6716 if (It.second->Status == AllocationInfo::INVALID)
6717 ++NumInvalidMallocs;
6721 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6722 std::to_string(NumInvalidMallocs);
6726 void trackStatistics()
const override {
6729 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6730 for (
const auto &It : AllocationInfos)
6731 if (It.second->Status != AllocationInfo::INVALID)
6735 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6737 if (AllocationInfo *AI =
6738 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6739 return AI->Status != AllocationInfo::INVALID;
6743 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6744 if (!isValidState())
6747 for (
const auto &It : AllocationInfos) {
6748 AllocationInfo &AI = *It.second;
6749 if (AI.Status == AllocationInfo::INVALID)
6752 if (AI.PotentialFreeCalls.count(&CB))
6760 assert(getState().isValidState() &&
6761 "Attempted to manifest an invalid state!");
6765 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6767 for (
auto &It : AllocationInfos) {
6768 AllocationInfo &AI = *It.second;
6769 if (AI.Status == AllocationInfo::INVALID)
6772 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6773 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6774 A.deleteAfterManifest(*FreeCall);
6775 HasChanged = ChangeStatus::CHANGED;
6778 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6783 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6784 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6785 return OR <<
"Moving globalized variable to the stack.";
6786 return OR <<
"Moving memory allocation from the heap to the stack.";
6788 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6795 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6797 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6804 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6809 ?
F->getEntryBlock().begin()
6810 : AI.CB->getIterator();
6813 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6814 Alignment = std::max(Alignment, *RetAlign);
6816 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6817 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6818 "Expected an alignment during manifest!");
6820 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6824 unsigned AS =
DL.getAllocaAddrSpace();
6827 AI.CB->getName() +
".h2s", IP);
6829 if (Alloca->
getType() != AI.CB->getType())
6830 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6831 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6836 "Must be able to materialize initial memory state of allocation");
6840 if (
auto *
II = dyn_cast<InvokeInst>(AI.CB)) {
6841 auto *NBB =
II->getNormalDest();
6843 A.deleteAfterManifest(*AI.CB);
6845 A.deleteAfterManifest(*AI.CB);
6851 if (!isa<UndefValue>(InitVal)) {
6854 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6856 HasChanged = ChangeStatus::CHANGED;
6864 bool UsedAssumedInformation =
false;
6865 std::optional<Constant *> SimpleV =
6866 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6868 return APInt(64, 0);
6869 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6870 return CI->getValue();
6871 return std::nullopt;
6875 AllocationInfo &AI) {
6876 auto Mapper = [&](
const Value *
V) ->
const Value * {
6877 bool UsedAssumedInformation =
false;
6878 if (std::optional<Constant *> SimpleV =
6879 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6886 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6904 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6906 const auto *LivenessAA =
6910 A.getInfoCache().getMustBeExecutedContextExplorer();
6912 bool StackIsAccessibleByOtherThreads =
6913 A.getInfoCache().stackIsAccessibleByOtherThreads();
6916 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6917 std::optional<bool> MayContainIrreducibleControl;
6919 if (&
F->getEntryBlock() == &BB)
6921 if (!MayContainIrreducibleControl.has_value())
6923 if (*MayContainIrreducibleControl)
6932 bool HasUpdatedFrees =
false;
6934 auto UpdateFrees = [&]() {
6935 HasUpdatedFrees =
true;
6937 for (
auto &It : DeallocationInfos) {
6938 DeallocationInfo &DI = *It.second;
6941 if (DI.MightFreeUnknownObjects)
6945 bool UsedAssumedInformation =
false;
6946 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6953 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6954 DI.MightFreeUnknownObjects =
true;
6960 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6963 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6967 DI.MightFreeUnknownObjects =
true;
6971 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6973 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6975 DI.MightFreeUnknownObjects =
true;
6979 DI.PotentialAllocationCalls.insert(ObjCB);
6983 auto FreeCheck = [&](AllocationInfo &AI) {
6987 if (!StackIsAccessibleByOtherThreads) {
6989 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6992 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6993 "other threads and function is not nosync:\n");
6997 if (!HasUpdatedFrees)
7001 if (AI.PotentialFreeCalls.size() != 1) {
7003 << AI.PotentialFreeCalls.size() <<
"\n");
7006 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
7007 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
7010 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
7011 << *UniqueFree <<
"\n");
7014 if (DI->MightFreeUnknownObjects) {
7016 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7019 if (DI->PotentialAllocationCalls.empty())
7021 if (DI->PotentialAllocationCalls.size() > 1) {
7023 << DI->PotentialAllocationCalls.size()
7024 <<
" different allocations\n");
7027 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7030 <<
"[H2S] unique free call not known to free this allocation but "
7031 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7036 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
7038 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7039 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7040 "with the allocation "
7041 << *UniqueFree <<
"\n");
7048 auto UsesCheck = [&](AllocationInfo &AI) {
7049 bool ValidUsesOnly =
true;
7051 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
7053 if (isa<LoadInst>(UserI))
7055 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
7056 if (
SI->getValueOperand() ==
U.get()) {
7058 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7059 ValidUsesOnly =
false;
7065 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
7068 if (DeallocationInfos.count(CB)) {
7069 AI.PotentialFreeCalls.insert(CB);
7076 bool IsKnownNoCapture;
7077 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
7082 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7085 if (!IsAssumedNoCapture ||
7086 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7087 !IsAssumedNoFree)) {
7088 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7093 <<
"Could not move globalized variable to the stack. "
7094 "Variable is potentially captured in call. Mark "
7095 "parameter as `__attribute__((noescape))` to override.";
7098 if (ValidUsesOnly &&
7099 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7103 ValidUsesOnly =
false;
7108 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7109 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7116 ValidUsesOnly =
false;
7119 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7121 [&](
const Use &OldU,
const Use &NewU) {
7122 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7123 return !SI || StackIsAccessibleByOtherThreads ||
7124 AA::isAssumedThreadLocalObject(
7125 A, *SI->getPointerOperand(), *this);
7128 return ValidUsesOnly;
7133 for (
auto &It : AllocationInfos) {
7134 AllocationInfo &AI = *It.second;
7135 if (AI.Status == AllocationInfo::INVALID)
7139 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7143 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7145 AI.Status = AllocationInfo::INVALID;
7150 !APAlign->isPowerOf2()) {
7151 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7153 AI.Status = AllocationInfo::INVALID;
7160 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7165 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7167 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7171 AI.Status = AllocationInfo::INVALID;
7177 switch (AI.Status) {
7178 case AllocationInfo::STACK_DUE_TO_USE:
7181 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7183 case AllocationInfo::STACK_DUE_TO_FREE:
7186 AI.Status = AllocationInfo::INVALID;
7189 case AllocationInfo::INVALID:
7196 bool IsGlobalizedLocal =
7197 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7198 if (AI.MoveAllocaIntoEntry &&
7199 (!
Size.has_value() ||
7200 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7201 AI.MoveAllocaIntoEntry =
false;
7215 AAPrivatizablePtr::indicatePessimisticFixpoint();
7216 PrivatizableType =
nullptr;
7217 return ChangeStatus::CHANGED;
7223 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7227 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7228 std::optional<Type *>
T1) {
7238 std::optional<Type *> getPrivatizableType()
const override {
7239 return PrivatizableType;
7242 const std::string getAsStr(
Attributor *
A)
const override {
7243 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7247 std::optional<Type *> PrivatizableType;
7252struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7254 : AAPrivatizablePtrImpl(IRP,
A) {}
7257 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7260 bool UsedAssumedInformation =
false;
7262 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7264 if (!
Attrs.empty() &&
7266 true, UsedAssumedInformation))
7267 return Attrs[0].getValueAsType();
7269 std::optional<Type *> Ty;
7270 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7293 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7297 dbgs() <<
"<nullptr>";
7302 Ty = combineTypes(Ty, CSTy);
7305 dbgs() <<
" : New Type: ";
7307 (*Ty)->print(
dbgs());
7309 dbgs() <<
"<nullptr>";
7318 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7319 UsedAssumedInformation))
7326 PrivatizableType = identifyPrivatizableType(
A);
7327 if (!PrivatizableType)
7328 return ChangeStatus::UNCHANGED;
7329 if (!*PrivatizableType)
7330 return indicatePessimisticFixpoint();
7335 DepClassTy::OPTIONAL);
7338 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7341 return indicatePessimisticFixpoint();
7347 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7351 Function &Fn = *getIRPosition().getAnchorScope();
7355 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7357 return indicatePessimisticFixpoint();
7367 bool UsedAssumedInformation =
false;
7368 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7369 UsedAssumedInformation)) {
7371 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7373 return indicatePessimisticFixpoint();
7377 Argument *Arg = getAssociatedArgument();
7378 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7380 return indicatePessimisticFixpoint();
7387 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7390 for (
const Use *U : CallbackUses) {
7392 assert(CBACS && CBACS.isCallbackCall());
7393 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7394 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7398 <<
"[AAPrivatizablePtr] Argument " << *Arg
7399 <<
"check if can be privatized in the context of its parent ("
7401 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7403 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7404 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7405 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7407 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7408 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7411 if (CBArgNo !=
int(ArgNo))
7415 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7419 if (*CBArgPrivTy == PrivatizableType)
7424 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7425 <<
" cannot be privatized in the context of its parent ("
7427 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7429 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7430 <<
").\n[AAPrivatizablePtr] for which the argument "
7431 "privatization is not compatible.\n";
7445 "Expected a direct call operand for callback call operand");
7450 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7451 <<
" check if be privatized in the context of its parent ("
7453 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7455 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7458 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7461 DepClassTy::REQUIRED);
7462 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7466 if (*DCArgPrivTy == PrivatizableType)
7472 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7473 <<
" cannot be privatized in the context of its parent ("
7475 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7478 <<
").\n[AAPrivatizablePtr] for which the argument "
7479 "privatization is not compatible.\n";
7491 return IsCompatiblePrivArgOfDirectCS(ACS);
7495 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7496 UsedAssumedInformation))
7497 return indicatePessimisticFixpoint();
7499 return ChangeStatus::UNCHANGED;
7505 identifyReplacementTypes(
Type *PrivType,
7509 assert(PrivType &&
"Expected privatizable type!");
7512 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7513 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7514 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7515 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7516 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7517 PrivArrayType->getElementType());
7528 assert(PrivType &&
"Expected privatizable type!");
7534 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7535 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7536 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7541 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7542 Type *PointeeTy = PrivArrayType->getElementType();
7543 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7544 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7555 void createReplacementValues(
Align Alignment,
Type *PrivType,
7559 assert(PrivType &&
"Expected privatizable type!");
7566 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7567 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7568 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7569 Type *PointeeTy = PrivStructType->getElementType(u);
7573 L->setAlignment(Alignment);
7576 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7577 Type *PointeeTy = PrivArrayType->getElementType();
7578 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7579 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7582 L->setAlignment(Alignment);
7587 L->setAlignment(Alignment);
7594 if (!PrivatizableType)
7595 return ChangeStatus::UNCHANGED;
7596 assert(*PrivatizableType &&
"Expected privatizable type!");
7602 bool UsedAssumedInformation =
false;
7603 if (!
A.checkForAllInstructions(
7605 CallInst &CI = cast<CallInst>(I);
7606 if (CI.isTailCall())
7607 TailCalls.push_back(&CI);
7610 *
this, {Instruction::Call}, UsedAssumedInformation))
7611 return ChangeStatus::UNCHANGED;
7613 Argument *Arg = getAssociatedArgument();
7616 const auto *AlignAA =
7625 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7628 unsigned AS =
DL.getAllocaAddrSpace();
7630 Arg->
getName() +
".priv", IP);
7631 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7632 ArgIt->getArgNo(), IP);
7635 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7640 CI->setTailCall(
false);
7651 createReplacementValues(
7652 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7653 *PrivatizableType, ACS,
7661 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7664 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7665 std::move(FnRepairCB),
7666 std::move(ACSRepairCB)))
7667 return ChangeStatus::CHANGED;
7668 return ChangeStatus::UNCHANGED;
7672 void trackStatistics()
const override {
7677struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7679 : AAPrivatizablePtrImpl(IRP,
A) {}
7684 indicatePessimisticFixpoint();
7689 "updateImpl will not be called");
7693 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7696 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7700 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7701 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7703 return AI->getAllocatedType();
7704 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7707 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7711 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7712 "alloca nor privatizable argument: "
7718 void trackStatistics()
const override {
7723struct AAPrivatizablePtrCallSiteArgument final
7724 :
public AAPrivatizablePtrFloating {
7726 : AAPrivatizablePtrFloating(IRP,
A) {}
7730 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7731 indicateOptimisticFixpoint();
7736 PrivatizableType = identifyPrivatizableType(
A);
7737 if (!PrivatizableType)
7738 return ChangeStatus::UNCHANGED;
7739 if (!*PrivatizableType)
7740 return indicatePessimisticFixpoint();
7743 bool IsKnownNoCapture;
7744 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
7745 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7746 if (!IsAssumedNoCapture) {
7747 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7748 return indicatePessimisticFixpoint();
7751 bool IsKnownNoAlias;
7752 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7753 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7754 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7755 return indicatePessimisticFixpoint();
7760 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7761 return indicatePessimisticFixpoint();
7764 return ChangeStatus::UNCHANGED;
7768 void trackStatistics()
const override {
7773struct AAPrivatizablePtrCallSiteReturned final
7774 :
public AAPrivatizablePtrFloating {
7776 : AAPrivatizablePtrFloating(IRP,
A) {}
7781 indicatePessimisticFixpoint();
7785 void trackStatistics()
const override {
7790struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7792 : AAPrivatizablePtrFloating(IRP,
A) {}
7797 indicatePessimisticFixpoint();
7801 void trackStatistics()
const override {
7817 intersectAssumedBits(BEST_STATE);
7818 getKnownStateFromValue(
A, getIRPosition(), getState());
7819 AAMemoryBehavior::initialize(
A);
7825 bool IgnoreSubsumingPositions =
false) {
7827 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7829 switch (Attr.getKindAsEnum()) {
7830 case Attribute::ReadNone:
7833 case Attribute::ReadOnly:
7836 case Attribute::WriteOnly:
7845 if (!
I->mayReadFromMemory())
7847 if (!
I->mayWriteToMemory())
7860 else if (isAssumedWriteOnly())
7869 if (
A.hasAttr(IRP, Attribute::ReadNone,
7871 return ChangeStatus::UNCHANGED;
7880 return ChangeStatus::UNCHANGED;
7883 A.removeAttrs(IRP, AttrKinds);
7886 A.removeAttrs(IRP, Attribute::Writable);
7893 const std::string getAsStr(
Attributor *
A)
const override {
7898 if (isAssumedWriteOnly())
7900 return "may-read/write";
7908 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7911struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7913 : AAMemoryBehaviorImpl(IRP,
A) {}
7919 void trackStatistics()
const override {
7924 else if (isAssumedWriteOnly())
7939struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7941 : AAMemoryBehaviorFloating(IRP,
A) {}
7945 intersectAssumedBits(BEST_STATE);
7950 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7952 getKnownStateFromValue(
A, IRP, getState(),
7959 return ChangeStatus::UNCHANGED;
7963 if (
A.hasAttr(getIRPosition(),
7964 {Attribute::InAlloca, Attribute::Preallocated})) {
7965 removeKnownBits(NO_WRITES);
7966 removeAssumedBits(NO_WRITES);
7968 A.removeAttrs(getIRPosition(), AttrKinds);
7969 return AAMemoryBehaviorFloating::manifest(
A);
7973 void trackStatistics()
const override {
7978 else if (isAssumedWriteOnly())
7983struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7985 : AAMemoryBehaviorArgument(IRP,
A) {}
7991 Argument *Arg = getAssociatedArgument();
7993 indicatePessimisticFixpoint();
7997 addKnownBits(NO_WRITES);
7998 removeKnownBits(NO_READS);
7999 removeAssumedBits(NO_READS);
8001 AAMemoryBehaviorArgument::initialize(
A);
8002 if (getAssociatedFunction()->isDeclaration())
8003 indicatePessimisticFixpoint();
8012 Argument *Arg = getAssociatedArgument();
8017 return indicatePessimisticFixpoint();
8022 void trackStatistics()
const override {
8027 else if (isAssumedWriteOnly())
8033struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8035 : AAMemoryBehaviorFloating(IRP,
A) {}
8039 AAMemoryBehaviorImpl::initialize(
A);
8044 return ChangeStatus::UNCHANGED;
8048 void trackStatistics()
const override {}
8052struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8054 : AAMemoryBehaviorImpl(IRP,
A) {}
8064 Function &
F = cast<Function>(getAnchorValue());
8070 else if (isAssumedWriteOnly())
8073 A.removeAttrs(getIRPosition(), AttrKinds);
8078 return A.manifestAttrs(getIRPosition(),
8083 void trackStatistics()
const override {
8088 else if (isAssumedWriteOnly())
8094struct AAMemoryBehaviorCallSite final
8095 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8102 CallBase &CB = cast<CallBase>(getAnchorValue());
8108 else if (isAssumedWriteOnly())
8111 A.removeAttrs(getIRPosition(), AttrKinds);
8116 Attribute::Writable);
8117 return A.manifestAttrs(
8122 void trackStatistics()
const override {
8127 else if (isAssumedWriteOnly())
8135 auto AssumedState = getAssumed();
8141 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8144 if (MemBehaviorAA) {
8145 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8146 return !isAtFixpoint();
8151 if (
I.mayReadFromMemory())
8152 removeAssumedBits(NO_READS);
8153 if (
I.mayWriteToMemory())
8154 removeAssumedBits(NO_WRITES);
8155 return !isAtFixpoint();
8158 bool UsedAssumedInformation =
false;
8159 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8160 UsedAssumedInformation))
8161 return indicatePessimisticFixpoint();
8180 const auto *FnMemAA =
8184 S.addKnownBits(FnMemAA->getKnown());
8185 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8191 auto AssumedState = S.getAssumed();
8197 bool IsKnownNoCapture;
8199 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
8203 if (!IsAssumedNoCapture &&
8205 S.intersectAssumedBits(FnMemAssumedState);
8211 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8213 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8221 Follow = followUsersOfUseIn(
A, U, UserI);
8225 analyzeUseIn(
A, U, UserI);
8227 return !isAtFixpoint();
8230 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8231 return indicatePessimisticFixpoint();
8237bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8241 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8246 const auto *CB = dyn_cast<CallBase>(UserI);
8256 if (
U.get()->getType()->isPointerTy()) {
8258 bool IsKnownNoCapture;
8259 return !AA::hasAssumedIRAttr<Attribute::Captures>(
8267void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8275 case Instruction::Load:
8277 removeAssumedBits(NO_READS);
8280 case Instruction::Store:
8285 removeAssumedBits(NO_WRITES);
8287 indicatePessimisticFixpoint();
8290 case Instruction::Call:
8291 case Instruction::CallBr:
8292 case Instruction::Invoke: {
8295 const auto *CB = cast<CallBase>(UserI);
8299 indicatePessimisticFixpoint();
8306 removeAssumedBits(NO_READS);
8313 if (
U.get()->getType()->isPointerTy())
8317 const auto *MemBehaviorAA =
8323 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8331 removeAssumedBits(NO_READS);
8333 removeAssumedBits(NO_WRITES);
8345 return "all memory";
8348 std::string S =
"memory:";
8354 S +=
"internal global,";
8356 S +=
"external global,";
8360 S +=
"inaccessible,";
8374 AccessKind2Accesses.fill(
nullptr);
8377 ~AAMemoryLocationImpl() {
8380 for (AccessSet *AS : AccessKind2Accesses)
8387 intersectAssumedBits(BEST_STATE);
8388 getKnownStateFromValue(
A, getIRPosition(), getState());
8389 AAMemoryLocation::initialize(
A);
8395 bool IgnoreSubsumingPositions =
false) {
8404 bool UseArgMemOnly =
true;
8406 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8410 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8419 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8424 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8428 A.manifestAttrs(IRP,
8438 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8442 A.manifestAttrs(IRP,
8461 else if (isAssumedInaccessibleMemOnly())
8464 else if (isAssumedArgMemOnly())
8467 else if (isAssumedInaccessibleOrArgMemOnly())
8482 if (DeducedAttrs.
size() != 1)
8483 return ChangeStatus::UNCHANGED;
8491 bool checkForAllAccessesToMemoryKind(
8493 MemoryLocationsKind)>
8495 MemoryLocationsKind RequestedMLK)
const override {
8496 if (!isValidState())
8499 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8500 if (AssumedMLK == NO_LOCATIONS)
8504 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8505 CurMLK *= 2, ++
Idx) {
8506 if (CurMLK & RequestedMLK)
8509 if (
const AccessSet *
Accesses = AccessKind2Accesses[
Idx])
8510 for (
const AccessInfo &AI : *
Accesses)
8511 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8523 bool Changed =
false;
8524 MemoryLocationsKind KnownMLK = getKnown();
8525 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8526 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8527 if (!(CurMLK & KnownMLK))
8528 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8529 getAccessKindFromInst(
I));
8530 return AAMemoryLocation::indicatePessimisticFixpoint();
8550 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8554 return LHS.Ptr <
RHS.Ptr;
8555 if (
LHS.Kind !=
RHS.Kind)
8556 return LHS.Kind <
RHS.Kind;
8564 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8581 AK =
I->mayReadFromMemory() ? READ :
NONE;
8599 Changed |=
Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8600 if (MLK == NO_UNKOWN_MEM)
8602 State.removeAssumedBits(MLK);
8609 unsigned AccessAS = 0);
8615void AAMemoryLocationImpl::categorizePtrValue(
8618 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8620 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8622 auto Pred = [&](
Value &Obj) {
8625 MemoryLocationsKind MLK = NO_LOCATIONS;
8635 if (isa<UndefValue>(&Obj))
8637 if (isa<Argument>(&Obj)) {
8644 MLK = NO_ARGUMENT_MEM;
8645 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8649 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8650 if (GVar->isConstant())
8653 if (GV->hasLocalLinkage())
8654 MLK = NO_GLOBAL_INTERNAL_MEM;
8656 MLK = NO_GLOBAL_EXTERNAL_MEM;
8657 }
else if (isa<ConstantPointerNull>(&Obj) &&
8661 }
else if (isa<AllocaInst>(&Obj)) {
8663 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8664 bool IsKnownNoAlias;
8665 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8668 MLK = NO_MALLOCED_MEM;
8670 MLK = NO_UNKOWN_MEM;
8672 MLK = NO_UNKOWN_MEM;
8675 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8676 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8677 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8678 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8679 getAccessKindFromInst(&
I));
8688 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8689 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8690 getAccessKindFromInst(&
I));
8695 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8696 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8699void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8702 for (
unsigned ArgNo = 0, E = CB.
arg_size(); ArgNo < E; ++ArgNo) {
8711 const auto *ArgOpMemLocationAA =
8714 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8719 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
8726 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8730 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8732 if (
auto *CB = dyn_cast<CallBase>(&
I)) {
8738 <<
" [" << CBMemLocationAA <<
"]\n");
8739 if (!CBMemLocationAA) {
8740 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8741 Changed, getAccessKindFromInst(&
I));
8742 return NO_UNKOWN_MEM;
8745 if (CBMemLocationAA->isAssumedReadNone())
8746 return NO_LOCATIONS;
8748 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8749 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8750 Changed, getAccessKindFromInst(&
I));
8751 return AccessedLocs.getAssumed();
8754 uint32_t CBAssumedNotAccessedLocs =
8755 CBMemLocationAA->getAssumedNotAccessedLocation();
8758 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8759 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8761 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8762 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8764 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr, Changed,
8765 getAccessKindFromInst(&
I));
8770 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8771 if (HasGlobalAccesses) {
8774 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr, Changed,
8775 getAccessKindFromInst(&
I));
8778 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8779 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8780 return AccessedLocs.getWorstState();
8784 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8785 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8788 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8790 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs, Changed);
8793 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8794 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8796 return AccessedLocs.getAssumed();
8801 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8802 <<
I <<
" [" << *
Ptr <<
"]\n");
8803 categorizePtrValue(
A,
I, *
Ptr, AccessedLocs, Changed,
8804 Ptr->getType()->getPointerAddressSpace());
8805 return AccessedLocs.getAssumed();
8808 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8810 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8811 getAccessKindFromInst(&
I));
8812 return AccessedLocs.getAssumed();
8816struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8818 : AAMemoryLocationImpl(IRP,
A) {}
8823 const auto *MemBehaviorAA =
8827 return indicateOptimisticFixpoint();
8829 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8830 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8831 return ChangeStatus::UNCHANGED;
8835 auto AssumedState = getAssumed();
8836 bool Changed =
false;
8839 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I, Changed);
8840 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8841 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8842 removeAssumedBits(inverseLocation(MLK,
false,
false));
8845 return getAssumedNotAccessedLocation() != VALID_STATE;
8848 bool UsedAssumedInformation =
false;
8849 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8850 UsedAssumedInformation))
8851 return indicatePessimisticFixpoint();
8853 Changed |= AssumedState != getAssumed();
8854 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8858 void trackStatistics()
const override {
8861 else if (isAssumedArgMemOnly())
8863 else if (isAssumedInaccessibleMemOnly())
8865 else if (isAssumedInaccessibleOrArgMemOnly())
8871struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8873 : AAMemoryLocationImpl(IRP,
A) {}
8886 return indicatePessimisticFixpoint();
8887 bool Changed =
false;
8890 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr, Changed,
8891 getAccessKindFromInst(
I));
8894 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8895 return indicatePessimisticFixpoint();
8896 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8900 void trackStatistics()
const override {
8914 const std::string getAsStr(
Attributor *
A)
const override {
8915 std::string Str(
"AADenormalFPMath[");
8918 DenormalState Known = getKnown();
8919 if (Known.Mode.isValid())
8920 OS <<
"denormal-fp-math=" << Known.Mode;
8924 if (Known.ModeF32.isValid())
8925 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8931struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8933 : AADenormalFPMathImpl(IRP,
A) {}
8945 Known = DenormalState{
Mode, ModeF32};
8956 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8964 CallerInfo->getState());
8968 bool AllCallSitesKnown =
true;
8969 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8970 return indicatePessimisticFixpoint();
8972 if (Change == ChangeStatus::CHANGED && isModeFixed())
8978 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8983 AttrToRemove.
push_back(
"denormal-fp-math");
8989 if (Known.ModeF32 != Known.Mode) {
8991 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8993 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8996 auto &IRP = getIRPosition();
8999 return A.removeAttrs(IRP, AttrToRemove) |
9000 A.manifestAttrs(IRP, AttrToAdd,
true);
9003 void trackStatistics()
const override {
9019 if (
A.hasSimplificationCallback(getIRPosition())) {
9020 indicatePessimisticFixpoint();
9025 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9028 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9032 const std::string getAsStr(
Attributor *
A)
const override {
9036 getKnown().print(
OS);
9038 getAssumed().print(
OS);
9046 if (!getAnchorScope())
9059 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9070 if (!getAnchorScope())
9077 const SCEV *S = getSCEV(
A,
I);
9089 if (!getAnchorScope())
9108 bool isValidCtxInstructionForOutsideAnalysis(
Attributor &
A,
9110 bool AllowAACtxI)
const {
9111 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9122 if (
auto *
I = dyn_cast<Instruction>(&getAssociatedValue())) {
9136 const Instruction *CtxI =
nullptr)
const override {
9137 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9143 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9149 const Instruction *CtxI =
nullptr)
const override {
9154 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9156 return getAssumed();
9160 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9168 Ty, AssumedConstantRange.
getLower())),
9170 Ty, AssumedConstantRange.
getUpper()))};
9180 std::optional<ConstantRange> Known;
9182 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
9184 }
else if (
MDNode *KnownRanges =
I.getMetadata(LLVMContext::MD_range)) {
9190 if (KnownRanges->getNumOperands() > 2)
9194 mdconst::extract<ConstantInt>(KnownRanges->getOperand(0));
9196 mdconst::extract<ConstantInt>(KnownRanges->getOperand(1));
9198 Known.emplace(
Lower->getValue(),
Upper->getValue());
9200 return !Known || (*Known != Assumed && Known->
contains(Assumed));
9207 if (isBetterRange(AssumedConstantRange, *
I)) {
9208 I->setMetadata(LLVMContext::MD_range,
9209 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9210 AssumedConstantRange));
9220 if (isBetterRange(AssumedConstantRange, *
I)) {
9221 A.manifestAttrs(IRP,
9223 AssumedConstantRange),
9236 auto &
V = getAssociatedValue();
9240 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9241 "not the context instruction");
9242 if (isa<LoadInst>(
I))
9243 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9244 Changed = ChangeStatus::CHANGED;
9245 if (isa<CallInst>(
I))
9246 if (setRangeRetAttrIfisBetterRange(
A, getIRPosition(),
I,
9247 AssumedConstantRange))
9248 Changed = ChangeStatus::CHANGED;
9256struct AAValueConstantRangeArgument final
9257 : AAArgumentFromCallSiteArguments<
9258 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9260 using Base = AAArgumentFromCallSiteArguments<
9267 void trackStatistics()
const override {
9272struct AAValueConstantRangeReturned
9273 : AAReturnedFromReturnedValues<AAValueConstantRange,
9274 AAValueConstantRangeImpl,
9275 AAValueConstantRangeImpl::StateType,
9279 AAValueConstantRangeImpl,
9287 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9288 indicatePessimisticFixpoint();
9292 void trackStatistics()
const override {
9297struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9299 : AAValueConstantRangeImpl(IRP,
A) {}
9303 AAValueConstantRangeImpl::initialize(
A);
9307 Value &
V = getAssociatedValue();
9309 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9311 indicateOptimisticFixpoint();
9315 if (isa<UndefValue>(&V)) {
9318 indicateOptimisticFixpoint();
9322 if (isa<CallBase>(&V))
9325 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
9329 if (
LoadInst *LI = dyn_cast<LoadInst>(&V))
9330 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9337 if (isa<SelectInst>(V) || isa<PHINode>(V))
9341 indicatePessimisticFixpoint();
9344 << getAssociatedValue() <<
"\n");
9347 bool calculateBinaryOperator(
9355 bool UsedAssumedInformation =
false;
9356 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9359 if (!SimplifiedLHS.has_value())
9361 if (!*SimplifiedLHS)
9363 LHS = *SimplifiedLHS;
9365 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9368 if (!SimplifiedRHS.has_value())
9370 if (!*SimplifiedRHS)
9372 RHS = *SimplifiedRHS;
9380 DepClassTy::REQUIRED);
9384 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9388 DepClassTy::REQUIRED);
9392 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9394 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9396 T.unionAssumed(AssumedRange);
9400 return T.isValidState();
9403 bool calculateCastInst(
9412 bool UsedAssumedInformation =
false;
9413 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9416 if (!SimplifiedOpV.has_value())
9418 if (!*SimplifiedOpV)
9420 OpV = *SimplifiedOpV;
9427 DepClassTy::REQUIRED);
9431 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9433 return T.isValidState();
9444 bool UsedAssumedInformation =
false;
9445 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9448 if (!SimplifiedLHS.has_value())
9450 if (!*SimplifiedLHS)
9452 LHS = *SimplifiedLHS;
9454 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9457 if (!SimplifiedRHS.has_value())
9459 if (!*SimplifiedRHS)
9461 RHS = *SimplifiedRHS;
9469 DepClassTy::REQUIRED);
9475 DepClassTy::REQUIRED);
9479 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9480 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9483 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9486 bool MustTrue =
false, MustFalse =
false;
9488 auto AllowedRegion =
9491 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9497 assert((!MustTrue || !MustFalse) &&
9498 "Either MustTrue or MustFalse should be false!");
9507 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9508 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9509 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9513 return T.isValidState();
9522 if (!
I || isa<CallBase>(
I)) {
9525 bool UsedAssumedInformation =
false;
9526 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9529 if (!SimplifiedOpV.has_value())
9531 if (!*SimplifiedOpV)
9533 Value *VPtr = *SimplifiedOpV;
9538 DepClassTy::REQUIRED);
9542 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9546 return T.isValidState();
9550 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I)) {
9551 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9553 }
else if (
auto *CmpI = dyn_cast<CmpInst>(
I)) {
9554 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9556 }
else if (
auto *CastI = dyn_cast<CastInst>(
I)) {
9557 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9563 T.indicatePessimisticFixpoint();
9571 if (QueriedAA !=
this)
9574 if (
T.getAssumed() == getState().getAssumed())
9576 T.indicatePessimisticFixpoint();
9579 return T.isValidState();
9582 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9583 return indicatePessimisticFixpoint();
9588 return ChangeStatus::UNCHANGED;
9589 if (++NumChanges > MaxNumChanges) {
9590 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9591 <<
" but only " << MaxNumChanges
9592 <<
" are allowed to avoid cyclic reasoning.");
9593 return indicatePessimisticFixpoint();
9595 return ChangeStatus::CHANGED;
9599 void trackStatistics()
const override {
9608 static constexpr int MaxNumChanges = 5;
9611struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9613 : AAValueConstantRangeImpl(IRP,
A) {}
9617 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9625struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9627 : AAValueConstantRangeFunction(IRP,
A) {}
9633struct AAValueConstantRangeCallSiteReturned
9634 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9635 AAValueConstantRangeImpl::StateType,
9639 AAValueConstantRangeImpl::StateType,
9645 if (
CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue())) {
9646 if (std::optional<ConstantRange>
Range = CI->getRange())
9647 intersectKnown(*
Range);
9650 AAValueConstantRangeImpl::initialize(
A);
9654 void trackStatistics()
const override {
9658struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9660 : AAValueConstantRangeFloating(IRP,
A) {}
9664 return ChangeStatus::UNCHANGED;
9668 void trackStatistics()
const override {
9685 if (
A.hasSimplificationCallback(getIRPosition()))
9686 indicatePessimisticFixpoint();
9688 AAPotentialConstantValues::initialize(
A);
9692 bool &ContainsUndef,
bool ForSelf) {
9694 bool UsedAssumedInformation =
false;
9696 UsedAssumedInformation)) {
9704 *
this, IRP, DepClassTy::REQUIRED);
9705 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9707 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9708 S = PotentialValuesAA->getState().getAssumedSet();
9715 ContainsUndef =
false;
9716 for (
auto &It : Values) {
9717 if (isa<UndefValue>(It.getValue())) {
9718 ContainsUndef =
true;
9721 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9724 S.insert(CI->getValue());
9726 ContainsUndef &= S.empty();
9732 const std::string getAsStr(
Attributor *
A)
const override {
9741 return indicatePessimisticFixpoint();
9745struct AAPotentialConstantValuesArgument final
9746 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9747 AAPotentialConstantValuesImpl,
9748 PotentialConstantIntValuesState> {
9750 AAPotentialConstantValuesImpl,
9756 void trackStatistics()
const override {
9761struct AAPotentialConstantValuesReturned
9762 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9763 AAPotentialConstantValuesImpl> {
9765 AAPotentialConstantValuesImpl>;
9770 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9771 indicatePessimisticFixpoint();
9772 Base::initialize(
A);
9776 void trackStatistics()
const override {
9781struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9783 : AAPotentialConstantValuesImpl(IRP,
A) {}
9787 AAPotentialConstantValuesImpl::initialize(
A);
9791 Value &
V = getAssociatedValue();
9793 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9794 unionAssumed(
C->getValue());
9795 indicateOptimisticFixpoint();
9799 if (isa<UndefValue>(&V)) {
9800 unionAssumedWithUndef();
9801 indicateOptimisticFixpoint();
9805 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9808 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9811 indicatePessimisticFixpoint();
9814 << getAssociatedValue() <<
"\n");
9828 case Instruction::Trunc:
9829 return Src.trunc(ResultBitWidth);
9830 case Instruction::SExt:
9831 return Src.sext(ResultBitWidth);
9832 case Instruction::ZExt:
9833 return Src.zext(ResultBitWidth);
9834 case Instruction::BitCast:
9841 bool &SkipOperation,
bool &Unsupported) {
9848 switch (BinOpcode) {
9852 case Instruction::Add:
9854 case Instruction::Sub:
9856 case Instruction::Mul:
9858 case Instruction::UDiv:
9860 SkipOperation =
true;
9864 case Instruction::SDiv:
9866 SkipOperation =
true;
9870 case Instruction::URem:
9872 SkipOperation =
true;
9876 case Instruction::SRem:
9878 SkipOperation =
true;
9882 case Instruction::Shl:
9884 case Instruction::LShr:
9886 case Instruction::AShr:
9888 case Instruction::And:
9890 case Instruction::Or:
9892 case Instruction::Xor:
9897 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9899 bool SkipOperation =
false;
9902 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9907 unionAssumed(Result);
9908 return isValidState();
9912 auto AssumedBefore = getAssumed();
9916 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9917 SetTy LHSAAPVS, RHSAAPVS;
9919 LHSContainsUndef,
false) ||
9921 RHSContainsUndef,
false))
9922 return indicatePessimisticFixpoint();
9925 bool MaybeTrue =
false, MaybeFalse =
false;
9927 if (LHSContainsUndef && RHSContainsUndef) {
9930 unionAssumedWithUndef();
9931 }
else if (LHSContainsUndef) {
9932 for (
const APInt &R : RHSAAPVS) {
9933 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9934 MaybeTrue |= CmpResult;
9935 MaybeFalse |= !CmpResult;
9936 if (MaybeTrue & MaybeFalse)
9937 return indicatePessimisticFixpoint();
9939 }
else if (RHSContainsUndef) {
9940 for (
const APInt &L : LHSAAPVS) {
9941 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9942 MaybeTrue |= CmpResult;
9943 MaybeFalse |= !CmpResult;
9944 if (MaybeTrue & MaybeFalse)
9945 return indicatePessimisticFixpoint();
9948 for (
const APInt &L : LHSAAPVS) {
9949 for (
const APInt &R : RHSAAPVS) {
9950 bool CmpResult = calculateICmpInst(ICI, L, R);
9951 MaybeTrue |= CmpResult;
9952 MaybeFalse |= !CmpResult;
9953 if (MaybeTrue & MaybeFalse)
9954 return indicatePessimisticFixpoint();
9959 unionAssumed(
APInt( 1, 1));
9961 unionAssumed(
APInt( 1, 0));
9962 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9963 : ChangeStatus::CHANGED;
9967 auto AssumedBefore = getAssumed();
9971 bool UsedAssumedInformation =
false;
9972 std::optional<Constant *>
C =
A.getAssumedConstant(
9973 *
SI->getCondition(), *
this, UsedAssumedInformation);
9976 bool OnlyLeft =
false, OnlyRight =
false;
9977 if (
C && *
C && (*C)->isOneValue())
9979 else if (
C && *
C && (*C)->isZeroValue())
9982 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9983 SetTy LHSAAPVS, RHSAAPVS;
9986 LHSContainsUndef,
false))
9987 return indicatePessimisticFixpoint();
9991 RHSContainsUndef,
false))
9992 return indicatePessimisticFixpoint();
9994 if (OnlyLeft || OnlyRight) {
9996 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9997 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
10000 unionAssumedWithUndef();
10002 for (
const auto &It : *OpAA)
10006 }
else if (LHSContainsUndef && RHSContainsUndef) {
10008 unionAssumedWithUndef();
10010 for (
const auto &It : LHSAAPVS)
10012 for (
const auto &It : RHSAAPVS)
10015 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10016 : ChangeStatus::CHANGED;
10020 auto AssumedBefore = getAssumed();
10022 return indicatePessimisticFixpoint();
10027 bool SrcContainsUndef =
false;
10030 SrcContainsUndef,
false))
10031 return indicatePessimisticFixpoint();
10033 if (SrcContainsUndef)
10034 unionAssumedWithUndef();
10036 for (
const APInt &S : SrcPVS) {
10037 APInt T = calculateCastInst(CI, S, ResultBitWidth);
10041 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10042 : ChangeStatus::CHANGED;
10046 auto AssumedBefore = getAssumed();
10050 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10051 SetTy LHSAAPVS, RHSAAPVS;
10053 LHSContainsUndef,
false) ||
10055 RHSContainsUndef,
false))
10056 return indicatePessimisticFixpoint();
10061 if (LHSContainsUndef && RHSContainsUndef) {
10062 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10063 return indicatePessimisticFixpoint();
10064 }
else if (LHSContainsUndef) {
10065 for (
const APInt &R : RHSAAPVS) {
10066 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10067 return indicatePessimisticFixpoint();
10069 }
else if (RHSContainsUndef) {
10070 for (
const APInt &L : LHSAAPVS) {
10071 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10072 return indicatePessimisticFixpoint();
10075 for (
const APInt &L : LHSAAPVS) {
10076 for (
const APInt &R : RHSAAPVS) {
10077 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10078 return indicatePessimisticFixpoint();
10082 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10083 : ChangeStatus::CHANGED;
10087 auto AssumedBefore = getAssumed();
10089 bool ContainsUndef;
10091 ContainsUndef,
true))
10092 return indicatePessimisticFixpoint();
10093 if (ContainsUndef) {
10094 unionAssumedWithUndef();
10099 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10100 : ChangeStatus::CHANGED;
10105 Value &
V = getAssociatedValue();
10108 if (
auto *ICI = dyn_cast<ICmpInst>(
I))
10109 return updateWithICmpInst(
A, ICI);
10111 if (
auto *SI = dyn_cast<SelectInst>(
I))
10112 return updateWithSelectInst(
A, SI);
10114 if (
auto *CI = dyn_cast<CastInst>(
I))
10115 return updateWithCastInst(
A, CI);
10117 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I))
10118 return updateWithBinaryOperator(
A, BinOp);
10120 if (isa<PHINode>(
I) || isa<LoadInst>(
I))
10121 return updateWithInstruction(
A,
I);
10123 return indicatePessimisticFixpoint();
10127 void trackStatistics()
const override {
10132struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10134 : AAPotentialConstantValuesImpl(IRP,
A) {}
10139 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10144 void trackStatistics()
const override {
10149struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10151 : AAPotentialConstantValuesFunction(IRP,
A) {}
10154 void trackStatistics()
const override {
10159struct AAPotentialConstantValuesCallSiteReturned
10160 : AACalleeToCallSite<AAPotentialConstantValues,
10161 AAPotentialConstantValuesImpl> {
10162 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10165 AAPotentialConstantValuesImpl>(IRP,
A) {}
10168 void trackStatistics()
const override {
10173struct AAPotentialConstantValuesCallSiteArgument
10174 : AAPotentialConstantValuesFloating {
10175 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10177 : AAPotentialConstantValuesFloating(IRP,
A) {}
10181 AAPotentialConstantValuesImpl::initialize(
A);
10182 if (isAtFixpoint())
10185 Value &
V = getAssociatedValue();
10187 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
10188 unionAssumed(
C->getValue());
10189 indicateOptimisticFixpoint();
10193 if (isa<UndefValue>(&V)) {
10194 unionAssumedWithUndef();
10195 indicateOptimisticFixpoint();
10202 Value &
V = getAssociatedValue();
10203 auto AssumedBefore = getAssumed();
10207 return indicatePessimisticFixpoint();
10208 const auto &S = AA->getAssumed();
10210 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10211 : ChangeStatus::CHANGED;
10215 void trackStatistics()
const override {
10224 bool IgnoreSubsumingPositions) {
10225 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10226 "Unexpected attribute kind");
10227 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10228 Attribute::NoUndef))
10248 Value &V = getAssociatedValue();
10249 if (isa<UndefValue>(V))
10250 indicatePessimisticFixpoint();
10251 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10257 const Value *UseV =
U->get();
10266 bool TrackUse =
false;
10269 if (isa<CastInst>(*
I) || isa<GetElementPtrInst>(*
I))
10275 const std::string getAsStr(
Attributor *
A)
const override {
10276 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10283 bool UsedAssumedInformation =
false;
10284 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10285 UsedAssumedInformation))
10286 return ChangeStatus::UNCHANGED;
10290 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10293 return ChangeStatus::UNCHANGED;
10294 return AANoUndef::manifest(
A);
10298struct AANoUndefFloating :
public AANoUndefImpl {
10300 : AANoUndefImpl(IRP,
A) {}
10304 AANoUndefImpl::initialize(
A);
10305 if (!getState().isAtFixpoint() && getAnchorScope() &&
10306 !getAnchorScope()->isDeclaration())
10308 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10313 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10314 bool IsKnownNoUndef;
10315 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10316 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10320 bool UsedAssumedInformation =
false;
10321 Value *AssociatedValue = &getAssociatedValue();
10323 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10328 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10336 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10337 return indicatePessimisticFixpoint();
10338 return ChangeStatus::UNCHANGED;
10341 for (
const auto &VAC : Values)
10343 return indicatePessimisticFixpoint();
10345 return ChangeStatus::UNCHANGED;
10352struct AANoUndefReturned final
10353 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10355 : AAReturnedFromReturnedValues<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10361struct AANoUndefArgument final
10362 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10364 : AAArgumentFromCallSiteArguments<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10370struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10372 : AANoUndefFloating(IRP,
A) {}
10378struct AANoUndefCallSiteReturned final
10379 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10381 : AACalleeToCallSite<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10396 if (isa<UndefValue>(V)) {
10397 indicateOptimisticFixpoint();
10402 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10403 for (
const auto &Attr : Attrs) {
10414 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10421 auto *CB = dyn_cast<CallBase>(
I);
10430 if (
auto *NoFPAA =
A.getAAFor<
AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10431 State.addKnownBits(NoFPAA->getState().getKnown());
10435 const std::string getAsStr(
Attributor *
A)
const override {
10436 std::string
Result =
"nofpclass";
10438 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10448struct AANoFPClassFloating :
public AANoFPClassImpl {
10450 : AANoFPClassImpl(IRP,
A) {}
10455 bool UsedAssumedInformation =
false;
10456 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10458 Values.
push_back({getAssociatedValue(), getCtxI()});
10464 DepClassTy::REQUIRED);
10465 if (!AA ||
this == AA) {
10466 T.indicatePessimisticFixpoint();
10472 return T.isValidState();
10475 for (
const auto &VAC : Values)
10476 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10477 return indicatePessimisticFixpoint();
10483 void trackStatistics()
const override {
10488struct AANoFPClassReturned final
10489 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10490 AANoFPClassImpl::StateType, false,
10491 Attribute::None, false> {
10493 : AAReturnedFromReturnedValues<
AANoFPClass, AANoFPClassImpl,
10494 AANoFPClassImpl::StateType,
false,
10498 void trackStatistics()
const override {
10503struct AANoFPClassArgument final
10504 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10506 : AAArgumentFromCallSiteArguments<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10512struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10514 : AANoFPClassFloating(IRP,
A) {}
10517 void trackStatistics()
const override {
10522struct AANoFPClassCallSiteReturned final
10523 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10525 : AACalleeToCallSite<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10528 void trackStatistics()
const override {
10537 return CalledFunctions;
10540 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10542 bool hasNonAsmUnknownCallee()
const override {
10543 return HasUnknownCalleeNonAsm;
10546 const std::string getAsStr(
Attributor *
A)
const override {
10547 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10548 std::to_string(CalledFunctions.size()) +
"]";
10551 void trackStatistics()
const override {}
10555 if (CalledFunctions.insert(Fn)) {
10556 Change = ChangeStatus::CHANGED;
10562 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10563 if (!HasUnknownCallee)
10564 Change = ChangeStatus::CHANGED;
10565 if (NonAsm && !HasUnknownCalleeNonAsm)
10566 Change = ChangeStatus::CHANGED;
10567 HasUnknownCalleeNonAsm |= NonAsm;
10568 HasUnknownCallee =
true;
10576 bool HasUnknownCallee =
false;
10579 bool HasUnknownCalleeNonAsm =
false;
10582struct AACallEdgesCallSite :
public AACallEdgesImpl {
10584 : AACallEdgesImpl(IRP,
A) {}
10590 if (
Function *Fn = dyn_cast<Function>(&V)) {
10591 addCalledFunction(Fn, Change);
10593 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10594 setHasUnknownCallee(
true, Change);
10604 if (isa<Constant>(V)) {
10605 VisitValue(*V, CtxI);
10609 bool UsedAssumedInformation =
false;
10615 for (
auto &VAC : Values)
10616 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10619 CallBase *CB = cast<CallBase>(getCtxI());
10622 if (
IA->hasSideEffects() &&
10625 setHasUnknownCallee(
false, Change);
10632 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10633 if (IndirectCallAA->foreachCallee(
10634 [&](
Function *Fn) { return VisitValue(*Fn, CB); }))
10643 for (
const Use *U : CallbackUses)
10644 ProcessCalledOperand(
U->get(), CB);
10650struct AACallEdgesFunction :
public AACallEdgesImpl {
10652 : AACallEdgesImpl(IRP,
A) {}
10659 CallBase &CB = cast<CallBase>(Inst);
10665 if (CBEdges->hasNonAsmUnknownCallee())
10666 setHasUnknownCallee(
true, Change);
10667 if (CBEdges->hasUnknownCallee())
10668 setHasUnknownCallee(
false, Change);
10670 for (
Function *
F : CBEdges->getOptimisticEdges())
10671 addCalledFunction(
F, Change);
10677 bool UsedAssumedInformation =
false;
10678 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10679 UsedAssumedInformation,
10683 setHasUnknownCallee(
true, Change);
10692struct AAInterFnReachabilityFunction
10693 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10694 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10698 bool instructionCanReach(
10701 assert(
From.getFunction() == getAnchorScope() &&
"Queried the wrong AA!");
10702 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10704 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
10705 typename RQITy::Reachable
Result;
10706 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10707 return NonConstThis->isReachableImpl(
A, StackRQI,
10709 return Result == RQITy::Reachable::Yes;
10713 bool IsTemporaryRQI)
override {
10716 if (EntryI != RQI.From &&
10717 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10718 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10721 auto CheckReachableCallBase = [&](
CallBase *CB) {
10724 if (!CBEdges || !CBEdges->getState().isValidState())
10727 if (CBEdges->hasUnknownCallee())
10730 for (
Function *Fn : CBEdges->getOptimisticEdges()) {
10741 if (Fn == getAnchorScope()) {
10742 if (EntryI == RQI.From)
10749 DepClassTy::OPTIONAL);
10752 if (!InterFnReachability ||
10762 DepClassTy::OPTIONAL);
10768 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10771 A, *RQI.From, CBInst, RQI.ExclusionSet);
10774 bool UsedExclusionSet =
true;
10775 bool UsedAssumedInformation =
false;
10776 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10777 UsedAssumedInformation,
10779 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10782 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10786 void trackStatistics()
const override {}
10790template <
typename AAType>
10791static std::optional<Constant *>
10802 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
10804 if (!COpt.has_value()) {
10806 return std::nullopt;
10808 if (
auto *
C = *COpt) {
10819 std::optional<Value *> V;
10820 for (
auto &It : Values) {
10822 if (V.has_value() && !*V)
10825 if (!V.has_value())
10839 if (
A.hasSimplificationCallback(getIRPosition())) {
10840 indicatePessimisticFixpoint();
10843 Value *Stripped = getAssociatedValue().stripPointerCasts();
10844 if (isa<Constant>(Stripped) && !isa<ConstantExpr>(Stripped)) {
10845 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10847 indicateOptimisticFixpoint();
10850 AAPotentialValues::initialize(
A);
10854 const std::string getAsStr(
Attributor *
A)
const override {
10861 template <
typename AAType>
10862 static std::optional<Value *> askOtherAA(
Attributor &
A,
10867 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10869 return std::nullopt;
10881 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10882 for (
const auto &U : CB->
args()) {
10892 Type &Ty = *getAssociatedType();
10893 std::optional<Value *> SimpleV =
10894 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10895 if (SimpleV.has_value() && !*SimpleV) {
10897 *
this, ValIRP, DepClassTy::OPTIONAL);
10898 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10899 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10900 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10901 if (PotentialConstantsAA->undefIsContained())
10906 if (!SimpleV.has_value())
10913 if (isa<ConstantInt>(VPtr))
10918 State.unionAssumed({{*VPtr, CtxI}, S});
10928 return II.I ==
I &&
II.S == S;
10931 return std::tie(
I, S) < std::tie(
II.I,
II.S);
10941 bool UsedAssumedInformation =
false;
10943 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10944 UsedAssumedInformation))
10947 for (
auto &It : Values)
10948 ValueScopeMap[It] += CS;
10950 for (
auto &It : ValueScopeMap)
10951 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10958 auto NewS = StateType::getBestState(getState());
10959 for (
const auto &It : getAssumedSet()) {
10962 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10965 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10973 getState() = StateType::getBestState(getState());
10974 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10981 return indicatePessimisticFixpoint();
10989 if (!getAssumedSimplifiedValues(
A, Values, S))
10991 Value &OldV = getAssociatedValue();
10992 if (isa<UndefValue>(OldV))
10994 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10995 if (!NewV || NewV == &OldV)
11000 if (
A.changeAfterManifest(getIRPosition(), *NewV))
11006 bool getAssumedSimplifiedValues(
11008 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
11009 if (!isValidState())
11011 bool UsedAssumedInformation =
false;
11012 for (
const auto &It : getAssumedSet())
11013 if (It.second & S) {
11014 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
11015 isa<SelectInst>(It.first.getValue()))) {
11016 if (
A.getAssumedSimplifiedValues(
11018 this, Values, S, UsedAssumedInformation))
11023 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11028struct AAPotentialValuesFloating : AAPotentialValuesImpl {
11030 : AAPotentialValuesImpl(IRP,
A) {}
11034 auto AssumedBefore = getAssumed();
11036 genericValueTraversal(
A, &getAssociatedValue());
11038 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11039 : ChangeStatus::CHANGED;
11043 struct LivenessInfo {
11044 const AAIsDead *LivenessAA =
nullptr;
11045 bool AnyDead =
false;
11058 bool UsedAssumedInformation =
false;
11060 auto GetSimplifiedValues = [&](
Value &
V,
11062 if (!
A.getAssumedSimplifiedValues(
11068 return Values.
empty();
11070 if (GetSimplifiedValues(*
LHS, LHSValues))
11072 if (GetSimplifiedValues(*
RHS, RHSValues))
11084 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11092 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11093 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11095 nullptr,
II.S, getAnchorScope());
11101 if (&LHSV == &RHSV &&
11105 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11112 if (TypedLHS && TypedRHS) {
11114 if (NewV && NewV != &Cmp) {
11115 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11125 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11126 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11127 if (!LHSIsNull && !RHSIsNull)
11133 assert((LHSIsNull || RHSIsNull) &&
11134 "Expected nullptr versus non-nullptr comparison at this point");
11137 unsigned PtrIdx = LHSIsNull;
11138 bool IsKnownNonNull;
11139 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11141 DepClassTy::REQUIRED, IsKnownNonNull);
11142 if (!IsAssumedNonNull)
11148 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11153 for (
auto &LHSValue : LHSValues)
11154 for (
auto &RHSValue : RHSValues)
11155 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11163 bool UsedAssumedInformation =
false;
11165 std::optional<Constant *>
C =
11166 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11167 bool NoValueYet = !
C.has_value();
11168 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11170 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11175 }
else if (&SI == &getAssociatedValue()) {
11180 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11182 if (!SimpleV.has_value())
11185 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11197 bool UsedAssumedInformation =
false;
11199 PotentialValueOrigins, *
this,
11200 UsedAssumedInformation,
11202 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11203 "loaded values for load instruction "
11214 if (!
I || isa<AssumeInst>(
I))
11216 if (
auto *SI = dyn_cast<StoreInst>(
I))
11217 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11219 UsedAssumedInformation,
11221 return A.isAssumedDead(*
I,
this,
nullptr,
11222 UsedAssumedInformation,
11225 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11226 "and we cannot delete all the stores: "
11237 bool AllLocal = ScopeIsLocal;
11242 if (!DynamicallyUnique) {
11243 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11244 "values are dynamically unique: "
11249 for (
auto *PotentialCopy : PotentialCopies) {
11251 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11256 if (!AllLocal && ScopeIsLocal)
11261 bool handlePHINode(
11265 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11266 LivenessInfo &LI = LivenessAAs[&
F];
11267 if (!LI.LivenessAA)
11273 if (&
PHI == &getAssociatedValue()) {
11274 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11277 *
PHI.getFunction());
11281 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11283 if (LI.LivenessAA &&
11284 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11294 if (CyclePHI && isa<Instruction>(V) &&
11295 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11303 bool UsedAssumedInformation =
false;
11304 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11306 if (!SimpleV.has_value())
11310 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11319 bool SomeSimplified =
false;
11320 bool UsedAssumedInformation =
false;
11325 const auto &SimplifiedOp =
A.getAssumedSimplified(
11330 if (!SimplifiedOp.has_value())
11334 NewOps[
Idx] = *SimplifiedOp;
11338 SomeSimplified |= (NewOps[
Idx] !=
Op);
11344 if (!SomeSimplified)
11351 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11357 if (!NewV || NewV == &
I)
11360 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11370 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11372 CI->getPredicate(),
II, Worklist);
11374 switch (
I.getOpcode()) {
11375 case Instruction::Select:
11376 return handleSelectInst(
A, cast<SelectInst>(
I),
II, Worklist);
11377 case Instruction::PHI:
11378 return handlePHINode(
A, cast<PHINode>(
I),
II, Worklist, LivenessAAs);
11379 case Instruction::Load:
11380 return handleLoadInst(
A, cast<LoadInst>(
I),
II, Worklist);
11382 return handleGenericInst(
A,
I,
II, Worklist);
11409 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11410 << Iteration <<
"!\n");
11411 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11417 Value *NewV =
nullptr;
11418 if (
V->getType()->isPointerTy()) {
11421 if (
auto *CB = dyn_cast<CallBase>(V))
11431 if (NewV && NewV != V) {
11432 Worklist.
push_back({{*NewV, CtxI}, S});
11436 if (
auto *
I = dyn_cast<Instruction>(V)) {
11441 if (V != InitialV || isa<Argument>(V))
11446 if (V == InitialV && CtxI == getCtxI()) {
11447 indicatePessimisticFixpoint();
11451 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11452 }
while (!Worklist.
empty());
11456 for (
auto &It : LivenessAAs)
11457 if (It.second.AnyDead)
11458 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11462 void trackStatistics()
const override {
11467struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11468 using Base = AAPotentialValuesImpl;
11474 auto &Arg = cast<Argument>(getAssociatedValue());
11476 indicatePessimisticFixpoint();
11481 auto AssumedBefore = getAssumed();
11483 unsigned ArgNo = getCalleeArgNo();
11485 bool UsedAssumedInformation =
false;
11489 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11492 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11494 UsedAssumedInformation))
11497 return isValidState();
11500 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11502 UsedAssumedInformation))
11503 return indicatePessimisticFixpoint();
11505 Function *Fn = getAssociatedFunction();
11506 bool AnyNonLocal =
false;
11507 for (
auto &It : Values) {
11508 if (isa<Constant>(It.getValue())) {
11509 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11514 return indicatePessimisticFixpoint();
11516 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11518 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11524 AnyNonLocal =
true;
11526 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11528 giveUpOnIntraprocedural(
A);
11530 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11531 : ChangeStatus::CHANGED;
11535 void trackStatistics()
const override {
11540struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11541 using Base = AAPotentialValuesFloating;
11548 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11549 indicatePessimisticFixpoint();
11556 ReturnedArg = &Arg;
11559 if (!
A.isFunctionIPOAmendable(*
F) ||
11560 A.hasSimplificationCallback(getIRPosition())) {
11562 indicatePessimisticFixpoint();
11564 indicateOptimisticFixpoint();
11570 auto AssumedBefore = getAssumed();
11571 bool UsedAssumedInformation =
false;
11574 Function *AnchorScope = getAnchorScope();
11580 UsedAssumedInformation,
11586 bool AllInterAreIntra =
false;
11594 addValue(
A, getState(), *
VAC.getValue(),
11595 VAC.getCtxI() ?
VAC.getCtxI() : CtxI,
11598 if (AllInterAreIntra)
11605 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11608 bool AddValues =
true;
11609 if (isa<PHINode>(RetI.getOperand(0)) ||
11610 isa<SelectInst>(RetI.getOperand(0))) {
11611 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11615 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11618 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11619 UsedAssumedInformation,
11621 return indicatePessimisticFixpoint();
11624 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11625 : ChangeStatus::CHANGED;
11630 return ChangeStatus::UNCHANGED;
11632 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11634 return ChangeStatus::UNCHANGED;
11635 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11637 return ChangeStatus::UNCHANGED;
11640 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11642 "Number of function with unique return");
11643 Changed |=
A.manifestAttrs(
11650 Value *RetOp = RetI.getOperand(0);
11651 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11654 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11655 Changed = ChangeStatus::CHANGED;
11658 bool UsedAssumedInformation =
false;
11659 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11660 UsedAssumedInformation,
11670 void trackStatistics()
const override{
11677struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11679 : AAPotentialValuesImpl(IRP,
A) {}
11688 void trackStatistics()
const override {
11693struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11695 : AAPotentialValuesFunction(IRP,
A) {}
11698 void trackStatistics()
const override {
11703struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11705 : AAPotentialValuesImpl(IRP,
A) {}
11709 auto AssumedBefore = getAssumed();
11713 return indicatePessimisticFixpoint();
11715 bool UsedAssumedInformation =
false;
11716 auto *CB = cast<CallBase>(getCtxI());
11719 UsedAssumedInformation))
11720 return indicatePessimisticFixpoint();
11727 Values, S, UsedAssumedInformation))
11730 for (
auto &It : Values) {
11731 Value *
V = It.getValue();
11732 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11733 V, *CB, *
this, UsedAssumedInformation);
11734 if (!CallerV.has_value()) {
11738 V = *CallerV ? *CallerV :
V;
11744 giveUpOnIntraprocedural(
A);
11747 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11752 return indicatePessimisticFixpoint();
11754 return indicatePessimisticFixpoint();
11755 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11756 : ChangeStatus::CHANGED;
11760 return AAPotentialValues::indicatePessimisticFixpoint();
11764 void trackStatistics()
const override {
11769struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11771 : AAPotentialValuesFloating(IRP,
A) {}
11774 void trackStatistics()
const override {
11790 if (getKnown().isUniversal())
11791 return ChangeStatus::UNCHANGED;
11795 getAssumed().getSet().
end());
11797 return A.manifestAttrs(IRP,
11800 llvm::join(Set,
",")),
11805 return isValidState() && setContains(Assumption);
11809 const std::string getAsStr(
Attributor *
A)
const override {
11810 const SetContents &Known = getKnown();
11811 const SetContents &Assumed = getAssumed();
11815 const std::string KnownStr = llvm::join(Set,
",");
11817 std::string AssumedStr =
"Universal";
11818 if (!Assumed.isUniversal()) {
11819 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11820 AssumedStr = llvm::join(Set,
",");
11822 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11837struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11839 : AAAssumptionInfoImpl(IRP,
A,
11844 bool Changed =
false;
11849 DepClassTy::REQUIRED);
11853 Changed |= getIntersection(AssumptionAA->getAssumed());
11854 return !getAssumed().empty() || !getKnown().empty();
11857 bool UsedAssumedInformation =
false;
11862 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11863 UsedAssumedInformation))
11864 return indicatePessimisticFixpoint();
11866 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11869 void trackStatistics()
const override {}
11873struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11876 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11887 auto *AssumptionAA =
11890 return indicatePessimisticFixpoint();
11891 bool Changed = getIntersection(AssumptionAA->getAssumed());
11892 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11896 void trackStatistics()
const override {}
11908 return Assumptions;
11923struct AAUnderlyingObjectsImpl
11929 const std::string getAsStr(
Attributor *
A)
const override {
11930 if (!isValidState())
11931 return "<invalid>";
11934 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
11935 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
11937 if (!InterAssumedUnderlyingObjects.empty()) {
11938 OS <<
"inter objects:\n";
11939 for (
auto *Obj : InterAssumedUnderlyingObjects)
11940 OS << *Obj <<
'\n';
11942 if (!IntraAssumedUnderlyingObjects.empty()) {
11943 OS <<
"intra objects:\n";
11944 for (
auto *Obj : IntraAssumedUnderlyingObjects)
11945 OS << *Obj <<
'\n';
11951 void trackStatistics()
const override {}
11955 auto &
Ptr = getAssociatedValue();
11957 bool UsedAssumedInformation =
false;
11964 Scope, UsedAssumedInformation))
11967 bool Changed =
false;
11969 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11970 auto &
VAC = Values[
I];
11971 auto *Obj =
VAC.getValue();
11973 if (!SeenObjects.
insert(UO ? UO : Obj).second)
11975 if (UO && UO != Obj) {
11976 if (isa<AllocaInst>(UO) || isa<GlobalValue>(UO)) {
11977 Changed |= UnderlyingObjects.
insert(UO);
11983 auto Pred = [&](
Value &
V) {
11985 Changed |= UnderlyingObjects.
insert(UO);
11991 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11993 "The forall call should not return false at this position");
11998 if (isa<SelectInst>(Obj)) {
11999 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
12000 UsedAssumedInformation);
12003 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
12006 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
12008 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
12009 Scope, UsedAssumedInformation);
12014 Changed |= UnderlyingObjects.
insert(Obj);
12020 bool Changed =
false;
12023 if (!UsedAssumedInformation)
12024 indicateOptimisticFixpoint();
12025 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12028 bool forallUnderlyingObjects(
12031 if (!isValidState())
12032 return Pred(getAssociatedValue());
12035 ? IntraAssumedUnderlyingObjects
12036 : InterAssumedUnderlyingObjects;
12037 for (
Value *Obj : AssumedUnderlyingObjects)
12050 bool Changed =
false;
12053 auto Pred = [&](
Value &
V) {
12054 Changed |= UnderlyingObjects.
insert(&V);
12057 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12059 "The forall call should not return false at this position");
12070struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12072 : AAUnderlyingObjectsImpl(IRP,
A) {}
12075struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12077 : AAUnderlyingObjectsImpl(IRP,
A) {}
12080struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12082 : AAUnderlyingObjectsImpl(IRP,
A) {}
12085struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12087 : AAUnderlyingObjectsImpl(IRP,
A) {}
12090struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12092 : AAUnderlyingObjectsImpl(IRP,
A) {}
12095struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12097 : AAUnderlyingObjectsImpl(IRP,
A) {}
12100struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12102 : AAUnderlyingObjectsImpl(IRP,
A) {}
12117 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12123 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12124 << *UInst <<
"\n");
12126 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12127 int Idx = &
Cmp->getOperandUse(0) == &
U;
12128 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12130 return U == &getAnchorValue();
12134 if (isa<ReturnInst>(UInst)) {
12136 Worklist.
push_back(ACS.getInstruction());
12139 bool UsedAssumedInformation =
false;
12141 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12143 UsedAssumedInformation))
12150 auto *CB = dyn_cast<CallBase>(UInst);
12161 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12170 unsigned NumUsesBefore =
Uses.size();
12176 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12184 return checkUse(
A, U, Follow, Worklist);
12186 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12187 Uses.insert(&OldU);
12191 while (!Worklist.
empty()) {
12193 if (!Visited.
insert(V).second)
12195 if (!
A.checkForAllUses(UsePred, *
this, *V,
12197 DepClassTy::OPTIONAL,
12198 true, EquivalentUseCB)) {
12199 return indicatePessimisticFixpoint();
12203 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12204 : ChangeStatus::CHANGED;
12207 bool isPotentialUse(
const Use &U)
const override {
12208 return !isValidState() ||
Uses.contains(&U);
12213 return ChangeStatus::UNCHANGED;
12217 const std::string getAsStr(
Attributor *
A)
const override {
12218 return "[" + std::to_string(
Uses.size()) +
" uses]";
12221 void trackStatistics()
const override {
12239 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12240 if (!MD && !
A.isClosedWorldModule())
12244 for (
const auto &
Op : MD->operands())
12245 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12246 PotentialCallees.insert(Callee);
12247 }
else if (
A.isClosedWorldModule()) {
12249 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12250 PotentialCallees.insert_range(IndirectlyCallableFunctions);
12253 if (PotentialCallees.empty())
12254 indicateOptimisticFixpoint();
12258 CallBase *CB = cast<CallBase>(getCtxI());
12263 bool AllCalleesKnownNow = AllCalleesKnown;
12265 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12266 bool &UsedAssumedInformation) {
12269 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12271 UsedAssumedInformation = !GIAA->isAtFixpoint();
12275 auto AddPotentialCallees = [&]() {
12276 for (
auto *PotentialCallee : PotentialCallees) {
12277 bool UsedAssumedInformation =
false;
12278 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12279 AssumedCalleesNow.
insert(PotentialCallee);
12285 bool UsedAssumedInformation =
false;
12288 AA::ValueScope::AnyScope,
12289 UsedAssumedInformation)) {
12290 if (PotentialCallees.empty())
12291 return indicatePessimisticFixpoint();
12292 AddPotentialCallees();
12297 auto CheckPotentialCallee = [&](
Function &Fn) {
12298 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12301 auto &CachedResult = FilterResults[&Fn];
12302 if (CachedResult.has_value())
12303 return CachedResult.value();
12305 bool UsedAssumedInformation =
false;
12306 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12307 if (!UsedAssumedInformation)
12308 CachedResult =
false;
12317 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12318 bool IsKnown =
false;
12319 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12321 DepClassTy::OPTIONAL, IsKnown)) {
12323 CachedResult =
false;
12328 CachedResult =
true;
12334 for (
auto &VAC : Values) {
12335 if (isa<UndefValue>(
VAC.getValue()))
12337 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12338 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12341 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12342 if (CheckPotentialCallee(*VACFn))
12343 AssumedCalleesNow.
insert(VACFn);
12346 if (!PotentialCallees.empty()) {
12347 AddPotentialCallees();
12350 AllCalleesKnownNow =
false;
12353 if (AssumedCalleesNow == AssumedCallees &&
12354 AllCalleesKnown == AllCalleesKnownNow)
12355 return ChangeStatus::UNCHANGED;
12357 std::swap(AssumedCallees, AssumedCalleesNow);
12358 AllCalleesKnown = AllCalleesKnownNow;
12359 return ChangeStatus::CHANGED;
12365 if (!AllCalleesKnown && AssumedCallees.empty())
12366 return ChangeStatus::UNCHANGED;
12368 CallBase *CB = cast<CallBase>(getCtxI());
12369 bool UsedAssumedInformation =
false;
12370 if (
A.isAssumedDead(*CB,
this,
nullptr,
12371 UsedAssumedInformation))
12372 return ChangeStatus::UNCHANGED;
12376 if (
FP->getType()->getPointerAddressSpace())
12387 if (AssumedCallees.empty()) {
12388 assert(AllCalleesKnown &&
12389 "Expected all callees to be known if there are none.");
12390 A.changeToUnreachableAfterManifest(CB);
12391 return ChangeStatus::CHANGED;
12395 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12396 auto *NewCallee = AssumedCallees.front();
12399 NumIndirectCallsPromoted++;
12400 return ChangeStatus::CHANGED;
12407 A.deleteAfterManifest(*CB);
12408 return ChangeStatus::CHANGED;
12418 bool SpecializedForAnyCallees =
false;
12419 bool SpecializedForAllCallees = AllCalleesKnown;
12423 for (
Function *NewCallee : AssumedCallees) {
12424 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12425 AssumedCallees.size())) {
12426 SkippedAssumedCallees.
push_back(NewCallee);
12427 SpecializedForAllCallees =
false;
12430 SpecializedForAnyCallees =
true;
12436 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12437 A.registerManifestAddedBasicBlock(*IP->getParent());
12438 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12443 A.registerManifestAddedBasicBlock(*ElseBB);
12445 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12453 auto *CBClone = cast<CallBase>(CB->
clone());
12455 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12456 NumIndirectCallsPromoted++;
12464 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12465 if (!AllCalleesKnown)
12466 return ChangeStatus::UNCHANGED;
12467 MDBuilder MDB(IndirectCB.getContext());
12468 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12469 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12470 return ChangeStatus::CHANGED;
12473 if (!SpecializedForAnyCallees)
12474 return AttachCalleeMetadata(*CB);
12477 if (SpecializedForAllCallees) {
12481 IP->eraseFromParent();
12483 auto *CBClone = cast<CallInst>(CB->
clone());
12484 CBClone->setName(CB->
getName());
12485 CBClone->insertBefore(*IP->getParent(), IP);
12486 NewCalls.
push_back({CBClone,
nullptr});
12487 AttachCalleeMetadata(*CBClone);
12494 CB->
getParent()->getFirstInsertionPt());
12495 for (
auto &It : NewCalls) {
12497 Instruction *CallRet = It.second ? It.second : It.first;
12509 A.deleteAfterManifest(*CB);
12510 Changed = ChangeStatus::CHANGED;
12516 const std::string getAsStr(
Attributor *
A)
const override {
12517 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12518 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12522 void trackStatistics()
const override {
12523 if (AllCalleesKnown) {
12525 Eliminated, CallSites,
12526 "Number of indirect call sites eliminated via specialization")
12529 "Number of indirect call sites specialized")
12534 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12551 bool AllCalleesKnown =
true;
12558struct AAInvariantLoadPointerImpl
12560 AAInvariantLoadPointer> {
12564 IS_NOALIAS = 1 << 0,
12567 IS_NOEFFECT = 1 << 1,
12569 IS_LOCALLY_INVARIANT = 1 << 2,
12571 IS_LOCALLY_CONSTRAINED = 1 << 3,
12573 IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
12574 IS_LOCALLY_CONSTRAINED,
12576 static_assert(getBestState() == IS_BEST_STATE,
"Unexpected best state");
12586 bool isKnownInvariant()
const final {
12587 return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
12590 bool isKnownLocallyInvariant()
const final {
12591 if (isKnown(IS_LOCALLY_INVARIANT))
12593 return isKnown(IS_NOALIAS | IS_NOEFFECT);
12596 bool isAssumedInvariant()
const final {
12597 return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
12600 bool isAssumedLocallyInvariant()
const final {
12601 if (isAssumed(IS_LOCALLY_INVARIANT))
12603 return isAssumed(IS_NOALIAS | IS_NOEFFECT);
12609 Changed |= updateNoAlias(
A);
12610 if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
12611 return indicatePessimisticFixpoint();
12613 Changed |= updateNoEffect(
A);
12615 Changed |= updateLocalInvariance(
A);
12621 if (!isKnownInvariant())
12622 return ChangeStatus::UNCHANGED;
12625 const Value *
Ptr = &getAssociatedValue();
12626 const auto TagInvariantLoads = [&](
const Use &
U,
bool &) {
12627 if (
U.get() !=
Ptr)
12629 auto *
I = dyn_cast<Instruction>(
U.getUser());
12635 if (!
A.isRunOn(
I->getFunction()))
12638 if (
I->hasMetadata(LLVMContext::MD_invariant_load))
12641 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
12642 LI->setMetadata(LLVMContext::MD_invariant_load,
12644 Changed = ChangeStatus::CHANGED;
12649 (void)
A.checkForAllUses(TagInvariantLoads, *
this, *
Ptr);
12654 const std::string getAsStr(
Attributor *)
const override {
12655 if (isKnownInvariant())
12656 return "load-invariant pointer";
12657 return "non-invariant pointer";
12661 void trackStatistics()
const override {}
12665 bool requiresNoAlias()
const {
12666 switch (getPositionKind()) {
12672 case IRP_CALL_SITE:
12674 case IRP_CALL_SITE_RETURNED: {
12675 const auto &CB = cast<CallBase>(getAnchorValue());
12679 case IRP_ARGUMENT: {
12680 const Function *
F = getAssociatedFunction();
12681 assert(
F &&
"no associated function for argument");
12687 bool isExternal()
const {
12688 const Function *
F = getAssociatedFunction();
12692 getPositionKind() != IRP_CALL_SITE_RETURNED;
12696 if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
12697 return ChangeStatus::UNCHANGED;
12700 if (
const auto *ANoAlias =
A.getOrCreateAAFor<
AANoAlias>(
12701 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12702 if (ANoAlias->isKnownNoAlias()) {
12703 addKnownBits(IS_NOALIAS);
12704 return ChangeStatus::CHANGED;
12707 if (!ANoAlias->isAssumedNoAlias()) {
12708 removeAssumedBits(IS_NOALIAS);
12709 return ChangeStatus::CHANGED;
12712 return ChangeStatus::UNCHANGED;
12717 if (
const Argument *Arg = getAssociatedArgument()) {
12719 addKnownBits(IS_NOALIAS);
12720 return ChangeStatus::UNCHANGED;
12725 removeAssumedBits(IS_NOALIAS);
12726 return ChangeStatus::CHANGED;
12729 return ChangeStatus::UNCHANGED;
12733 if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
12734 return ChangeStatus::UNCHANGED;
12736 if (!getAssociatedFunction())
12737 return indicatePessimisticFixpoint();
12739 if (isa<AllocaInst>(&getAssociatedValue()))
12740 return indicatePessimisticFixpoint();
12742 const auto HasNoEffectLoads = [&](
const Use &
U,
bool &) {
12743 const auto *LI = dyn_cast<LoadInst>(
U.getUser());
12744 return !LI || !LI->mayHaveSideEffects();
12746 if (!
A.checkForAllUses(HasNoEffectLoads, *
this, getAssociatedValue()))
12747 return indicatePessimisticFixpoint();
12750 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12753 if (!AMemoryBehavior->isAssumedReadOnly())
12754 return indicatePessimisticFixpoint();
12756 if (AMemoryBehavior->isKnownReadOnly()) {
12757 addKnownBits(IS_NOEFFECT);
12758 return ChangeStatus::UNCHANGED;
12761 return ChangeStatus::UNCHANGED;
12764 if (
const Argument *Arg = getAssociatedArgument()) {
12766 addKnownBits(IS_NOEFFECT);
12767 return ChangeStatus::UNCHANGED;
12772 return indicatePessimisticFixpoint();
12775 return ChangeStatus::UNCHANGED;
12779 if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
12780 return ChangeStatus::UNCHANGED;
12784 getIRPosition(),
this, DepClassTy::REQUIRED);
12786 return ChangeStatus::UNCHANGED;
12788 bool UsedAssumedInformation =
false;
12789 const auto IsLocallyInvariantLoadIfPointer = [&](
const Value &
V) {
12790 if (!
V.getType()->isPointerTy())
12792 const auto *IsInvariantLoadPointer =
12794 DepClassTy::REQUIRED);
12796 if (!IsInvariantLoadPointer)
12799 if (IsInvariantLoadPointer->isKnownLocallyInvariant())
12801 if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
12804 UsedAssumedInformation =
true;
12807 if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
12808 return indicatePessimisticFixpoint();
12810 if (
const auto *CB = dyn_cast<CallBase>(&getAnchorValue())) {
12814 if (!IsLocallyInvariantLoadIfPointer(*Arg))
12815 return indicatePessimisticFixpoint();
12820 if (!UsedAssumedInformation) {
12822 addKnownBits(IS_LOCALLY_INVARIANT);
12823 return ChangeStatus::CHANGED;
12826 return ChangeStatus::UNCHANGED;
12830struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
12832 : AAInvariantLoadPointerImpl(IRP,
A) {}
12835struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
12837 : AAInvariantLoadPointerImpl(IRP,
A) {}
12840 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12844struct AAInvariantLoadPointerCallSiteReturned final
12845 : AAInvariantLoadPointerImpl {
12847 : AAInvariantLoadPointerImpl(IRP,
A) {}
12850 const Function *
F = getAssociatedFunction();
12851 assert(
F &&
"no associated function for return from call");
12853 if (!
F->isDeclaration() && !
F->isIntrinsic())
12854 return AAInvariantLoadPointerImpl::initialize(
A);
12856 const auto &CB = cast<CallBase>(getAnchorValue());
12859 return AAInvariantLoadPointerImpl::initialize(
A);
12861 if (
F->onlyReadsMemory() &&
F->hasNoSync())
12862 return AAInvariantLoadPointerImpl::initialize(
A);
12866 indicatePessimisticFixpoint();
12870struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
12872 : AAInvariantLoadPointerImpl(IRP,
A) {}
12875 const Function *
F = getAssociatedFunction();
12876 assert(
F &&
"no associated function for argument");
12879 addKnownBits(IS_LOCALLY_CONSTRAINED);
12883 if (!
F->hasLocalLinkage())
12884 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12888struct AAInvariantLoadPointerCallSiteArgument final
12889 : AAInvariantLoadPointerImpl {
12891 : AAInvariantLoadPointerImpl(IRP,
A) {}
12898template <
typename InstType>
12899static bool makeChange(
Attributor &
A, InstType *MemInst,
const Use &U,
12901 bool UseOriginalValue) {
12902 if (
U.getOperandNo() != InstType::getPointerOperandIndex())
12905 if (MemInst->isVolatile()) {
12907 *MemInst->getFunction());
12908 unsigned NewAS = NewPtrTy->getPointerAddressSpace();
12913 if (UseOriginalValue) {
12914 A.changeUseAfterManifest(
const_cast<Use &
>(U), *OriginalValue);
12920 A.changeUseAfterManifest(
const_cast<Use &
>(U), *
CastInst);
12929 assert(isValidState() &&
"the AA is invalid");
12930 return AssumedAddressSpace;
12935 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12936 "Associated value is not a pointer");
12938 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
12939 indicatePessimisticFixpoint();
12943 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12944 unsigned AS = getAssociatedType()->getPointerAddressSpace();
12945 if (AS != FlatAS) {
12946 [[maybe_unused]]
bool R = takeAddressSpace(AS);
12947 assert(R &&
"The take should happen");
12948 indicateOptimisticFixpoint();
12953 uint32_t OldAddressSpace = AssumedAddressSpace;
12954 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12956 auto CheckAddressSpace = [&](
Value &Obj) {
12958 if (isa<UndefValue>(&Obj))
12962 unsigned ObjAS = Obj.getType()->getPointerAddressSpace();
12963 if (ObjAS != FlatAS)
12964 return takeAddressSpace(ObjAS);
12970 if (
auto *Arg = dyn_cast<Argument>(&Obj))
12972 else if (
auto *
I = dyn_cast<Instruction>(&Obj))
12973 F =
I->getFunction();
12980 if (AssumedAS != ~0U)
12981 return takeAddressSpace(AssumedAS);
12985 return takeAddressSpace(FlatAS);
12989 DepClassTy::REQUIRED);
12990 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
12991 return indicatePessimisticFixpoint();
12993 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12994 : ChangeStatus::CHANGED;
13001 if (NewAS == InvalidAddressSpace ||
13003 return ChangeStatus::UNCHANGED;
13005 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13007 Value *AssociatedValue = &getAssociatedValue();
13008 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
13011 PointerType::get(getAssociatedType()->getContext(), NewAS);
13012 bool UseOriginalValue =
13015 bool Changed =
false;
13017 auto Pred = [&](
const Use &
U,
bool &) {
13018 if (
U.get() != AssociatedValue)
13020 auto *Inst = dyn_cast<Instruction>(
U.getUser());
13027 if (
auto *LI = dyn_cast<LoadInst>(Inst)) {
13029 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13030 }
else if (
auto *SI = dyn_cast<StoreInst>(Inst)) {
13032 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13033 }
else if (
auto *RMW = dyn_cast<AtomicRMWInst>(Inst)) {
13035 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
13036 }
else if (
auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst)) {
13038 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
13045 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
13048 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13052 const std::string getAsStr(
Attributor *
A)
const override {
13053 if (!isValidState())
13054 return "addrspace(<invalid>)";
13055 return "addrspace(" +
13056 (AssumedAddressSpace == InvalidAddressSpace
13058 : std::to_string(AssumedAddressSpace)) +
13063 uint32_t AssumedAddressSpace = InvalidAddressSpace;
13065 bool takeAddressSpace(
uint32_t AS) {
13066 if (AssumedAddressSpace == InvalidAddressSpace) {
13067 AssumedAddressSpace = AS;
13070 return AssumedAddressSpace == AS;
13073 static Value *peelAddrspacecast(
Value *V,
unsigned FlatAS) {
13074 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V)) {
13075 assert(
I->getSrcAddressSpace() != FlatAS &&
13076 "there should not be flat AS -> non-flat AS");
13077 return I->getPointerOperand();
13079 if (
auto *
C = dyn_cast<ConstantExpr>(V))
13080 if (
C->getOpcode() == Instruction::AddrSpaceCast) {
13081 assert(
C->getOperand(0)->getType()->getPointerAddressSpace() !=
13083 "there should not be flat AS -> non-flat AS X");
13084 return C->getOperand(0);
13090struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
13092 : AAAddressSpaceImpl(IRP,
A) {}
13094 void trackStatistics()
const override {
13099struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
13101 : AAAddressSpaceImpl(IRP,
A) {}
13107 (void)indicatePessimisticFixpoint();
13110 void trackStatistics()
const override {
13115struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
13117 : AAAddressSpaceImpl(IRP,
A) {}
13119 void trackStatistics()
const override {
13124struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
13126 : AAAddressSpaceImpl(IRP,
A) {}
13131struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
13133 : AAAddressSpaceImpl(IRP,
A) {}
13139 (void)indicatePessimisticFixpoint();
13142 void trackStatistics()
const override {
13162 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13163 "Associated value is not a pointer");
13167 std::optional<unsigned> FlatAS =
A.getInfoCache().getFlatAddressSpace();
13168 if (!FlatAS.has_value()) {
13169 indicatePessimisticFixpoint();
13175 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13176 if (AS != *FlatAS) {
13178 indicateOptimisticFixpoint();
13183 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13184 uint32_t OldAssumed = getAssumed();
13186 auto CheckAddressSpace = [&](
Value &Obj) {
13187 if (isa<PoisonValue>(&Obj))
13190 unsigned AS = Obj.getType()->getPointerAddressSpace();
13194 removeAS(Obj.getType()->getPointerAddressSpace());
13199 getIRPosition(),
this, DepClassTy::REQUIRED);
13201 return indicatePessimisticFixpoint();
13203 return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
13204 : ChangeStatus::CHANGED;
13209 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13211 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13212 if (AS != FlatAS ||
Map.empty())
13213 return ChangeStatus::UNCHANGED;
13215 LLVMContext &Ctx = getAssociatedValue().getContext();
13216 MDNode *NoAliasASNode =
nullptr;
13219 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13222 unsigned Upper =
I.stop();
13223 unsigned Lower =
I.start();
13224 if (!NoAliasASNode) {
13232 Value *AssociatedValue = &getAssociatedValue();
13233 bool Changed =
false;
13235 auto AddNoAliasAttr = [&](
const Use &
U,
bool &) {
13236 if (
U.get() != AssociatedValue)
13238 Instruction *Inst = dyn_cast<Instruction>(
U.getUser());
13239 if (!Inst || Inst->
hasMetadata(LLVMContext::MD_noalias_addrspace))
13241 if (!isa<LoadInst>(Inst) && !isa<StoreInst>(Inst) &&
13242 !isa<AtomicCmpXchgInst>(Inst) && !isa<AtomicRMWInst>(Inst))
13246 Inst->
setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
13250 (void)
A.checkForAllUses(AddNoAliasAttr, *
this, *AssociatedValue,
13252 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13256 const std::string getAsStr(
Attributor *
A)
const override {
13257 if (!isValidState())
13258 return "<invalid>";
13261 OS <<
"CanNotBeAddrSpace(";
13262 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13263 unsigned Upper =
I.stop();
13264 unsigned Lower =
I.start();
13272 void removeAS(
unsigned AS) {
13273 RangeMap::iterator
I =
Map.find(AS);
13275 if (
I !=
Map.end()) {
13276 unsigned Upper =
I.stop();
13277 unsigned Lower =
I.start();
13281 if (AS != ~((
unsigned)0) && AS + 1 <=
Upper)
13283 if (AS != 0 &&
Lower <= AS - 1)
13290 Map.insert(0,
A.getInfoCache().getMaxAddrSpace(),
true);
13294struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
13296 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13298 void trackStatistics()
const override {
13303struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
13305 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13307 void trackStatistics()
const override {
13312struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
13314 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13316 void trackStatistics()
const override {
13321struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
13323 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13325 void trackStatistics()
const override {
13330struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
13332 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13334 void trackStatistics()
const override {
13345 std::optional<TypeSize> getAllocatedSize()
const override {
13346 assert(isValidState() &&
"the AA is invalid");
13347 return AssumedAllocatedSize;
13350 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
13354 switch (
I->getOpcode()) {
13355 case Instruction::Alloca: {
13360 return std::nullopt;
13370 if (!isa<AllocaInst>(
I))
13371 return indicatePessimisticFixpoint();
13373 bool IsKnownNoCapture;
13374 if (!AA::hasAssumedIRAttr<Attribute::Captures>(
13375 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13376 return indicatePessimisticFixpoint();
13379 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
13382 return indicatePessimisticFixpoint();
13385 return indicatePessimisticFixpoint();
13388 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
13391 if (!AllocationSize)
13392 return indicatePessimisticFixpoint();
13396 if (*AllocationSize == 0)
13397 return indicatePessimisticFixpoint();
13403 return indicatePessimisticFixpoint();
13405 if (BinSize == 0) {
13406 auto NewAllocationSize = std::make_optional<TypeSize>(0,
false);
13407 if (!changeAllocationSize(NewAllocationSize))
13408 return ChangeStatus::UNCHANGED;
13409 return ChangeStatus::CHANGED;
13413 const auto &It = PI->
begin();
13416 if (It->first.Offset != 0)
13417 return indicatePessimisticFixpoint();
13419 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13421 if (SizeOfBin >= *AllocationSize)
13422 return indicatePessimisticFixpoint();
13424 auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8,
false);
13426 if (!changeAllocationSize(NewAllocationSize))
13427 return ChangeStatus::UNCHANGED;
13429 return ChangeStatus::CHANGED;
13435 assert(isValidState() &&
13436 "Manifest should only be called if the state is valid.");
13440 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13442 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13444 switch (
I->getOpcode()) {
13446 case Instruction::Alloca: {
13452 auto *NumBytesToValue =
13453 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
13456 insertPt = std::next(insertPt);
13462 return ChangeStatus::CHANGED;
13470 return ChangeStatus::UNCHANGED;
13474 const std::string getAsStr(
Attributor *
A)
const override {
13475 if (!isValidState())
13476 return "allocationinfo(<invalid>)";
13477 return "allocationinfo(" +
13478 (AssumedAllocatedSize == HasNoAllocationSize
13480 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13485 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13489 bool changeAllocationSize(std::optional<TypeSize>
Size) {
13490 if (AssumedAllocatedSize == HasNoAllocationSize ||
13491 AssumedAllocatedSize !=
Size) {
13492 AssumedAllocatedSize =
Size;
13499struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13501 : AAAllocationInfoImpl(IRP,
A) {}
13503 void trackStatistics()
const override {
13508struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13510 : AAAllocationInfoImpl(IRP,
A) {}
13516 (void)indicatePessimisticFixpoint();
13519 void trackStatistics()
const override {
13524struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13526 : AAAllocationInfoImpl(IRP,
A) {}
13528 void trackStatistics()
const override {
13533struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13535 : AAAllocationInfoImpl(IRP,
A) {}
13537 void trackStatistics()
const override {
13542struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13544 : AAAllocationInfoImpl(IRP,
A) {}
13549 (void)indicatePessimisticFixpoint();
13552 void trackStatistics()
const override {
13601#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13602 case IRPosition::PK: \
13603 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13605#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13606 case IRPosition::PK: \
13607 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13611#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13612 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13613 CLASS *AA = nullptr; \
13614 switch (IRP.getPositionKind()) { \
13615 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13616 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13617 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13618 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13619 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13620 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13621 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13622 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13627#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13628 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13629 CLASS *AA = nullptr; \
13630 switch (IRP.getPositionKind()) { \
13631 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13632 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13633 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13634 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13635 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13636 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13637 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13638 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13643#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13644 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13645 CLASS *AA = nullptr; \
13646 switch (IRP.getPositionKind()) { \
13647 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13649 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13655#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13656 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13657 CLASS *AA = nullptr; \
13658 switch (IRP.getPositionKind()) { \
13659 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13660 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13661 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13662 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13663 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13664 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13665 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13666 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13671#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13672 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13673 CLASS *AA = nullptr; \
13674 switch (IRP.getPositionKind()) { \
13675 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13676 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13677 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13678 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13679 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13680 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13681 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13682 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13687#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13688 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13689 CLASS *AA = nullptr; \
13690 switch (IRP.getPositionKind()) { \
13691 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13692 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13693 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13694 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13695 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13696 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13697 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13698 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13750#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13751#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13752#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13753#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13754#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13755#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13756#undef SWITCH_PK_CREATE
13757#undef SWITCH_PK_INV
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
Given that RA is a live propagate it s liveness to any other values it uses(according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Development, "development", "for training")))
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
A manager for alias analyses.
A private abstract base class describing the concept of an individual alias analysis implementation.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static LLVM_ABI Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
static LLVM_ABI Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static LLVM_ABI Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
static LLVM_ABI Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
static LLVM_ABI Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
LLVM_ABI APInt getSignedMin() const
Return the smallest signed value contained in the ConstantRange.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
LLVM_ABI bool contains(const APInt &Val) const
Return true if the specified value is in the set.
LLVM_ABI APInt getSignedMax() const
Return the largest signed value contained in the ConstantRange.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
static LLVM_ABI IntegerType * getInt8Ty(LLVMContext &C)
bool isPointerTy() const
True if this is an instance of PointerType.
static LLVM_ABI IntegerType * getInt32Ty(LLVMContext &C)
static LLVM_ABI IntegerType * getInt1Ty(LLVMContext &C)
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
LLVM_ABI unsigned getIntegerBitWidth() const
'undef' values are things that do not have specified contents.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
LLVM_ABI LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
LLVM_ABI bool isGPU(const Module &M)
Return true iff M targets a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
LLVM_ABI const_iterator begin(StringRef path LLVM_LIFETIME_BOUND, Style style=Style::native)
Get begin iterator over path.
LLVM_ABI const_iterator end(StringRef path LLVM_LIFETIME_BOUND)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
PotentialValuesState< APInt > PotentialConstantIntValuesState
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
auto pred_begin(const MachineBasicBlock *BB)
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
-------------------- AAIntraFnReachability Attribute --------------------
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if 'From' instruction is assumed to reach, 'To' instruction.
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
bool isUnassigned() const
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
virtual const_bin_iterator begin() const =0
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane mask phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
Capture information for a specific Use.
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.