StackProtector.cpp
//===- StackProtector.cpp - Stack Protector Insertion ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass inserts stack protectors into functions which need them. A variable
// with a random value in it is stored onto the stack before the local variables
// are allocated. Upon exiting the block, the stored value is checked. If it's
// changed, then there was some sort of violation and the program aborts.
//
//===----------------------------------------------------------------------===//
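//
// Illustrative example (a sketch, assuming the usual clang flag mapping; not
// taken from this file): frontends request protection by attaching one of the
// ssp attributes, e.g. -fstack-protector-strong yields IR along the lines of
//
//   define void @foo() sspstrong {
//   entry:
//     %buf = alloca [16 x i8]
//     ...
//     ret void
//   }
//
// and this pass then decides, per function, whether a guard slot and check are
// actually emitted.
//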

#include "llvm/CodeGen/StackProtector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/DomTreeUpdater.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/EHPersonalities.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include <optional>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stack-protector"

STATISTIC(NumFunProtected, "Number of functions protected");
STATISTIC(NumAddrTaken, "Number of local variables that have their address"
                        " taken.");

static cl::opt<bool> EnableSelectionDAGSP("enable-selectiondag-sp",
                                          cl::init(true), cl::Hidden);
static cl::opt<bool> DisableCheckNoReturn("disable-check-noreturn-call",
                                          cl::init(false), cl::Hidden);

/// InsertStackProtectors - Insert code into the prologue and epilogue of the
/// function.
///
///  - The prologue code loads and stores the stack guard onto the stack.
///  - The epilogue checks the value stored in the prologue against the original
///    value. It calls __stack_chk_fail if they differ.
static bool InsertStackProtectors(const TargetMachine *TM, Function *F,
                                  DomTreeUpdater *DTU, bool &HasPrologue,
                                  bool &HasIRCheck);

/// CreateFailBB - Create a basic block to jump to when the stack protector
/// check fails.
static BasicBlock *CreateFailBB(Function *F, const TargetLowering &TLI);

bool SSPLayoutInfo::shouldEmitSDCheck(const BasicBlock &BB) const {
  return HasPrologue && !HasIRCheck && isa<ReturnInst>(BB.getTerminator());
}

void SSPLayoutInfo::copyToMachineFrameInfo(MachineFrameInfo &MFI) const {
  if (Layout.empty())
    return;

  for (int I = 0, E = MFI.getObjectIndexEnd(); I != E; ++I) {
    if (MFI.isDeadObjectIndex(I))
      continue;

    const AllocaInst *AI = MFI.getObjectAllocation(I);
    if (!AI)
      continue;

    SSPLayoutMap::const_iterator LI = Layout.find(AI);
    if (LI == Layout.end())
      continue;

    MFI.setObjectSSPLayout(I, LI->second);
  }
}

SSPLayoutInfo SSPLayoutAnalysis::run(Function &F,
                                     FunctionAnalysisManager &FAM) {
  SSPLayoutInfo Info;
  Info.RequireStackProtector =
      SSPLayoutAnalysis::requiresStackProtector(&F, &Info.Layout);
  Info.SSPBufferSize = F.getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
  return Info;
}

AnalysisKey SSPLayoutAnalysis::Key;

PreservedAnalyses StackProtectorPass::run(Function &F,
                                          FunctionAnalysisManager &FAM) {
  auto &Info = FAM.getResult<SSPLayoutAnalysis>(F);
  auto *DT = FAM.getCachedResult<DominatorTreeAnalysis>(F);
  DomTreeUpdater DTU(DT, DomTreeUpdater::UpdateStrategy::Lazy);

  if (!Info.RequireStackProtector)
    return PreservedAnalyses::all();

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is funclet-based personality.
  if (F.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(F.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return PreservedAnalyses::all();
  }

  ++NumFunProtected;
  bool Changed = InsertStackProtectors(TM, &F, DT ? &DTU : nullptr,
                                       Info.HasPrologue, Info.HasIRCheck);
#ifdef EXPENSIVE_CHECKS
  assert((!DT ||
          DTU.getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
         "Failed to maintain validity of domtree!");
#endif

  if (!Changed)
    return PreservedAnalyses::all();
  PreservedAnalyses PA;
  PA.preserve<SSPLayoutAnalysis>();
  PA.preserve<DominatorTreeAnalysis>();
  return PA;
}

char StackProtector::ID = 0;

StackProtector::StackProtector() : FunctionPass(ID) {
  initializeStackProtectorPass(*PassRegistry::getPassRegistry());
}

INITIALIZE_PASS_BEGIN(StackProtector, DEBUG_TYPE,
                      "Insert stack protectors", false, true)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(StackProtector, DEBUG_TYPE,
                    "Insert stack protectors", false, true)

FunctionPass *llvm::createStackProtectorPass() { return new StackProtector(); }

void StackProtector::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<TargetPassConfig>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

bool StackProtector::runOnFunction(Function &Fn) {
  F = &Fn;
  M = F->getParent();
  if (auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>())
    DTU.emplace(DTWP->getDomTree(), DomTreeUpdater::UpdateStrategy::Lazy);
  TM = &getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
  LayoutInfo.HasPrologue = false;
  LayoutInfo.HasIRCheck = false;

  LayoutInfo.SSPBufferSize = Fn.getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
  if (!requiresStackProtector(F, &LayoutInfo.Layout))
    return false;

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is funclet-based personality.
  if (Fn.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(Fn.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return false;
  }

  ++NumFunProtected;
  bool Changed =
      InsertStackProtectors(TM, F, DTU ? &*DTU : nullptr,
                            LayoutInfo.HasPrologue, LayoutInfo.HasIRCheck);
#ifdef EXPENSIVE_CHECKS
  assert((!DTU ||
          DTU->getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
         "Failed to maintain validity of domtree!");
#endif
  DTU.reset();
  return Changed;
}

/// \param [out] IsLarge is set to true if a protectable array is found and
/// it is "large" ( >= ssp-buffer-size). In the case of a structure with
/// multiple arrays, this gets set if any of them is large.
static bool ContainsProtectableArray(Type *Ty, Module *M, unsigned SSPBufferSize,
                                     bool &IsLarge, bool Strong,
                                     bool InStruct) {
  if (!Ty)
    return false;
  if (ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
    if (!AT->getElementType()->isIntegerTy(8)) {
      // If we're on a non-Darwin platform or we're inside of a structure, don't
      // add stack protectors unless the array is a character array.
      // However, in strong mode any array, regardless of type and size,
      // triggers a protector.
      if (!Strong && (InStruct || !M->getTargetTriple().isOSDarwin()))
        return false;
    }

    // If an array has more than SSPBufferSize bytes of allocated space, then we
    // emit stack protectors.
    if (SSPBufferSize <= M->getDataLayout().getTypeAllocSize(AT)) {
      IsLarge = true;
      return true;
    }

    if (Strong)
      // Require a protector for all arrays in strong mode
      return true;
  }

  const StructType *ST = dyn_cast<StructType>(Ty);
  if (!ST)
    return false;

  bool NeedsProtector = false;
  for (Type *ET : ST->elements())
    if (ContainsProtectableArray(ET, M, SSPBufferSize, IsLarge, Strong, true)) {
      // If the element is a protectable array and is large (>= SSPBufferSize)
      // then we are done. If the protectable array is not large, then
      // keep looking in case a subsequent element is a large array.
      if (IsLarge)
        return true;
      NeedsProtector = true;
    }

  return NeedsProtector;
}

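// Illustrative classification sketch for the helper above (assumes the default
// stack-protector-buffer-size of 8 and a non-Darwin target; the variable names
// are hypothetical):
//
//   char Big[64];               // i8 array >= buffer size -> IsLarge, protect
//   char Small[4];              // i8 array < buffer size  -> protect only in strong mode
//   int  Ints[64];              // non-i8 array            -> protect only in strong mode
//   struct { char C[16]; } S;   // nested i8 array found via the struct element walk
//
// IsLarge is what later selects SSPLK_LargeArray over SSPLK_SmallArray when the
// layout map is filled in.
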
/// Maximum remaining allocation size observed for a phi node, and how often
/// the allocation size has already been decreased. We only allow a limited
/// number of decreases.
struct PhiInfo {
  TypeSize AllocSize;
  unsigned NumDecreased = 0;
  static constexpr unsigned MaxNumDecreased = 3;
  PhiInfo(TypeSize AllocSize) : AllocSize(AllocSize) {}
};
using PhiMap = SmallDenseMap<const PHINode *, PhiInfo, 16>;

/// Check whether a stack allocation has its address taken.
static bool HasAddressTaken(const Instruction *AI, TypeSize AllocSize,
                            Module *M,
                            PhiMap &VisitedPHIs) {
  const DataLayout &DL = M->getDataLayout();
  for (const User *U : AI->users()) {
    const auto *I = cast<Instruction>(U);
    // If this instruction accesses memory make sure it doesn't access beyond
    // the bounds of the allocated object.
    std::optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
    if (MemLoc && MemLoc->Size.hasValue() &&
        !TypeSize::isKnownGE(AllocSize, MemLoc->Size.getValue()))
      return true;
    switch (I->getOpcode()) {
    case Instruction::Store:
      if (AI == cast<StoreInst>(I)->getValueOperand())
        return true;
      break;
    case Instruction::AtomicCmpXchg:
      // cmpxchg conceptually includes both a load and store from the same
      // location. So, like store, the value being stored is what matters.
      if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
        return true;
      break;
    case Instruction::AtomicRMW:
      if (AI == cast<AtomicRMWInst>(I)->getValOperand())
        return true;
      break;
    case Instruction::PtrToInt:
      if (AI == cast<PtrToIntInst>(I)->getOperand(0))
        return true;
      break;
    case Instruction::Call: {
      // Ignore intrinsics that do not become real instructions.
      // TODO: Narrow this to intrinsics that have store-like effects.
      const auto *CI = cast<CallInst>(I);
      if (!CI->isDebugOrPseudoInst() && !CI->isLifetimeStartOrEnd())
        return true;
      break;
    }
    case Instruction::Invoke:
      return true;
    case Instruction::GetElementPtr: {
      // If the GEP offset is out-of-bounds, or is non-constant and so has to be
      // assumed to be potentially out-of-bounds, then any memory access that
      // would use it could also be out-of-bounds meaning stack protection is
      // required.
      const GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
      unsigned IndexSize = DL.getIndexTypeSizeInBits(I->getType());
      APInt Offset(IndexSize, 0);
      if (!GEP->accumulateConstantOffset(DL, Offset))
        return true;
      TypeSize OffsetSize = TypeSize::getFixed(Offset.getLimitedValue());
      if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
        return true;
      // Adjust AllocSize to be the space remaining after this offset.
      // We can't subtract a fixed size from a scalable one, so in that case
      // assume the scalable value is of minimum size.
      TypeSize NewAllocSize =
          TypeSize::getFixed(AllocSize.getKnownMinValue()) - OffsetSize;
      if (HasAddressTaken(I, NewAllocSize, M, VisitedPHIs))
        return true;
      break;
    }
    case Instruction::BitCast:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      if (HasAddressTaken(I, AllocSize, M, VisitedPHIs))
        return true;
      break;
    case Instruction::PHI: {
      // Keep track of what PHI nodes we have already visited to ensure
      // they are only visited once.
      const auto *PN = cast<PHINode>(I);
      auto [It, Inserted] = VisitedPHIs.try_emplace(PN, AllocSize);
      if (!Inserted) {
        if (TypeSize::isKnownGE(AllocSize, It->second.AllocSize))
          break;

        // Check again with smaller size.
        if (It->second.NumDecreased == PhiInfo::MaxNumDecreased)
          return true;

        It->second.AllocSize = AllocSize;
        ++It->second.NumDecreased;
      }
      if (HasAddressTaken(PN, AllocSize, M, VisitedPHIs))
        return true;
      break;
    }
    case Instruction::Load:
    case Instruction::Ret:
      // These instructions take an address operand, but have load-like or
      // other innocuous behavior that should not trigger a stack protector.
      break;
    default:
      // Conservatively return true for any instruction that takes an address
      // operand, but is not handled above.
      return true;
    }
  }
  return false;
}

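// Illustrative sketch of what the walk above treats as "address taken" (the C
// snippet and names are hypothetical, not from this file):
//
//   void sink(int *);
//   void f() {
//     int X;
//     sink(&X);      // the address escapes through a call -> returns true
//     X = 42;        // a plain store *into* X does not count
//   }
//
// Loads from the slot, in-bounds constant-offset GEPs (which are recursed
// into rather than flagged), and debug/lifetime intrinsics likewise do not,
// by themselves, cause the alloca to be treated as address-taken.
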
/// Search for the first call to the llvm.stackprotector intrinsic and return it
/// if present.
static const CallInst *findStackProtectorIntrinsic(Function &F) {
  for (const BasicBlock &BB : F)
    for (const Instruction &I : BB)
      if (const auto *II = dyn_cast<IntrinsicInst>(&I))
        if (II->getIntrinsicID() == Intrinsic::stackprotector)
          return II;
  return nullptr;
}

/// Check whether or not this function needs a stack protector based
/// upon the stack protector level.
///
/// We use two heuristics: a standard (ssp) and strong (sspstrong).
/// The standard heuristic will add a guard variable to functions that
/// call alloca with either a variable size or a size >= SSPBufferSize,
/// functions with character buffers larger than SSPBufferSize, and functions
/// with aggregates containing character buffers larger than SSPBufferSize. The
/// strong heuristic will add a guard variable to functions that call alloca
/// regardless of size, functions with any buffer regardless of type and size,
/// functions with aggregates that contain any buffer regardless of type and
/// size, and functions that contain stack-based variables that have had their
/// address taken.
bool SSPLayoutAnalysis::requiresStackProtector(Function *F,
                                               SSPLayoutMap *Layout) {
  Module *M = F->getParent();
  bool Strong = false;
  bool NeedsProtector = false;

  // The set of PHI nodes visited when determining if a variable's reference has
  // been taken. This set is maintained to ensure we don't visit the same PHI
  // node multiple times.
  PhiMap VisitedPHIs;

  unsigned SSPBufferSize = F->getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);

  if (F->hasFnAttribute(Attribute::SafeStack))
    return false;

  // We are constructing the OptimizationRemarkEmitter on the fly rather than
  // using the analysis pass to avoid building DominatorTree and LoopInfo which
  // are not available this late in the IR pipeline.
  OptimizationRemarkEmitter ORE(F);

  if (F->hasFnAttribute(Attribute::StackProtectReq)) {
    if (!Layout)
      return true;
    ORE.emit([&]() {
      return OptimizationRemark(DEBUG_TYPE, "StackProtectorRequested", F)
             << "Stack protection applied to function "
             << ore::NV("Function", F)
             << " due to a function attribute or command-line switch";
    });
    NeedsProtector = true;
    Strong = true; // Use the same heuristic as strong to determine SSPLayout
  } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
    Strong = true;
  else if (!F->hasFnAttribute(Attribute::StackProtect))
    return false;

  for (const BasicBlock &BB : *F) {
    for (const Instruction &I : BB) {
      if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
        if (AI->isArrayAllocation()) {
          auto RemarkBuilder = [&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorAllocaOrArray",
                                      &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a call to alloca or use of a variable length "
                      "array";
          };
          if (const auto *CI = dyn_cast<ConstantInt>(AI->getArraySize())) {
            if (CI->getLimitedValue(SSPBufferSize) >= SSPBufferSize) {
              // A call to alloca with size >= SSPBufferSize requires
              // stack protectors.
              if (!Layout)
                return true;
              Layout->insert(
                  std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            } else if (Strong) {
              // Require protectors for all alloca calls in strong mode.
              if (!Layout)
                return true;
              Layout->insert(
                  std::make_pair(AI, MachineFrameInfo::SSPLK_SmallArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            }
          } else {
            // A call to alloca with a variable size requires protectors.
            if (!Layout)
              return true;
            Layout->insert(
                std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
            ORE.emit(RemarkBuilder);
            NeedsProtector = true;
          }
          continue;
        }

        bool IsLarge = false;
        if (ContainsProtectableArray(AI->getAllocatedType(), M, SSPBufferSize,
                                     IsLarge, Strong, false)) {
          if (!Layout)
            return true;
          Layout->insert(std::make_pair(
              AI, IsLarge ? MachineFrameInfo::SSPLK_LargeArray
                          : MachineFrameInfo::SSPLK_SmallArray));
          ORE.emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorBuffer", &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a stack allocated buffer or struct containing a "
                      "buffer";
          });
          NeedsProtector = true;
          continue;
        }

        if (Strong &&
            HasAddressTaken(
                AI, M->getDataLayout().getTypeAllocSize(AI->getAllocatedType()),
                M, VisitedPHIs)) {
          ++NumAddrTaken;
          if (!Layout)
            return true;
          Layout->insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
          ORE.emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorAddressTaken",
                                      &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to the address of a local variable being taken";
          });
          NeedsProtector = true;
        }
        // Clear any PHIs that we visited, to make sure we examine all uses of
        // any subsequent allocas that we look at.
        VisitedPHIs.clear();
      }
    }
  }

  return NeedsProtector;
}

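// Usage note for the function above (an illustrative sketch, assuming the
// SSPLayoutAnalysis spelling used earlier in this file): with Layout == nullptr
// it acts as a cheap early-exit query, while with a real map it keeps scanning
// so every interesting alloca is recorded with an SSPLayoutKind, which
// copyToMachineFrameInfo() later transfers onto the frame objects.
//
//   SSPLayoutMap Layout;
//   bool Any  = SSPLayoutAnalysis::requiresStackProtector(&F);           // query only
//   bool Full = SSPLayoutAnalysis::requiresStackProtector(&F, &Layout);  // also fills Layout
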
/// Create a stack guard load and record whether SelectionDAG SSP is
/// supported.
static Value *getStackGuard(const TargetLoweringBase *TLI, Module *M,
                            IRBuilder<> &B,
                            bool *SupportsSelectionDAGSP = nullptr) {
  Value *Guard = TLI->getIRStackGuard(B);
  StringRef GuardMode = M->getStackProtectorGuard();
  if ((GuardMode == "tls" || GuardMode.empty()) && Guard)
    return B.CreateLoad(B.getPtrTy(), Guard, true, "StackGuard");

  // Use SelectionDAG SSP handling, since there isn't an IR guard.
  //
  // This is somewhat awkward, since we optionally output whether we
  // should perform a SelectionDAG SP here. The reason is that it's strictly
  // defined as !TLI->getIRStackGuard(B), where getIRStackGuard is also
  // mutating. There is no way to get this bit without mutating the IR, so
  // getting this bit has to happen at this exact point.
  //
  // We could have defined a new function TLI::supportsSelectionDAGSP(), but
  // that would put more burden on the backends' overriding work, especially
  // when it actually conveys the same information getIRStackGuard() already
  // gives.
  if (SupportsSelectionDAGSP)
    *SupportsSelectionDAGSP = true;
  TLI->insertSSPDeclarations(*M);
  return B.CreateIntrinsic(Intrinsic::stackguard, {});
}

/// Insert code into the entry block that stores the stack guard
/// variable onto the stack:
///
///   entry:
///     StackGuardSlot = alloca i8*
///     StackGuard = <stack guard>
///     call void @llvm.stackprotector(StackGuard, StackGuardSlot)
///
/// Returns true if the platform/triple supports the stackprotectorcreate pseudo
/// node.
static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc,
                           const TargetLoweringBase *TLI, AllocaInst *&AI) {
  bool SupportsSelectionDAGSP = false;
  IRBuilder<> B(&F->getEntryBlock().front());
  PointerType *PtrTy = PointerType::getUnqual(CheckLoc->getContext());
  AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");

  Value *GuardSlot = getStackGuard(TLI, M, B, &SupportsSelectionDAGSP);
  B.CreateIntrinsic(Intrinsic::stackprotector, {GuardSlot, AI});
  return SupportsSelectionDAGSP;
}

static bool InsertStackProtectors(const TargetMachine *TM, Function *F,
                                  DomTreeUpdater *DTU, bool &HasPrologue,
                                  bool &HasIRCheck) {
  auto *M = F->getParent();
  auto *TLI = TM->getSubtargetImpl(*F)->getTargetLowering();

  // If the target wants to XOR the frame pointer into the guard value, it's
  // impossible to emit the check in IR, so the target *must* support stack
  // protection in SDAG.
  bool SupportsSelectionDAGSP =
      TLI->useStackGuardXorFP() ||
      (EnableSelectionDAGSP && !TM->Options.EnableFastISel);
  AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.
  BasicBlock *FailBB = nullptr;

  for (BasicBlock &BB : llvm::make_early_inc_range(*F)) {
    // This is a stack-protector auto-generated check BB; skip it.
    if (&BB == FailBB)
      continue;
    Instruction *CheckLoc = dyn_cast<ReturnInst>(BB.getTerminator());
    if (!CheckLoc && !DisableCheckNoReturn)
      for (auto &Inst : BB) {
        if (IntrinsicInst *IB = dyn_cast<IntrinsicInst>(&Inst);
            IB && (IB->getIntrinsicID() == Intrinsic::eh_sjlj_callsite)) {
          // eh_sjlj_callsite has to be in the same BB as the
          // BB terminator. Don't insert within this range.
          CheckLoc = IB;
          break;
        }
        if (auto *CB = dyn_cast<CallBase>(&Inst))
          // Do stack check before noreturn calls that aren't nounwind (e.g:
          // __cxa_throw).
          if (CB->doesNotReturn() && !CB->doesNotThrow()) {
            CheckLoc = CB;
            break;
          }
      }

    if (!CheckLoc)
      continue;

    // Generate prologue instrumentation if not already generated.
    if (!HasPrologue) {
      HasPrologue = true;
      SupportsSelectionDAGSP &= CreatePrologue(F, M, CheckLoc, TLI, AI);
    }

    // SelectionDAG based code generation. Nothing else needs to be done here.
    // The epilogue instrumentation is postponed to SelectionDAG.
    if (SupportsSelectionDAGSP)
      break;

    // Find the stack guard slot if the prologue was not created by this pass
    // itself via a previous call to CreatePrologue().
    if (!AI) {
      const CallInst *SPCall = findStackProtectorIntrinsic(*F);
      assert(SPCall && "Call to llvm.stackprotector is missing");
      AI = cast<AllocaInst>(SPCall->getArgOperand(1));
    }

    // Set HasIRCheck to true, so that SelectionDAG will not generate its own
    // version. SelectionDAG calls 'shouldEmitSDCheck' to check whether
    // instrumentation has already been generated.
    HasIRCheck = true;

    // If we're instrumenting a block with a tail call, the check has to be
    // inserted before the call rather than between it and the return.
    Instruction *Prev = CheckLoc->getPrevNode();
    if (auto *CI = dyn_cast_if_present<CallInst>(Prev))
      if (CI->isTailCall() && isInTailCallPosition(*CI, *TM))
        CheckLoc = Prev;

    // Generate epilogue instrumentation. The epilogue instrumentation can be
    // function-based or inlined depending on which mechanism the target is
    // providing.
    if (Function *GuardCheck = TLI->getSSPStackGuardCheck(*M)) {
      // Generate the function-based epilogue instrumentation.
      // The target provides a guard check function, generate a call to it.
      IRBuilder<> B(CheckLoc);
      LoadInst *Guard = B.CreateLoad(B.getPtrTy(), AI, true, "Guard");
      CallInst *Call = B.CreateCall(GuardCheck, {Guard});
      Call->setAttributes(GuardCheck->getAttributes());
      Call->setCallingConv(GuardCheck->getCallingConv());
    } else {
      // Generate the epilogue with inline instrumentation.
      // If we do not support SelectionDAG based calls, generate IR level
      // calls.
      //
      // For each block with a return instruction, convert this:
      //
      //   return:
      //     ...
      //     ret ...
      //
      // into this:
      //
      //   return:
      //     ...
      //     %1 = <stack guard>
      //     %2 = load StackGuardSlot
      //     %3 = icmp ne i1 %1, %2
      //     br i1 %3, label %CallStackCheckFailBlk, label %SP_return
      //
      //   SP_return:
      //     ret ...
      //
      //   CallStackCheckFailBlk:
      //     call void @__stack_chk_fail()
      //     unreachable

      // Create the FailBB. We duplicate the BB every time since the MI tail
      // merge pass will merge together all of the various BB into one including
      // fail BB generated by the stack protector pseudo instruction.
      if (!FailBB)
        FailBB = CreateFailBB(F, *TLI);

      IRBuilder<> B(CheckLoc);
      Value *Guard = getStackGuard(TLI, M, B);
      LoadInst *LI2 = B.CreateLoad(B.getPtrTy(), AI, true);
      auto *Cmp = cast<ICmpInst>(B.CreateICmpNE(Guard, LI2));
      auto SuccessProb =
          BranchProbabilityInfo::getBranchProbStackProtector(true);
      auto FailureProb =
          BranchProbabilityInfo::getBranchProbStackProtector(false);
      MDNode *Weights = MDBuilder(F->getContext())
                            .createBranchWeights(FailureProb.getNumerator(),
                                                 SuccessProb.getNumerator());

      SplitBlockAndInsertIfThen(Cmp, CheckLoc,
                                /*Unreachable=*/false, Weights, DTU,
                                /*LI=*/nullptr, /*ThenBlock=*/FailBB);

      auto *BI = cast<BranchInst>(Cmp->getParent()->getTerminator());
      BasicBlock *NewBB = BI->getSuccessor(1);
      NewBB->setName("SP_return");
      NewBB->moveAfter(&BB);

      Cmp->setPredicate(Cmp->getInversePredicate());
      BI->swapSuccessors();
    }
  }

  // Return if we didn't modify any basic blocks, i.e., there are no return
  // statements in the function.
  return HasPrologue;
}

static BasicBlock *CreateFailBB(Function *F, const TargetLowering &TLI) {
  auto *M = F->getParent();
  LLVMContext &Context = F->getContext();
  BasicBlock *FailBB = BasicBlock::Create(Context, "CallStackCheckFailBlk", F);
  IRBuilder<> B(FailBB);
  if (F->getSubprogram())
    B.SetCurrentDebugLocation(
        DILocation::get(Context, 0, 0, F->getSubprogram()));
  FunctionCallee StackChkFail;
  SmallVector<Value *, 1> Args;

  if (const char *ChkFailName =
          TLI.getLibcallName(RTLIB::STACKPROTECTOR_CHECK_FAIL)) {
    StackChkFail =
        M->getOrInsertFunction(ChkFailName, Type::getVoidTy(Context));
  } else if (const char *SSHName =
                 TLI.getLibcallName(RTLIB::STACK_SMASH_HANDLER)) {
    StackChkFail = M->getOrInsertFunction(SSHName, Type::getVoidTy(Context),
                                          PointerType::getUnqual(Context));
    Args.push_back(B.CreateGlobalString(F->getName(), "SSH"));
  } else {
    Context.emitError("no libcall available for stack protector");
  }

  if (StackChkFail) {
    CallInst *Call = B.CreateCall(StackChkFail, Args);
    Call->addFnAttr(Attribute::NoReturn);
  }

  B.CreateUnreachable();
  return FailBB;
}
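
// Pipeline sketch (illustrative, with hypothetical surrounding code): targets
// reach this code through the factory declared above when the legacy codegen
// pipeline is assembled,
//
//   legacy::PassManager PM;
//   PM.add(createStackProtectorPass());
//
// while the new pass manager constructs StackProtectorPass with a
// TargetMachine and obtains the per-alloca layout from SSPLayoutAnalysis.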