LLVM 22.0.0git
StackProtector.cpp
Go to the documentation of this file.
1//===- StackProtector.cpp - Stack Protector Insertion ---------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This pass inserts stack protectors into functions which need them. A variable
10// with a random value in it is stored onto the stack before the local variables
11// are allocated. Upon exiting the block, the stored value is checked. If it's
12// changed, then there was some sort of violation and the program aborts.
13//
14//===----------------------------------------------------------------------===//
15
18#include "llvm/ADT/Statistic.h"
23#include "llvm/CodeGen/Passes.h"
27#include "llvm/IR/Attributes.h"
28#include "llvm/IR/BasicBlock.h"
29#include "llvm/IR/Constants.h"
30#include "llvm/IR/DataLayout.h"
32#include "llvm/IR/Dominators.h"
34#include "llvm/IR/Function.h"
35#include "llvm/IR/IRBuilder.h"
36#include "llvm/IR/Instruction.h"
39#include "llvm/IR/Intrinsics.h"
40#include "llvm/IR/MDBuilder.h"
41#include "llvm/IR/Module.h"
42#include "llvm/IR/Type.h"
43#include "llvm/IR/User.h"
45#include "llvm/Pass.h"
51#include <optional>
52
53using namespace llvm;
54
55#define DEBUG_TYPE "stack-protector"
56
57STATISTIC(NumFunProtected, "Number of functions protected");
58STATISTIC(NumAddrTaken, "Number of local variables that have their address"
59 " taken.");
60
61static cl::opt<bool> EnableSelectionDAGSP("enable-selectiondag-sp",
62 cl::init(true), cl::Hidden);
63static cl::opt<bool> DisableCheckNoReturn("disable-check-noreturn-call",
64 cl::init(false), cl::Hidden);
65
66/// InsertStackProtectors - Insert code into the prologue and epilogue of the
67/// function.
68///
69/// - The prologue code loads and stores the stack guard onto the stack.
70/// - The epilogue checks the value stored in the prologue against the original
71/// value. It calls __stack_chk_fail if they differ.
72static bool InsertStackProtectors(const TargetMachine *TM, Function *F,
73 DomTreeUpdater *DTU, bool &HasPrologue,
74 bool &HasIRCheck);
75
76/// CreateFailBB - Create a basic block to jump to when the stack protector
77/// check fails.
78static BasicBlock *CreateFailBB(Function *F, const TargetLowering &TLI);
79
// NOTE(review): extraction dropped original line 80 — the signature of
// SSPLayoutInfo::shouldEmitSDCheck(const BasicBlock &BB) const (see the
// trailing Doxygen index); this is its body. A SelectionDAG check is wanted
// only for blocks ending in a return, when a prologue exists and no IR-level
// check was already emitted.
81 return HasPrologue && !HasIRCheck && isa<ReturnInst>(BB.getTerminator());
82}
83
// NOTE(review): extraction dropped original line 84 — per the trailing Doxygen
// index this is the body of
// SSPLayoutInfo::copyToMachineFrameInfo(MachineFrameInfo &MFI) const.
// It copies the per-alloca SSP layout kinds computed by the IR analysis onto
// the corresponding machine frame objects.
85 if (Layout.empty())
86 return;
87
88 for (int I = 0, E = MFI.getObjectIndexEnd(); I != E; ++I) {
// Skip frame slots that no longer correspond to a live object.
89 if (MFI.isDeadObjectIndex(I))
90 continue;
91
// Only objects backed by an IR alloca can have a recorded layout kind.
92 const AllocaInst *AI = MFI.getObjectAllocation(I);
93 if (!AI)
94 continue;
95
96 SSPLayoutMap::const_iterator LI = Layout.find(AI);
97 if (LI == Layout.end())
98 continue;
99
100 MFI.setObjectSSPLayout(I, LI->second);
101 }
102}
103
// NOTE(review): extraction dropped original line 105 (the signature of
// SSPLayoutAnalysis::run(Function &, FunctionAnalysisManager &), per the
// trailing Doxygen index) and line 109 (the right-hand side assigned to
// Info.RequireStackProtector — presumably the requiresStackProtector(...)
// call that also fills Info.Layout; confirm against upstream).
106
107 SSPLayoutInfo Info;
108 Info.RequireStackProtector =
110 Info.SSPBufferSize = F.getFnAttributeAsParsedInteger(
111 "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
112 return Info;
113}
114
115AnalysisKey SSPLayoutAnalysis::Key;
116
// NOTE(review): extraction dropped original lines 117-118 — the signature of
// StackProtectorPass::run(Function &F, FunctionAnalysisManager &FAM) (see the
// trailing Doxygen index) — and lines 145-147, which construct the
// PreservedAnalyses `PA` returned below. Restore from upstream before
// compiling.
119 auto &Info = FAM.getResult<SSPLayoutAnalysis>(F);
// A cached domtree is optional: DTU is only handed to the transform when one
// already exists, so this pass never forces domtree construction.
120 auto *DT = FAM.getCachedResult<DominatorTreeAnalysis>(F);
121 DomTreeUpdater DTU(DT, DomTreeUpdater::UpdateStrategy::Lazy);
122
123 if (!Info.RequireStackProtector)
124 return PreservedAnalyses::all();
125
126 // TODO(etienneb): Functions with funclets are not correctly supported now.
127 // Do nothing if this is funclet-based personality.
128 if (F.hasPersonalityFn()) {
129 EHPersonality Personality = classifyEHPersonality(F.getPersonalityFn());
130 if (isFuncletEHPersonality(Personality))
131 return PreservedAnalyses::all();
132 }
133
134 ++NumFunProtected;
135 bool Changed = InsertStackProtectors(TM, &F, DT ? &DTU : nullptr,
136 Info.HasPrologue, Info.HasIRCheck);
137#ifdef EXPENSIVE_CHECKS
138 assert((!DT ||
139 DTU.getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
140 "Failed to maintain validity of domtree!");
141#endif
142
143 if (!Changed)
144 return PreservedAnalyses::all();
148 return PA;
149}
150
// NOTE(review): this region is legacy-pass-manager boilerplate (pass ID,
// INITIALIZE_PASS_BEGIN/DEPENDENCY/END macro invocations, the StackProtector
// constructor and getAnalysisUsage). Extraction dropped several original
// lines, leaving only the fragments below — restore from upstream before
// compiling.
151char StackProtector::ID = 0;
152
156
158 "Insert stack protectors", false, true)
162 "Insert stack protectors", false, true)
163
165
170
// NOTE(review): extraction dropped original line 171 — the signature of
// StackProtector::runOnFunction(Function &Fn) (see the trailing Doxygen
// index) — plus line 174 (presumably the guard that fetches an available
// DominatorTreeWrapperPass before the DTU.emplace below), line 176, and line
// 188 (presumably the classifyEHPersonality call whose result is tested on
// the next line). Restore from upstream before compiling.
172 F = &Fn;
173 M = F->getParent();
175 DTU.emplace(DTWP->getDomTree(), DomTreeUpdater::UpdateStrategy::Lazy);
177 LayoutInfo.HasPrologue = false;
178 LayoutInfo.HasIRCheck = false;
179
180 LayoutInfo.SSPBufferSize = Fn.getFnAttributeAsParsedInteger(
181 "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
182 if (!requiresStackProtector(F, &LayoutInfo.Layout))
183 return false;
184
185 // TODO(etienneb): Functions with funclets are not correctly supported now.
186 // Do nothing if this is funclet-based personality.
187 if (Fn.hasPersonalityFn()) {
189 if (isFuncletEHPersonality(Personality))
190 return false;
191 }
192
193 ++NumFunProtected;
194 bool Changed =
195 InsertStackProtectors(TM, F, DTU ? &*DTU : nullptr,
196 LayoutInfo.HasPrologue, LayoutInfo.HasIRCheck);
197#ifdef EXPENSIVE_CHECKS
198 assert((!DTU ||
199 DTU->getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
200 "Failed to maintain validity of domtree!");
201#endif
// Drop the lazily-applied updater before returning so pending updates flush.
202 DTU.reset();
203 return Changed;
204}
205
206/// \param [out] IsLarge is set to true if a protectable array is found and
207/// it is "large" ( >= ssp-buffer-size). In the case of a structure with
208/// multiple arrays, this gets set if any of them is large.
209static bool ContainsProtectableArray(Type *Ty, Module *M, unsigned SSPBufferSize,
210 bool &IsLarge, bool Strong,
211 bool InStruct) {
212 if (!Ty)
213 return false;
214 if (ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
215 if (!AT->getElementType()->isIntegerTy(8)) {
216 // If we're on a non-Darwin platform or we're inside of a structure, don't
217 // add stack protectors unless the array is a character array.
218 // However, in strong mode any array, regardless of type and size,
219 // triggers a protector.
220 if (!Strong && (InStruct || !M->getTargetTriple().isOSDarwin()))
221 return false;
222 }
223
224 // If an array has more than SSPBufferSize bytes of allocated space, then we
225 // emit stack protectors.
226 if (SSPBufferSize <= M->getDataLayout().getTypeAllocSize(AT)) {
227 IsLarge = true;
228 return true;
229 }
230
231 if (Strong)
232 // Require a protector for all arrays in strong mode
233 return true;
234 }
235
236 const StructType *ST = dyn_cast<StructType>(Ty);
237 if (!ST)
238 return false;
239
240 bool NeedsProtector = false;
241 for (Type *ET : ST->elements())
242 if (ContainsProtectableArray(ET, M, SSPBufferSize, IsLarge, Strong, true)) {
243 // If the element is a protectable array and is large (>= SSPBufferSize)
244 // then we are done. If the protectable array is not large, then
245 // keep looking in case a subsequent element is a large array.
246 if (IsLarge)
247 return true;
248 NeedsProtector = true;
249 }
250
251 return NeedsProtector;
252}
253
254/// Maximum remaining allocation size observed for a phi node, and how often
255/// the allocation size has already been decreased. We only allow a limited
256/// number of decreases.
257struct PhiInfo {
// NOTE(review): extraction dropped original line 258 here — the member that
// HasAddressTaken accesses as `It->second.AllocSize` (a TypeSize) — and lines
// 261/263 (per the trailing Doxygen index, line 263 declared the PhiMap alias
// `SmallDenseMap<const PHINode *, PhiInfo, 16>`). Restore from upstream.
259 unsigned NumDecreased = 0;
260 static constexpr unsigned MaxNumDecreased = 3;
262};
264
265/// Check whether a stack allocation has its address taken.
// Returns true if any (transitive) use of \p AI "escapes" the allocation —
// i.e. could let its address be observed or written beyond its bounds —
// which forces stack protection in strong mode. AllocSize is the number of
// bytes still known to be in-bounds along the current use chain; VisitedPHIs
// bounds the recursion through phi cycles.
266static bool HasAddressTaken(const Instruction *AI, TypeSize AllocSize,
267 Module *M,
268 PhiMap &VisitedPHIs) {
269 const DataLayout &DL = M->getDataLayout();
270 for (const User *U : AI->users()) {
271 const auto *I = cast<Instruction>(U);
272 // If this instruction accesses memory make sure it doesn't access beyond
273 // the bounds of the allocated object.
274 std::optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
275 if (MemLoc && MemLoc->Size.hasValue() &&
276 !TypeSize::isKnownGE(AllocSize, MemLoc->Size.getValue()))
277 return true;
278 switch (I->getOpcode()) {
279 case Instruction::Store:
// Storing the pointer itself (not storing *to* it) leaks the address.
280 if (AI == cast<StoreInst>(I)->getValueOperand())
281 return true;
282 break;
283 case Instruction::AtomicCmpXchg:
284 // cmpxchg conceptually includes both a load and store from the same
285 // location. So, like store, the value being stored is what matters.
286 if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
287 return true;
288 break;
289 case Instruction::AtomicRMW:
290 if (AI == cast<AtomicRMWInst>(I)->getValOperand())
291 return true;
292 break;
293 case Instruction::PtrToInt:
294 if (AI == cast<PtrToIntInst>(I)->getOperand(0))
295 return true;
296 break;
297 case Instruction::Call: {
298 // Ignore intrinsics that do not become real instructions.
299 // TODO: Narrow this to intrinsics that have store-like effects.
300 const auto *CI = cast<CallInst>(I);
301 if (!CI->isDebugOrPseudoInst() && !CI->isLifetimeStartOrEnd())
302 return true;
303 break;
304 }
305 case Instruction::Invoke:
306 return true;
307 case Instruction::GetElementPtr: {
308 // If the GEP offset is out-of-bounds, or is non-constant and so has to be
309 // assumed to be potentially out-of-bounds, then any memory access that
310 // would use it could also be out-of-bounds meaning stack protection is
311 // required.
// NOTE(review): extraction dropped original line 312 here — presumably the
// declaration of `GEP` dereferenced two lines below. Restore from upstream.
313 unsigned IndexSize = DL.getIndexTypeSizeInBits(I->getType());
314 APInt Offset(IndexSize, 0);
315 if (!GEP->accumulateConstantOffset(DL, Offset))
316 return true;
317 TypeSize OffsetSize = TypeSize::getFixed(Offset.getLimitedValue());
318 if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
319 return true;
320 // Adjust AllocSize to be the space remaining after this offset.
321 // We can't subtract a fixed size from a scalable one, so in that case
322 // assume the scalable value is of minimum size.
323 TypeSize NewAllocSize =
324 TypeSize::getFixed(AllocSize.getKnownMinValue()) - OffsetSize;
325 if (HasAddressTaken(I, NewAllocSize, M, VisitedPHIs))
326 return true;
327 break;
328 }
329 case Instruction::BitCast:
330 case Instruction::Select:
331 case Instruction::AddrSpaceCast:
// These forward the pointer unchanged, so recurse with the same bound.
332 if (HasAddressTaken(I, AllocSize, M, VisitedPHIs))
333 return true;
334 break;
335 case Instruction::PHI: {
336 // Keep track of what PHI nodes we have already visited to ensure
337 // they are only visited once.
338 const auto *PN = cast<PHINode>(I);
339 auto [It, Inserted] = VisitedPHIs.try_emplace(PN, AllocSize);
340 if (!Inserted) {
341 if (TypeSize::isKnownGE(AllocSize, It->second.AllocSize))
342 break;
343
344 // Check again with smaller size.
// Give up (conservatively escaped) after MaxNumDecreased re-visits so phi
// cycles with ever-shrinking bounds cannot recurse unboundedly.
345 if (It->second.NumDecreased == PhiInfo::MaxNumDecreased)
346 return true;
347
348 It->second.AllocSize = AllocSize;
349 ++It->second.NumDecreased;
350 }
351 if (HasAddressTaken(PN, AllocSize, M, VisitedPHIs))
352 return true;
353 break;
354 }
355 case Instruction::Load:
356 case Instruction::Ret:
357 // These instructions take an address operand, but have load-like or
358 // other innocuous behavior that should not trigger a stack protector.
359 break;
360 default:
361 // Conservatively return true for any instruction that takes an address
362 // operand, but is not handled above.
363 return true;
364 }
365 }
366 return false;
367}
368
369/// Search for the first call to the llvm.stackprotector intrinsic and return it
370/// if present.
// NOTE(review): extraction dropped original line 371 — per the trailing
// Doxygen index, the signature is
// `static const CallInst *findStackProtectorIntrinsic(Function &F)`.
// Linear scan over all instructions; returns the first llvm.stackprotector
// call, or null if the function has none.
372 for (const BasicBlock &BB : F)
373 for (const Instruction &I : BB)
374 if (const auto *II = dyn_cast<IntrinsicInst>(&I))
375 if (II->getIntrinsicID() == Intrinsic::stackprotector)
376 return II;
377 return nullptr;
378}
379
380/// Check whether or not this function needs a stack protector based
381/// upon the stack protector level.
382///
383/// We use two heuristics: a standard (ssp) and strong (sspstrong).
384/// The standard heuristic which will add a guard variable to functions that
385/// call alloca with a either a variable size or a size >= SSPBufferSize,
386/// functions with character buffers larger than SSPBufferSize, and functions
387/// with aggregates containing character buffers larger than SSPBufferSize. The
388/// strong heuristic will add a guard variables to functions that call alloca
389/// regardless of size, functions with any buffer regardless of type and size,
390/// functions with aggregates that contain any buffer regardless of type and
391/// size, and functions that contain stack-based variables that have had their
392/// address taken.
// NOTE(review): extraction dropped several original lines in this function:
// line 393 (the first half of the signature — per the class index,
// `bool StackProtector::requiresStackProtector(Function *F, ...`), line 413
// (presumably the construction of the `ORE` remark emitter referenced by the
// preceding comment and used below), lines 480-481 (the SSPLayoutKind value
// inserted for a protectable buffer), and line 494 (presumably the
// HasAddressTaken(...) call whose arguments continue on the next line).
// Restore from upstream before compiling.
//
// When `Layout` is null the function answers yes/no only and returns at the
// first trigger; when non-null it keeps scanning so every triggering alloca
// gets a layout-kind entry.
394 SSPLayoutMap *Layout) {
395 Module *M = F->getParent();
396 bool Strong = false;
397 bool NeedsProtector = false;
398
399 // The set of PHI nodes visited when determining if a variable's reference has
400 // been taken. This set is maintained to ensure we don't visit the same PHI
401 // node multiple times.
402 PhiMap VisitedPHIs;
403
404 unsigned SSPBufferSize = F->getFnAttributeAsParsedInteger(
405 "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
406
// SafeStack supersedes stack protectors entirely.
407 if (F->hasFnAttribute(Attribute::SafeStack))
408 return false;
409
410 // We are constructing the OptimizationRemarkEmitter on the fly rather than
411 // using the analysis pass to avoid building DominatorTree and LoopInfo which
412 // are not available this late in the IR pipeline.
414
415 if (F->hasFnAttribute(Attribute::StackProtectReq)) {
416 if (!Layout)
417 return true;
418 ORE.emit([&]() {
419 return OptimizationRemark(DEBUG_TYPE, "StackProtectorRequested", F)
420 << "Stack protection applied to function "
421 << ore::NV("Function", F)
422 << " due to a function attribute or command-line switch";
423 });
424 NeedsProtector = true;
425 Strong = true; // Use the same heuristic as strong to determine SSPLayout
426 } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
427 Strong = true;
428 else if (!F->hasFnAttribute(Attribute::StackProtect))
429 return false;
430
431 for (const BasicBlock &BB : *F) {
432 for (const Instruction &I : BB) {
433 if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
434 if (AI->isArrayAllocation()) {
435 auto RemarkBuilder = [&]() {
436 return OptimizationRemark(DEBUG_TYPE, "StackProtectorAllocaOrArray",
437 &I)
438 << "Stack protection applied to function "
439 << ore::NV("Function", F)
440 << " due to a call to alloca or use of a variable length "
441 "array";
442 };
443 if (const auto *CI = dyn_cast<ConstantInt>(AI->getArraySize())) {
444 if (CI->getLimitedValue(SSPBufferSize) >= SSPBufferSize) {
445 // A call to alloca with size >= SSPBufferSize requires
446 // stack protectors.
447 if (!Layout)
448 return true;
449 Layout->insert(
450 std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
451 ORE.emit(RemarkBuilder);
452 NeedsProtector = true;
453 } else if (Strong) {
454 // Require protectors for all alloca calls in strong mode.
455 if (!Layout)
456 return true;
457 Layout->insert(
458 std::make_pair(AI, MachineFrameInfo::SSPLK_SmallArray));
459 ORE.emit(RemarkBuilder);
460 NeedsProtector = true;
461 }
462 } else {
463 // A call to alloca with a variable size requires protectors.
464 if (!Layout)
465 return true;
466 Layout->insert(
467 std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
468 ORE.emit(RemarkBuilder);
469 NeedsProtector = true;
470 }
471 continue;
472 }
473
474 bool IsLarge = false;
475 if (ContainsProtectableArray(AI->getAllocatedType(), M, SSPBufferSize,
476 IsLarge, Strong, false)) {
477 if (!Layout)
478 return true;
479 Layout->insert(std::make_pair(
482 ORE.emit([&]() {
483 return OptimizationRemark(DEBUG_TYPE, "StackProtectorBuffer", &I)
484 << "Stack protection applied to function "
485 << ore::NV("Function", F)
486 << " due to a stack allocated buffer or struct containing a "
487 "buffer";
488 });
489 NeedsProtector = true;
490 continue;
491 }
492
493 if (Strong &&
495 AI, M->getDataLayout().getTypeAllocSize(AI->getAllocatedType()),
496 M, VisitedPHIs)) {
497 ++NumAddrTaken;
498 if (!Layout)
499 return true;
500 Layout->insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
501 ORE.emit([&]() {
502 return OptimizationRemark(DEBUG_TYPE, "StackProtectorAddressTaken",
503 &I)
504 << "Stack protection applied to function "
505 << ore::NV("Function", F)
506 << " due to the address of a local variable being taken";
507 });
508 NeedsProtector = true;
509 }
510 // Clear any PHIs that we visited, to make sure we examine all uses of
511 // any subsequent allocas that we look at.
512 VisitedPHIs.clear();
513 }
514 }
515 }
516
517 return NeedsProtector;
518}
519
520/// Create a stack guard loading and populate whether SelectionDAG SSP is
521/// supported.
// NOTE(review): extraction dropped original line 522 — per the trailing
// Doxygen index, the full signature is
// `static Value *getStackGuard(const TargetLoweringBase *TLI, Module *M,
//                              IRBuilder<> &B,
//                              bool *SupportsSelectionDAGSP = nullptr)`.
523 IRBuilder<> &B,
524 bool *SupportsSelectionDAGSP = nullptr) {
// Prefer the target's IR-level guard location (TLS-style guard modes only);
// emit a volatile load of the guard value from it.
525 Value *Guard = TLI->getIRStackGuard(B);
526 StringRef GuardMode = M->getStackProtectorGuard();
527 if ((GuardMode == "tls" || GuardMode.empty()) && Guard)
528 return B.CreateLoad(B.getPtrTy(), Guard, true, "StackGuard");
529
530 // Use SelectionDAG SSP handling, since there isn't an IR guard.
531 //
532 // This is more or less weird, since we optionally output whether we
533 // should perform a SelectionDAG SP here. The reason is that it's strictly
534 // defined as !TLI->getIRStackGuard(B), where getIRStackGuard is also
535 // mutating. There is no way to get this bit without mutating the IR, so
536 // getting this bit has to happen in this right time.
537 //
538 // We could have define a new function TLI::supportsSelectionDAGSP(), but that
539 // will put more burden on the backends' overriding work, especially when it
540 // actually conveys the same information getIRStackGuard() already gives.
541 if (SupportsSelectionDAGSP)
542 *SupportsSelectionDAGSP = true;
543 TLI->insertSSPDeclarations(*M);
544 return B.CreateIntrinsic(Intrinsic::stackguard, {});
545}
546
547/// Insert code into the entry block that stores the stack guard
548/// variable onto the stack:
549///
550/// entry:
551/// StackGuardSlot = alloca i8*
552/// StackGuard = <stack guard>
553/// call void @llvm.stackprotector(StackGuard, StackGuardSlot)
554///
555/// Returns true if the platform/triple supports the stackprotectorcreate pseudo
556/// node.
557static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc,
558 const TargetLoweringBase *TLI, AllocaInst *&AI) {
559 bool SupportsSelectionDAGSP = false;
560 IRBuilder<> B(&F->getEntryBlock().front());
561 PointerType *PtrTy = PointerType::getUnqual(CheckLoc->getContext());
562 AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");
563
564 Value *GuardSlot = getStackGuard(TLI, M, B, &SupportsSelectionDAGSP);
565 B.CreateIntrinsic(Intrinsic::stackprotector, {GuardSlot, AI});
566 return SupportsSelectionDAGSP;
567}
568
// NOTE(review): extraction dropped several original lines in this function:
// line 569 (the first half of the signature — the forward declaration above
// reads `static bool InsertStackProtectors(const TargetMachine *TM,
// Function *F, DomTreeUpdater *DTU, bool &HasPrologue, bool &HasIRCheck)`),
// line 580 (the second operand of the `SupportsSelectionDAGSP` disjunction),
// line 584 (the loop header over the function's blocks that `BB` below comes
// from), and lines 690/692 (the expressions assigned to SuccessProb and
// FailureProb — presumably branch-probability queries; confirm upstream).
// Restore from upstream before compiling.
570 DomTreeUpdater *DTU, bool &HasPrologue,
571 bool &HasIRCheck) {
572 auto *M = F->getParent();
573 auto *TLI = TM->getSubtargetImpl(*F)->getTargetLowering();
574
575 // If the target wants to XOR the frame pointer into the guard value, it's
576 // impossible to emit the check in IR, so the target *must* support stack
577 // protection in SDAG.
578 bool SupportsSelectionDAGSP =
579 TLI->useStackGuardXorFP() ||
581 AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.
582 BasicBlock *FailBB = nullptr;
583
585 // This is stack protector auto generated check BB, skip it.
586 if (&BB == FailBB)
587 continue;
// Default check location: the block's return terminator, if any.
588 Instruction *CheckLoc = dyn_cast<ReturnInst>(BB.getTerminator());
589 if (!CheckLoc && !DisableCheckNoReturn)
590 for (auto &Inst : BB) {
591 if (IntrinsicInst *IB = dyn_cast<IntrinsicInst>(&Inst);
592 IB && (IB->getIntrinsicID() == Intrinsic::eh_sjlj_callsite)) {
593 // eh_sjlj_callsite has to be in same BB as the
594 // bb terminator. Don't insert within this range.
595 CheckLoc = IB;
596 break;
597 }
598 if (auto *CB = dyn_cast<CallBase>(&Inst))
599 // Do stack check before noreturn calls that aren't nounwind (e.g:
600 // __cxa_throw).
601 if (CB->doesNotReturn() && !CB->doesNotThrow()) {
602 CheckLoc = CB;
603 break;
604 }
605 }
606
607 if (!CheckLoc)
608 continue;
609
610 // Generate prologue instrumentation if not already generated.
611 if (!HasPrologue) {
612 HasPrologue = true;
613 SupportsSelectionDAGSP &= CreatePrologue(F, M, CheckLoc, TLI, AI);
614 }
615
616 // SelectionDAG based code generation. Nothing else needs to be done here.
617 // The epilogue instrumentation is postponed to SelectionDAG.
618 if (SupportsSelectionDAGSP)
619 break;
620
621 // Find the stack guard slot if the prologue was not created by this pass
622 // itself via a previous call to CreatePrologue().
623 if (!AI) {
624 const CallInst *SPCall = findStackProtectorIntrinsic(*F);
625 assert(SPCall && "Call to llvm.stackprotector is missing");
626 AI = cast<AllocaInst>(SPCall->getArgOperand(1));
627 }
628
629 // Set HasIRCheck to true, so that SelectionDAG will not generate its own
630 // version. SelectionDAG called 'shouldEmitSDCheck' to check whether
631 // instrumentation has already been generated.
632 HasIRCheck = true;
633
634 // If we're instrumenting a block with a tail call, the check has to be
635 // inserted before the call rather than between it and the return.
636 Instruction *Prev = CheckLoc->getPrevNode();
637 if (auto *CI = dyn_cast_if_present<CallInst>(Prev))
638 if (CI->isTailCall() && isInTailCallPosition(*CI, *TM))
639 CheckLoc = Prev;
640
641 // Generate epilogue instrumentation. The epilogue intrumentation can be
642 // function-based or inlined depending on which mechanism the target is
643 // providing.
644 if (Function *GuardCheck = TLI->getSSPStackGuardCheck(*M)) {
645 // Generate the function-based epilogue instrumentation.
646 // The target provides a guard check function, generate a call to it.
647 IRBuilder<> B(CheckLoc);
648 LoadInst *Guard = B.CreateLoad(B.getPtrTy(), AI, true, "Guard");
649 CallInst *Call = B.CreateCall(GuardCheck, {Guard});
650 Call->setAttributes(GuardCheck->getAttributes());
651 Call->setCallingConv(GuardCheck->getCallingConv());
652 } else {
653 // Generate the epilogue with inline instrumentation.
654 // If we do not support SelectionDAG based calls, generate IR level
655 // calls.
656 //
657 // For each block with a return instruction, convert this:
658 //
659 // return:
660 // ...
661 // ret ...
662 //
663 // into this:
664 //
665 // return:
666 // ...
667 // %1 = <stack guard>
668 // %2 = load StackGuardSlot
669 // %3 = icmp ne i1 %1, %2
670 // br i1 %3, label %CallStackCheckFailBlk, label %SP_return
671 //
672 // SP_return:
673 // ret ...
674 //
675 // CallStackCheckFailBlk:
676 // call void @__stack_chk_fail()
677 // unreachable
678
679 // Create the FailBB. We duplicate the BB every time since the MI tail
680 // merge pass will merge together all of the various BB into one including
681 // fail BB generated by the stack protector pseudo instruction.
682 if (!FailBB)
683 FailBB = CreateFailBB(F, *TLI);
684
685 IRBuilder<> B(CheckLoc);
686 Value *Guard = getStackGuard(TLI, M, B);
687 LoadInst *LI2 = B.CreateLoad(B.getPtrTy(), AI, true);
688 auto *Cmp = cast<ICmpInst>(B.CreateICmpNE(Guard, LI2));
689 auto SuccessProb =
691 auto FailureProb =
693 MDNode *Weights = MDBuilder(F->getContext())
694 .createBranchWeights(FailureProb.getNumerator(),
695 SuccessProb.getNumerator());
696
697 SplitBlockAndInsertIfThen(Cmp, CheckLoc,
698 /*Unreachable=*/false, Weights, DTU,
699 /*LI=*/nullptr, /*ThenBlock=*/FailBB);
700
// SplitBlockAndInsertIfThen leaves the fail path as the taken successor;
// invert the compare and swap successors so the hot path falls through.
701 auto *BI = cast<BranchInst>(Cmp->getParent()->getTerminator());
702 BasicBlock *NewBB = BI->getSuccessor(1);
703 NewBB->setName("SP_return");
704 NewBB->moveAfter(&BB);
705
706 Cmp->setPredicate(Cmp->getInversePredicate());
707 BI->swapSuccessors();
708 }
709 }
710
711 // Return if we didn't modify any basic blocks. i.e., there are no return
712 // statements in the function.
713 return HasPrologue;
714}
715
// NOTE(review): extraction dropped original line 716 — per the trailing
// Doxygen index, the signature is
// `static BasicBlock *CreateFailBB(Function *F, const TargetLowering &TLI)`
// — and line 725, presumably the declaration of the `Args` vector
// (SmallVector per the index) populated/passed below. Restore from upstream.
717 auto *M = F->getParent();
718 LLVMContext &Context = F->getContext();
719 BasicBlock *FailBB = BasicBlock::Create(Context, "CallStackCheckFailBlk", F);
720 IRBuilder<> B(FailBB);
// Give the failure call a (line 0) debug location so it maps to the function.
721 if (F->getSubprogram())
722 B.SetCurrentDebugLocation(
723 DILocation::get(Context, 0, 0, F->getSubprogram()));
724 FunctionCallee StackChkFail;
726
// Prefer __stack_chk_fail(); fall back to the OpenBSD-style smash handler,
// which additionally receives the function name.
727 if (const char *ChkFailName =
728 TLI.getLibcallName(RTLIB::STACKPROTECTOR_CHECK_FAIL)) {
729 StackChkFail =
730 M->getOrInsertFunction(ChkFailName, Type::getVoidTy(Context));
731 } else if (const char *SSHName =
732 TLI.getLibcallName(RTLIB::STACK_SMASH_HANDLER)) {
733 StackChkFail = M->getOrInsertFunction(SSHName, Type::getVoidTy(Context),
734 PointerType::getUnqual(Context));
735 Args.push_back(B.CreateGlobalString(F->getName(), "SSH"));
736 } else {
737 Context.emitError("no libcall available for stack protector");
738 }
739
740 if (StackChkFail) {
741 CallInst *Call = B.CreateCall(StackChkFail, Args);
742 Call->addFnAttr(Attribute::NoReturn);
743 }
744
745 B.CreateUnreachable();
746 return FailBB;
747}
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the simple types necessary to represent the attributes associated with functions a...
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
This file contains the declarations for the subclasses of Constant, which represent the different fla...
#define DEBUG_TYPE
Hexagon Common GEP
Module.h This file contains the declarations for the Module class.
#define F(x, y, z)
Definition MD5.cpp:54
#define I(x, y, z)
Definition MD5.cpp:57
This file provides utility analysis objects describing memory locations.
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
#define INITIALIZE_PASS_DEPENDENCY(depName)
Definition PassSupport.h:42
#define INITIALIZE_PASS_END(passName, arg, name, cfg, analysis)
Definition PassSupport.h:44
#define INITIALIZE_PASS_BEGIN(passName, arg, name, cfg, analysis)
Definition PassSupport.h:39
This file defines the SmallVector class.
static Value * getStackGuard(const TargetLoweringBase *TLI, Module *M, IRBuilder<> &B, bool *SupportsSelectionDAGSP=nullptr)
Create a stack guard loading and populate whether SelectionDAG SSP is supported.
static BasicBlock * CreateFailBB(Function *F, const TargetLowering &TLI)
CreateFailBB - Create a basic block to jump to when the stack protector check fails.
static bool InsertStackProtectors(const TargetMachine *TM, Function *F, DomTreeUpdater *DTU, bool &HasPrologue, bool &HasIRCheck)
InsertStackProtectors - Insert code into the prologue and epilogue of the function.
static bool HasAddressTaken(const Instruction *AI, TypeSize AllocSize, Module *M, PhiMap &VisitedPHIs)
Check whether a stack allocation has its address taken.
static cl::opt< bool > DisableCheckNoReturn("disable-check-noreturn-call", cl::init(false), cl::Hidden)
static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc, const TargetLoweringBase *TLI, AllocaInst *&AI)
Insert code into the entry block that stores the stack guard variable onto the stack:
SmallDenseMap< const PHINode *, PhiInfo, 16 > PhiMap
static bool ContainsProtectableArray(Type *Ty, Module *M, unsigned SSPBufferSize, bool &IsLarge, bool Strong, bool InStruct)
static cl::opt< bool > EnableSelectionDAGSP("enable-selectiondag-sp", cl::init(true), cl::Hidden)
static const CallInst * findStackProtectorIntrinsic(Function &F)
Search for the first call to the llvm.stackprotector intrinsic and return it if present.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Definition Statistic.h:171
This file describes how to lower LLVM code to machine code.
Target-Independent Code Generator Pass Configuration Options pass.
Class for arbitrary precision integers.
Definition APInt.h:78
an instruction to allocate memory on the stack
Represent the analysis usage information of a pass.
AnalysisUsage & addRequired()
AnalysisUsage & addPreserved()
Add the specified Pass class to the set of analyses preserved by this pass.
LLVM Basic Block Representation.
Definition BasicBlock.h:62
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
Definition BasicBlock.h:206
LLVM_ABI void moveAfter(BasicBlock *MovePos)
Unlink this basic block from its current function and insert it right after MovePos in the function M...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
Definition BasicBlock.h:233
static BranchProbability getBranchProbStackProtector(bool IsLikely)
Value * getArgOperand(unsigned i) const
This class represents a function call, abstracting a target machine's calling convention.
A parsed version of the target data layout string in and methods for querying it.
Definition DataLayout.h:63
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
Definition DenseMap.h:256
DenseMapIterator< KeyT, ValueT, KeyInfoT, BucketT, true > const_iterator
Definition DenseMap.h:75
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Definition DenseMap.h:241
Analysis pass which computes a DominatorTree.
Definition Dominators.h:283
Legacy analysis pass which computes a DominatorTree.
Definition Dominators.h:321
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
FunctionPass class - This class is used to implement most global optimizations.
Definition Pass.h:314
FunctionPass(char &pid)
Definition Pass.h:316
uint64_t getFnAttributeAsParsedInteger(StringRef Kind, uint64_t Default=0) const
For a string attribute Kind, parse attribute as an integer.
Definition Function.cpp:777
bool hasPersonalityFn() const
Check whether this function has a personality function.
Definition Function.h:903
Constant * getPersonalityFn() const
Get the personality function associated with this function.
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
Module * getParent()
Get the module that this global value is contained inside of...
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Definition IRBuilder.h:2788
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Definition LLVMContext.h:68
An instruction for reading from memory.
LLVM_ABI MDNode * createBranchWeights(uint32_t TrueWeight, uint32_t FalseWeight, bool IsExpected=false)
Return metadata containing two branch weights.
Definition MDBuilder.cpp:38
Metadata node.
Definition Metadata.h:1078
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
Definition Metadata.h:1569
The MachineFrameInfo class represents an abstract stack frame until prolog/epilog code is inserted.
const AllocaInst * getObjectAllocation(int ObjectIdx) const
Return the underlying Alloca of the specified stack object if it exists.
@ SSPLK_SmallArray
Array or nested array < SSP-buffer-size.
@ SSPLK_LargeArray
Array or nested array >= SSP-buffer-size.
@ SSPLK_AddrOf
The address of this allocation is exposed and triggered protection.
void setObjectSSPLayout(int ObjectIdx, SSPLayoutKind Kind)
int getObjectIndexEnd() const
Return one past the maximum frame object index.
bool isDeadObjectIndex(int ObjectIdx) const
Returns true if the specified index corresponds to a dead object.
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Definition Module.h:67
The optimization diagnostic interface.
LLVM_ABI void emit(DiagnosticInfoOptimizationBase &OptDiag)
Output the remark via the diagnostic handler and to the optimization record file.
Diagnostic information for applied optimization remarks.
static LLVM_ABI PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
AnalysisType & getAnalysis() const
getAnalysis<AnalysisType>() - This function is used by subclasses to get to the analysis information ...
AnalysisType * getAnalysisIfAvailable() const
getAnalysisIfAvailable<AnalysisType>() - Subclasses use this function to get analysis information tha...
static PointerType * getUnqual(Type *ElementType)
This constructs a pointer to an object of the specified type in the default address space (address space zero).
A set of analyses that are preserved following a run of a transformation pass.
Definition Analysis.h:112
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Definition Analysis.h:118
PreservedAnalyses & preserve()
Mark an analysis as preserved.
Definition Analysis.h:132
static bool requiresStackProtector(Function *F, SSPLayoutMap *Layout=nullptr)
Check whether or not F needs a stack protector based upon the stack protector level.
Result run(Function &F, FunctionAnalysisManager &FAM)
void copyToMachineFrameInfo(MachineFrameInfo &MFI) const
bool shouldEmitSDCheck(const BasicBlock &BB) const
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
PreservedAnalyses run(Function &F, FunctionAnalysisManager &FAM)
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - This function should be overridden by passes that need analysis information to do their job.
static bool requiresStackProtector(Function *F, SSPLayoutMap *Layout=nullptr)
Check whether or not F needs a stack protector based upon the stack protector level.
bool runOnFunction(Function &Fn) override
runOnFunction - Virtual method overridden by subclasses to do the per-function processing of the pass.
StringRef - Represent a constant reference to a string, i.e. a character array and a length, which need not be null terminated.
Definition StringRef.h:55
constexpr bool empty() const
empty - Check if the string is empty.
Definition StringRef.h:143
Class to represent struct types.
This base class for TargetLowering contains the SelectionDAG-independent parts that can be used from the rest of CodeGen.
virtual bool useStackGuardXorFP() const
If this function returns true, stack protection checks should XOR the frame pointer (or whichever pointer is used to address locals) into the stack guard value before checking it.
virtual Value * getIRStackGuard(IRBuilderBase &IRB) const
If the target has a standard location for the stack protector guard, returns the address of that location.
const char * getLibcallName(RTLIB::Libcall Call) const
Get the libcall routine name for the specified libcall.
virtual void insertSSPDeclarations(Module &M) const
Inserts necessary declarations for SSP (stack protection) purpose.
This class defines information used to lower LLVM code to legal SelectionDAG operators that the target instruction selector can accept natively.
Primary interface to the complete machine description for the target machine.
virtual const TargetSubtargetInfo * getSubtargetImpl(const Function &) const
Virtual method implemented by subclasses that returns a reference to that target's TargetSubtargetInfo-derived member variable.
TargetOptions Options
unsigned EnableFastISel
EnableFastISel - This flag enables fast-path instruction selection which trades away generated code quality in favor of reducing compile time.
Target-Independent Code Generator Pass Configuration Options.
virtual const TargetLowering * getTargetLowering() const
static constexpr TypeSize getFixed(ScalarTy ExactSize)
Definition TypeSize.h:343
The instances of the Type class are immutable: once they are created, they are never changed.
Definition Type.h:45
static LLVM_ABI Type * getVoidTy(LLVMContext &C)
Definition Type.cpp:280
LLVM Value Representation.
Definition Value.h:75
LLVM_ABI void setName(const Twine &Name)
Change the name of the value.
Definition Value.cpp:390
iterator_range< user_iterator > users()
Definition Value.h:426
constexpr ScalarTy getKnownMinValue() const
Returns the minimum value this quantity can represent.
Definition TypeSize.h:165
static constexpr bool isKnownGT(const FixedOrScalableQuantity &LHS, const FixedOrScalableQuantity &RHS)
Definition TypeSize.h:223
static constexpr bool isKnownGE(const FixedOrScalableQuantity &LHS, const FixedOrScalableQuantity &RHS)
Definition TypeSize.h:237
CallInst * Call
Changed
initializer< Ty > init(const Ty &Val)
DiagnosticInfoOptimizationBase::Argument NV
This is an optimization pass for GlobalISel generic memory operations.
@ Offset
Definition DWP.cpp:532
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
Definition Casting.h:643
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case of optionals) value is accepted.
Definition Casting.h:732
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting iteration.
Definition STLExtras.h:632
LLVM_ABI FunctionPass * createStackProtectorPass()
createStackProtectorPass - This pass adds stack protectors to functions.
LLVM_ABI void initializeStackProtectorPass(PassRegistry &)
LLVM_ABI EHPersonality classifyEHPersonality(const Value *Pers)
See if the given exception handling personality function is one that we understand.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type arguments.
Definition Casting.h:547
bool isFuncletEHPersonality(EHPersonality Pers)
Returns true if this is a personality function that invokes handler funclets (which must return to it).
bool isInTailCallPosition(const CallBase &Call, const TargetMachine &TM, bool ReturnsFirstArg=false)
Test if the given instruction is in a position to be optimized with a tail-call.
Definition Analysis.cpp:543
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
Definition Casting.h:559
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the old basic block, and the rest of the instructions in the BB are moved to a new block.
AnalysisManager< Function > FunctionAnalysisManager
Convenience typedef for the Function analysis manager.
PhiInfo(TypeSize AllocSize)
unsigned NumDecreased
static constexpr unsigned MaxNumDecreased
TypeSize AllocSize
A special type used by analysis passes to provide an address that identifies that particular analysis...
Definition Analysis.h:29