22 #include "llvm/Support/SaveAndRestore.h"
24 using namespace clang;
25 using namespace CodeGen;
42 return saved_type(V, ScalarLiteral);
48 return saved_type(addr.
getPointer(), ScalarAddress);
54 llvm::StructType::get(V.first->getType(), V.second->getType(),
63 return saved_type(addr.
getPointer(), ComplexAddress);
69 return saved_type(V.getPointer(), AggregateLiteral,
70 V.getAlignment().getQuantity());
75 return saved_type(addr.getPointer(), AggregateAddress,
76 V.getAlignment().getQuantity());
84 auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
92 case AggregateLiteral:
94 case AggregateAddress: {
98 case ComplexAddress: {
110 llvm_unreachable(
"bad saved r-value kind");
114 char *EHScopeStack::allocate(
size_t Size) {
116 if (!StartOfBuffer) {
117 unsigned Capacity = 1024;
118 while (Capacity < Size) Capacity *= 2;
119 StartOfBuffer =
new char[Capacity];
120 StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
121 }
else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
122 unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
123 unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
125 unsigned NewCapacity = CurrentCapacity;
128 }
while (NewCapacity < UsedCapacity + Size);
130 char *NewStartOfBuffer =
new char[NewCapacity];
131 char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
132 char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
133 memcpy(NewStartOfData, StartOfData, UsedCapacity);
134 delete [] StartOfBuffer;
135 StartOfBuffer = NewStartOfBuffer;
136 EndOfBuffer = NewEndOfBuffer;
137 StartOfData = NewStartOfData;
140 assert(StartOfBuffer + Size <= StartOfData);
145 void EHScopeStack::deallocate(
size_t Size) {
153 if (!cleanup || !cleanup->isLifetimeMarker())
163 if (
auto *
cleanup = dyn_cast<EHCleanupScope>(&*
find(si)))
164 if (
cleanup->isLifetimeMarker()) {
165 si =
cleanup->getEnclosingEHScope();
179 if (cleanup.isActive())
return si;
180 si = cleanup.getEnclosingNormalCleanup();
198 InnermostNormalCleanup,
204 if (IsLifetimeMarker)
205 Scope->setLifetimeMarker();
207 return Scope->getCleanupBuffer();
211 assert(!
empty() &&
"popping exception stack when not empty");
213 assert(isa<EHCleanupScope>(*
begin()));
215 InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
216 InnermostEHScope = Cleanup.getEnclosingEHScope();
217 deallocate(Cleanup.getAllocatedSize());
223 if (!BranchFixups.empty()) {
227 BranchFixups.clear();
244 assert(!
empty() &&
"popping exception stack when not empty");
255 new (buffer)
EHCatchScope(numHandlers, InnermostEHScope);
277 unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
278 assert(BranchFixups.size() >= MinSize &&
"fixup stack out of order");
280 while (BranchFixups.size() > MinSize &&
281 BranchFixups.back().Destination ==
nullptr)
282 BranchFixups.pop_back();
299 assert(!cleanup.hasActiveFlag() &&
"cleanup already has active flag?");
300 cleanup.setActiveFlag(active);
302 if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
303 if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
// Out-of-line anchor for EHScopeStack::Cleanup.  Intentionally empty: per
// the standard LLVM anchor idiom, defining one method out-of-line here pins
// the class's vtable emission to this translation unit instead of every TU
// that includes the header.
void EHScopeStack::Cleanup::anchor() {}
309 llvm::Instruction *beforeInst) {
310 auto store =
new llvm::StoreInst(value, addr.
getPointer(), beforeInst);
315 llvm::Instruction *beforeInst) {
316 auto load =
new llvm::LoadInst(addr.
getPointer(), name, beforeInst);
325 llvm::SwitchInst *Switch,
326 llvm::BasicBlock *CleanupEntry) {
327 llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;
363 llvm::BasicBlock *Block) {
366 llvm::TerminatorInst *Term = Block->getTerminator();
367 assert(Term &&
"can't transition block without terminator");
369 if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
370 assert(Br->isUnconditional());
372 "cleanup.dest", Term);
373 llvm::SwitchInst *Switch =
375 Br->eraseFromParent();
378 return cast<llvm::SwitchInst>(Term);
383 assert(Block &&
"resolving a null target block");
384 if (!EHStack.getNumBranchFixups())
return;
386 assert(EHStack.hasNormalCleanups() &&
387 "branch fixups exist with no normal cleanups on stack");
389 llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
390 bool ResolvedAny =
false;
392 for (
unsigned I = 0,
E = EHStack.getNumBranchFixups();
I !=
E; ++
I) {
407 if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
417 EHStack.popNullFixups();
424 while (EHStack.stable_begin() != Old) {
430 bool FallThroughIsBranchThrough =
433 PopCleanupBlock(FallThroughIsBranchThrough);
441 size_t OldLifetimeExtendedSize) {
442 PopCleanupBlocks(Old);
445 for (
size_t I = OldLifetimeExtendedSize,
446 E = LifetimeExtendedCleanupStack.size();
I !=
E; ) {
448 assert((
I % llvm::alignOf<LifetimeExtendedCleanupHeader>() == 0) &&
449 "misaligned cleanup stack entry");
453 LifetimeExtendedCleanupStack[
I]);
456 EHStack.pushCopyOfCleanup(Header.
getKind(),
457 &LifetimeExtendedCleanupStack[
I],
461 LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
466 assert(Scope.isNormalCleanup());
467 llvm::BasicBlock *Entry = Scope.getNormalBlock();
470 Scope.setNormalBlock(Entry);
481 llvm::BasicBlock *Entry) {
482 llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
483 if (!Pred)
return Entry;
485 llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
486 if (!Br || Br->isConditional())
return Entry;
487 assert(Br->getSuccessor(0) == Entry);
492 bool WasInsertBlock = CGF.
Builder.GetInsertBlock() == Entry;
493 assert(!WasInsertBlock || CGF.
Builder.GetInsertPoint() == Entry->end());
496 Br->eraseFromParent();
500 Entry->replaceAllUsesWith(Pred);
503 Pred->getInstList().splice(Pred->end(), Entry->getInstList());
506 Entry->eraseFromParent();
509 CGF.
Builder.SetInsertPoint(Pred);
520 llvm::BasicBlock *ContBB =
nullptr;
526 CGF.
Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
531 Fn->
Emit(CGF, flags);
532 assert(CGF.
HaveInsertPoint() &&
"cleanup ended with no insertion point?");
540 llvm::BasicBlock *From,
541 llvm::BasicBlock *To) {
544 llvm::TerminatorInst *Term = Exit->getTerminator();
546 if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
547 assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
548 Br->setSuccessor(0, To);
550 llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
551 for (
unsigned I = 0,
E = Switch->getNumSuccessors();
I !=
E; ++
I)
552 if (Switch->getSuccessor(
I) == From)
553 Switch->setSuccessor(
I, To);
565 llvm::BasicBlock *entry = scope.getNormalBlock();
570 for (llvm::BasicBlock::use_iterator
571 i = entry->use_begin(), e = entry->use_end(); i != e; ) {
575 use.set(unreachableBB);
578 llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
579 if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
584 llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());
587 si->eraseFromParent();
591 assert(condition->use_empty());
592 condition->eraseFromParent();
596 assert(entry->use_empty());
604 assert(!EHStack.empty() &&
"cleanup stack is empty!");
605 assert(isa<EHCleanupScope>(*EHStack.begin()) &&
"top not a cleanup!");
607 assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
610 bool IsActive = Scope.isActive();
612 Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
615 Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
620 llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
621 assert(Scope.hasEHBranches() == (EHEntry !=
nullptr));
622 bool RequiresEHCleanup = (EHEntry !=
nullptr);
629 bool HasFixups = EHStack.getNumBranchFixups() !=
FixupDepth;
632 bool HasExistingBranches = Scope.hasBranches();
635 llvm::BasicBlock *FallthroughSource =
Builder.GetInsertBlock();
636 bool HasFallthrough = (FallthroughSource !=
nullptr && IsActive);
642 bool HasPrebranchedFallthrough =
643 (FallthroughSource && FallthroughSource->getTerminator());
648 assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
649 (Scope.getNormalBlock() &&
650 FallthroughSource->getTerminator()->getSuccessor(0)
651 == Scope.getNormalBlock()));
653 bool RequiresNormalCleanup =
false;
654 if (Scope.isNormalCleanup() &&
655 (HasFixups || HasExistingBranches || HasFallthrough)) {
656 RequiresNormalCleanup =
true;
661 if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
662 llvm::BasicBlock *prebranchDest;
667 if (FallthroughIsBranchThrough) {
668 EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
676 prebranchDest = createBasicBlock(
"forwarded-prebranch");
677 EmitBlock(prebranchDest);
680 llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
681 assert(normalEntry && !normalEntry->use_empty());
684 normalEntry, prebranchDest);
688 if (!RequiresNormalCleanup && !RequiresEHCleanup) {
690 EHStack.popCleanup();
691 assert(EHStack.getNumBranchFixups() == 0 ||
692 EHStack.hasNormalCleanups());
701 auto *CleanupSource =
reinterpret_cast<char *
>(Scope.getCleanupBuffer());
702 llvm::AlignedCharArray<EHScopeStack::ScopeStackAlignment, 8 * sizeof(void *)> CleanupBufferStack;
703 std::unique_ptr<char[]> CleanupBufferHeap;
704 size_t CleanupSize = Scope.getCleanupSize();
707 if (CleanupSize <=
sizeof(CleanupBufferStack)) {
708 memcpy(CleanupBufferStack.buffer, CleanupSource, CleanupSize);
711 CleanupBufferHeap.reset(
new char[CleanupSize]);
712 memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
717 if (Scope.isNormalCleanup())
719 if (Scope.isEHCleanup())
722 if (!RequiresNormalCleanup) {
724 EHStack.popCleanup();
728 if (HasFallthrough && !HasPrebranchedFallthrough &&
729 !HasFixups && !HasExistingBranches) {
732 EHStack.popCleanup();
734 EmitCleanup(*
this, Fn, cleanupFlags, NormalActiveFlag);
744 CGBuilderTy::InsertPoint savedInactiveFallthroughIP;
748 if (HasFallthrough) {
749 if (!HasPrebranchedFallthrough)
750 Builder.CreateStore(
Builder.getInt32(0), getNormalCleanupDestSlot());
754 }
else if (FallthroughSource) {
755 assert(!IsActive &&
"source without fallthrough for active cleanup");
756 savedInactiveFallthroughIP =
Builder.saveAndClearIP();
762 EmitBlock(NormalEntry);
767 bool HasEnclosingCleanups =
768 (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
775 llvm::BasicBlock *BranchThroughDest =
nullptr;
776 if (Scope.hasBranchThroughs() ||
777 (FallthroughSource && FallthroughIsBranchThrough) ||
778 (HasFixups && HasEnclosingCleanups)) {
779 assert(HasEnclosingCleanups);
780 EHScope &
S = *EHStack.find(Scope.getEnclosingNormalCleanup());
784 llvm::BasicBlock *FallthroughDest =
nullptr;
789 if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
790 Scope.getNumBranchAfters() == 1) {
791 assert(!BranchThroughDest || !IsActive);
794 llvm::Instruction *NormalCleanupDestSlot =
795 cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
796 if (NormalCleanupDestSlot->hasOneUse()) {
797 NormalCleanupDestSlot->user_back()->eraseFromParent();
798 NormalCleanupDestSlot->eraseFromParent();
799 NormalCleanupDest =
nullptr;
802 llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
810 }
else if (Scope.getNumBranchAfters() ||
811 (HasFallthrough && !FallthroughIsBranchThrough) ||
812 (HasFixups && !HasEnclosingCleanups)) {
814 llvm::BasicBlock *Default =
815 (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());
818 const unsigned SwitchCapacity = 10;
820 llvm::LoadInst *Load =
823 llvm::SwitchInst *Switch =
826 InstsToAppend.push_back(Load);
827 InstsToAppend.push_back(Switch);
830 if (FallthroughSource && !FallthroughIsBranchThrough) {
831 FallthroughDest = createBasicBlock(
"cleanup.cont");
833 Switch->addCase(
Builder.getInt32(0), FallthroughDest);
836 for (
unsigned I = 0,
E = Scope.getNumBranchAfters();
I !=
E; ++
I) {
837 Switch->addCase(Scope.getBranchAfterIndex(
I),
838 Scope.getBranchAfterBlock(
I));
843 if (HasFixups && !HasEnclosingCleanups)
847 assert(BranchThroughDest);
852 EHStack.popCleanup();
853 assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
855 EmitCleanup(*
this, Fn, cleanupFlags, NormalActiveFlag);
858 llvm::BasicBlock *NormalExit =
Builder.GetInsertBlock();
859 for (
unsigned I = 0,
E = InstsToAppend.size();
I !=
E; ++
I)
860 NormalExit->getInstList().push_back(InstsToAppend[
I]);
863 for (
unsigned I = FixupDepth,
E = EHStack.getNumBranchFixups();
869 getNormalCleanupDestSlot(),
880 if (!HasFallthrough && FallthroughSource) {
885 Builder.restoreIP(savedInactiveFallthroughIP);
890 }
else if (HasFallthrough && FallthroughDest) {
891 assert(!FallthroughIsBranchThrough);
892 EmitBlock(FallthroughDest);
896 }
else if (HasFallthrough) {
908 llvm::BasicBlock *NewNormalEntry =
913 if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
914 for (
unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
916 EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
920 assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
923 if (RequiresEHCleanup) {
924 CGBuilderTy::InsertPoint SavedIP =
Builder.saveAndClearIP();
928 llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);
933 bool PushedTerminate =
false;
936 llvm::CleanupPadInst *CPI =
nullptr;
938 EHStack.pushTerminate();
939 PushedTerminate =
true;
943 ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
944 CurrentFuncletPad = CPI =
Builder.CreateCleanupPad(ParentPad);
949 if (EHActiveFlag.
isValid() || IsActive) {
951 EmitCleanup(*
this, Fn, cleanupFlags, EHActiveFlag);
955 Builder.CreateCleanupRet(CPI, NextAction);
961 EHStack.popTerminate();
974 &&
"stale jump destination");
978 EHStack.getInnermostActiveNormalCleanup();
983 if (TopCleanup == EHStack.stable_end() ||
999 &&
"stale jump destination");
1001 if (!HaveInsertPoint())
1009 TopCleanup = EHStack.getInnermostActiveNormalCleanup();
1014 if (TopCleanup == EHStack.stable_end() ||
1016 Builder.ClearInsertionPoint();
1029 Builder.ClearInsertionPoint();
1042 cast<EHCleanupScope>(*EHStack.find(TopCleanup));
1052 assert(Scope.isNormalCleanup());
1053 I = Scope.getEnclosingNormalCleanup();
1058 Scope.addBranchAfter(Index, Dest.
getBlock());
1065 if (!Scope.addBranchThrough(Dest.
getBlock()))
1070 Builder.ClearInsertionPoint();
1085 if (S.getNormalBlock())
return true;
1086 I = S.getEnclosingNormalCleanup();
1104 if (scope.hasEHBranches())
1107 i = scope.getEnclosingEHScope();
1126 llvm::Instruction *dominatingIP) {
1132 bool isActivatedInConditional =
1135 bool needFlag =
false;
1140 if (Scope.isNormalCleanup() &&
1142 Scope.setTestFlagInNormalCleanup();
1147 if (Scope.isEHCleanup() &&
1149 Scope.setTestFlagInEHCleanup();
1154 if (!needFlag)
return;
1156 Address var = Scope.getActiveFlag();
1159 "cleanup.isactive");
1160 Scope.setActiveFlag(var);
1162 assert(dominatingIP &&
"no existing variable and no dominating IP!");
1182 llvm::Instruction *dominatingIP) {
1183 assert(C != EHStack.stable_end() &&
"activating bottom of stack?");
1185 assert(!Scope.isActive() &&
"double activation");
1189 Scope.setActive(
true);
1194 llvm::Instruction *dominatingIP) {
1195 assert(C != EHStack.stable_end() &&
"deactivating bottom of stack?");
1197 assert(Scope.isActive() &&
"double deactivation");
1200 if (C == EHStack.stable_begin()) {
1203 CGBuilderTy::InsertPoint SavedIP =
Builder.saveAndClearIP();
1212 Scope.setActive(
false);
1216 if (!NormalCleanupDest)
1218 CreateTempAlloca(
Builder.getInt32Ty(),
"cleanup.dest.slot");
void pushTerminate()
Push a terminate handler on the stack.
static llvm::BasicBlock * CreateNormalEntry(CodeGenFunction &CGF, EHCleanupScope &Scope)
void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup, llvm::Instruction *DominatingIP)
ActivateCleanupBlock - Activates an initially-inactive cleanup.
A (possibly-)qualified type.
void setIsEHCleanupKind()
static llvm::LoadInst * createLoadInstBefore(Address addr, const Twine &name, llvm::Instruction *beforeInst)
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF, EHCleanupScope &scope)
We don't need a normal entry block for the given cleanup.
bool isInConditionalBranch() const
isInConditionalBranch - Return true if we're currently emitting one branch or the other of a conditional expression.
std::pair< llvm::Value *, llvm::Value * > getComplexVal() const
getComplexVal - Return the real/imag components of this complex value.
static stable_iterator stable_end()
Create a stable reference to the bottom of the EH stack.
llvm::AllocaInst * CreateTempAlloca(llvm::Type *Ty, const Twine &Name="tmp")
CreateTempAlloca - This creates an alloca and inserts it into the entry block.
QuantityType getQuantity() const
getQuantity - Get the raw integer representation of this quantity.
static llvm::SwitchInst * TransitionToCleanupSwitch(CodeGenFunction &CGF, llvm::BasicBlock *Block)
Transitions the terminator of the given exit-block of a cleanup to be a cleanup switch.
const llvm::DataLayout & getDataLayout() const
std::unique_ptr< llvm::MemoryBuffer > Buffer
static bool needsSaving(llvm::Value *value)
Answer whether the given value needs extra work to be saved.
static void EmitCleanup(CodeGenFunction &CGF, EHScopeStack::Cleanup *Fn, EHScopeStack::Cleanup::Flags flags, Address ActiveFlag)
A protected scope for zero-cost EH handling.
A scope which attempts to handle some, possibly all, types of exceptions.
A jump destination is an abstract label, branching to which may require a jump out through normal cle...
An exceptions scope which calls std::terminate if any exception reaches it.
stable_iterator stabilize(iterator it) const
Translates an iterator into a stable_iterator.
bool requiresLandingPad() const
CodeGenFunction - This class organizes the per-function state that is used while generating LLVM code...
Denotes a cleanup that should run when a scope is exited using exceptional control flow (a throw stat...
bool hasEHBranches() const
static const EHPersonality & get(CodeGenModule &CGM, const FunctionDecl *FD)
A metaprogramming class for ensuring that a value will dominate an arbitrary position in a function...
class EHCatchScope * pushCatch(unsigned NumHandlers)
Push a set of catch handlers on the stack.
iterator begin() const
Returns an iterator pointing to the innermost EH scope.
unsigned FixupDepth
The number of fixups required by enclosing scopes (not including this one).
BranchFixup & getBranchFixup(unsigned I)
CharUnits - This is an opaque type for sizes expressed in character units.
bool isObviouslyBranchWithoutCleanups(JumpDest Dest) const
isObviouslyBranchWithoutCleanups - Return true if a branch to the specified destination obviously has...
Scope - A scope is a transient data structure that is used while parsing the program.
void initFullExprCleanup()
Set up the last cleanup that was pushed as a conditional full-expression cleanup.
A stack of scopes which respond to exceptions, including cleanups and catch blocks.
llvm::BasicBlock * createBasicBlock(const Twine &name="", llvm::Function *parent=nullptr, llvm::BasicBlock *before=nullptr)
createBasicBlock - Create an LLVM basic block.
Denotes a cleanup that should run when a scope is exited using normal control flow (falling off the e...
void popFilter()
Pops an exceptions filter off the stack.
detail::InMemoryDirectory::const_iterator I
static bool IsUsedAsEHCleanup(EHScopeStack &EHStack, EHScopeStack::stable_iterator cleanup)
static size_t getSizeForNumHandlers(unsigned N)
iterator find(stable_iterator save) const
Turn a stable reference to a scope depth into an unstable pointer to the EH stack. ...
static CharUnits One()
One - Construct a CharUnits quantity of one.
std::pair< llvm::Value *, llvm::Value * > ComplexPairTy
unsigned getNumBranchFixups() const
static void createStoreInstBefore(llvm::Value *value, Address addr, llvm::Instruction *beforeInst)
RValue - This trivial value class is used to represent the result of an expression that is evaluated...
static void ResolveAllBranchFixups(CodeGenFunction &CGF, llvm::SwitchInst *Switch, llvm::BasicBlock *CleanupEntry)
All the branch fixups on the EH stack have propagated out past the outermost normal cleanup; resolve ...
llvm::BranchInst * InitialBranch
The initial branch of the fixup.
Address CreateDefaultAlignTempAlloca(llvm::Type *Ty, const Twine &Name="tmp")
CreateDefaultAlignTempAlloca - This creates an alloca with the default ABI alignment of the given L...
llvm::Value * getPointer() const
bool empty() const
Determines whether the exception-scopes stack is empty.
static size_t getSizeForCleanupSize(size_t Size)
Gets the size required for a lazy cleanup scope with the given cleanup-data requirements.
void clearFixups()
Clears the branch-fixups list.
llvm::AllocaInst * NormalCleanupDest
i32s containing the indexes of the cleanup destinations.
llvm::BasicBlock * getBlock() const
EHScopeStack::stable_iterator getScopeDepth() const
static CharUnits fromQuantity(QuantityType Quantity)
fromQuantity - Construct a CharUnits quantity from a raw integer type.
EHScopeStack::stable_iterator getEnclosingEHScope() const
stable_iterator stable_begin() const
Create a stable reference to the top of the EH stack.
llvm::AllocaInst * ActiveFlag
An optional i1 variable indicating whether this cleanup has been activated yet.
bool containsOnlyLifetimeMarkers(stable_iterator Old) const
void ResolveBranchFixups(llvm::BasicBlock *Target)
unsigned getDestIndex() const
bool usesFuncletPads() const
Does this personality use landingpads or the family of pad instructions designed to form funclets...
EHCleanupScope(bool isNormal, bool isEH, bool isActive, unsigned cleanupSize, unsigned fixupDepth, EHScopeStack::stable_iterator enclosingNormal, EHScopeStack::stable_iterator enclosingEH)
llvm::BasicBlock * OptimisticBranchBlock
The block containing the terminator which needs to be modified into a switch if this fixup is resolve...
void popCleanup()
Pops a cleanup scope off the stack. This is private to CGCleanup.cpp.
The l-value was considered opaque, so the alignment was determined from a type.
bool HaveInsertPoint() const
HaveInsertPoint - True if an insertion point is defined.
llvm::BasicBlock * Destination
The ultimate destination of the branch.
CharUnits getPointerAlign() const
A saved depth on the scope stack.
Represents a C++ temporary.
llvm::BasicBlock * getUnreachableBlock()
void setBeforeOutermostConditional(llvm::Value *value, Address addr)
static OMPLinearClause * Create(const ASTContext &C, SourceLocation StartLoc, SourceLocation LParenLoc, OpenMPLinearClauseKind Modifier, SourceLocation ModifierLoc, SourceLocation ColonLoc, SourceLocation EndLoc, ArrayRef< Expr * > VL, ArrayRef< Expr * > PL, ArrayRef< Expr * > IL, Expr *Step, Expr *CalcStep, Stmt *PreInit, Expr *PostUpdate)
Creates clause with a list of variables VL and a linear step Step.
void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup, llvm::Instruction *DominatingIP)
DeactivateCleanupBlock - Deactivates the given cleanup block.
void EmitCXXTemporary(const CXXTemporary *Temporary, QualType TempType, Address Ptr)
Emits all the code to cause the given temporary to be cleaned up.
unsigned DestinationIndex
The destination index value.
void setIsNormalCleanupKind()
virtual void Emit(CodeGenFunction &CGF, Flags flags)=0
Emit the cleanup.
static size_t getSizeForNumFilters(unsigned numFilters)
static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack, EHScopeStack::stable_iterator C)
static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit, llvm::BasicBlock *From, llvm::BasicBlock *To)
CharUnits getAlignment() const
Return the alignment of this pointer.
class EHFilterScope * pushFilter(unsigned NumFilters)
Push an exceptions filter on the stack.
llvm::Value * getAggregatePointer() const
bool encloses(stable_iterator I) const
Returns true if this scope encloses I.
static RValue getComplex(llvm::Value *V1, llvm::Value *V2)
void popNullFixups()
Pops lazily-removed fixups from the end of the list.
llvm::Value * getScalarVal() const
getScalarVal() - Return the Value* of this scalar value.
Address CreateStructGEP(Address Addr, unsigned Index, CharUnits Offset, const llvm::Twine &Name="")
static llvm::BasicBlock * SimplifyCleanupEntry(CodeGenFunction &CGF, llvm::BasicBlock *Entry)
Attempts to reduce a cleanup's entry block to a fallthrough.
llvm::LoadInst * CreateLoad(Address Addr, const llvm::Twine &Name="")
detail::InMemoryDirectory::const_iterator E
llvm::StoreInst * CreateStore(llvm::Value *Val, Address Addr, bool IsVolatile=false)
Address getNormalCleanupDestSlot()
static void SetupCleanupBlockActivation(CodeGenFunction &CGF, EHScopeStack::stable_iterator C, ForActivation_t kind, llvm::Instruction *dominatingIP)
The given cleanup block is changing activation state.
void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false)
EmitBlock - Emit the given block.
llvm::BasicBlock * getNormalBlock() const
BoundNodesTreeBuilder *const Builder
stable_iterator getInnermostActiveNormalCleanup() const
void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize)
Takes the old cleanup stack size and emits the cleanup blocks that have been added.
unsigned kind
All of the diagnostics that can be emitted by the frontend.
Address getAggregateAddress() const
getAggregateAddr() - Return the Value* of the address of the aggregate.
bool hasNormalCleanups() const
Determines whether there are any normal cleanups on the stack.
unsigned getNumFilters() const
stable_iterator getInnermostEHScope() const
bool strictlyEncloses(stable_iterator I) const
Returns true if this scope strictly encloses I: that is, if it encloses I and is not I...
stable_iterator getInnermostNormalCleanup() const
Returns the innermost normal cleanup on the stack, or stable_end() if there are no normal cleanups...
An exceptions scope which filters exceptions thrown through it.
static RValue get(llvm::Value *V)
void EmitBranchThroughCleanup(JumpDest Dest)
EmitBranchThroughCleanup - Emit a branch from the current insert block through the normal cleanup han...
static RValue getAggregate(Address addr, bool isVolatile=false)
Information for lazily generating a cleanup.
A non-stable pointer into the scope stack.
void PopCleanupBlock(bool FallThroughIsBranchThrough=false)
PopCleanupBlock - Will pop the cleanup entry on the stack and process all branch fixups.