//===- ModuleSummaryAnalysis.cpp - Module summary index builder ----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass builds a ModuleSummaryIndex object for the module, to be written
// to bitcode or LLVM assembly.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/ModuleSummaryAnalysis.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/IndirectCallPromotionAnalysis.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/MemoryProfileInfo.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/StackSafetyAnalysis.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ModuleSummaryIndex.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/InitializePasses.h"
#include "llvm/Object/ModuleSymbolTable.h"
#include "llvm/Object/SymbolicFile.h"
#include "llvm/Pass.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

using namespace llvm;
using namespace llvm::memprof;

#define DEBUG_TYPE "module-summary-analysis"

// Option to force edges cold which will block importing when the
// -import-cold-multiplier is set to 0. Useful for debugging.
namespace llvm {
FunctionSummary::ForceSummaryHotnessType ForceSummaryEdgesCold =
    FunctionSummary::FSHT_None;
} // namespace llvm

static cl::opt<FunctionSummary::ForceSummaryHotnessType, true> FSEC(
    "force-summary-edges-cold", cl::Hidden, cl::location(ForceSummaryEdgesCold),
    cl::desc("Force all edges in the function summary to cold"),
    cl::values(clEnumValN(FunctionSummary::FSHT_None, "none", "None."),
               clEnumValN(FunctionSummary::FSHT_AllNonCritical,
                          "all-non-critical", "All non-critical edges."),
               clEnumValN(FunctionSummary::FSHT_All, "all", "All edges.")));

static cl::opt<std::string> ModuleSummaryDotFile(
    "module-summary-dot-file", cl::Hidden, cl::value_desc("filename"),
    cl::desc("File to emit dot graph of new summary into"));

// Walk through the operands of a given User via worklist iteration and
// populate the set of GlobalValue references encountered. Invoked either on an
// Instruction or a GlobalVariable (which walks its initializer).
// Return true if any of the operands contains a blockaddress. This is
// important to know when computing the summary for a global variable, because
// if the global variable references a basic block address we can't import it
// separately from the function containing that basic block. For simplicity we
// currently don't import such global variables at all. When importing a
// function we aren't interested in whether any instruction in it takes the
// address of a basic block, because an instruction can only take the address
// of a basic block located in the same function.
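// For example (names are illustrative), a global such as
//   @gv = global ptr blockaddress(@f, %bb)
// can only be kept correct if it stays in the same module as @f, so its
// summary is marked as not eligible for import below.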
static bool findRefEdges(ModuleSummaryIndex &Index, const User *CurUser,
                         SetVector<ValueInfo> &RefEdges,
                         SmallPtrSet<const User *, 8> &Visited) {
  bool HasBlockAddress = false;
  SmallVector<const User *, 32> Worklist;
  if (Visited.insert(CurUser).second)
    Worklist.push_back(CurUser);

  while (!Worklist.empty()) {
    const User *U = Worklist.pop_back_val();
    const auto *CB = dyn_cast<CallBase>(U);

    for (const auto &OI : U->operands()) {
      const User *Operand = dyn_cast<User>(OI);
      if (!Operand)
        continue;
      if (isa<BlockAddress>(Operand)) {
        HasBlockAddress = true;
        continue;
      }
      if (auto *GV = dyn_cast<GlobalValue>(Operand)) {
        // We have a reference to a global value. This should be added to
        // the reference set unless it is a callee. Callees are handled
        // specially by WriteFunction and are added to a separate list.
        if (!(CB && CB->isCallee(&OI)))
          RefEdges.insert(Index.getOrInsertValueInfo(GV));
        continue;
      }
      if (Visited.insert(Operand).second)
        Worklist.push_back(Operand);
    }
  }
  return HasBlockAddress;
}

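// Map a raw profile count onto the summary hotness levels using the module's
// profile summary thresholds; without profile summary info the hotness is
// unknown.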
static CalleeInfo::HotnessType getHotness(uint64_t ProfileCount,
                                          ProfileSummaryInfo *PSI) {
  if (!PSI)
    return CalleeInfo::HotnessType::Unknown;
  if (PSI->isHotCount(ProfileCount))
    return CalleeInfo::HotnessType::Hot;
  if (PSI->isColdCount(ProfileCount))
    return CalleeInfo::HotnessType::Cold;
  return CalleeInfo::HotnessType::None;
}

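// Values with local linkage that carry an explicit section are treated as
// non-renamable; such values are recorded in CantBePromoted and block
// importing of anything that refers to them.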
static bool isNonRenamableLocal(const GlobalValue &GV) {
  return GV.hasSection() && GV.hasLocalLinkage();
}

/// Determine whether this call has all constant integer arguments (excluding
/// "this") and summarize it to VCalls or ConstVCalls as appropriate.
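/// Calls recorded in ConstVCalls allow whole-program devirtualization to
/// attempt virtual constant propagation; a call with any non-constant or
/// wider-than-64-bit argument is summarized in VCalls only.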
static void addVCallToSet(DevirtCallSite Call, GlobalValue::GUID Guid,
                          SetVector<FunctionSummary::VFuncId> &VCalls,
                          SetVector<FunctionSummary::ConstVCall> &ConstVCalls) {
  std::vector<uint64_t> Args;
  // Start from the second argument to skip the "this" pointer.
  for (auto &Arg : drop_begin(Call.CB.args())) {
    auto *CI = dyn_cast<ConstantInt>(Arg);
    if (!CI || CI->getBitWidth() > 64) {
      VCalls.insert({Guid, Call.Offset});
      return;
    }
    Args.push_back(CI->getZExtValue());
  }
  ConstVCalls.insert({{Guid, Call.Offset}, std::move(Args)});
}

/// If this intrinsic call requires that we add information to the function
/// summary, do so via the non-constant reference arguments.
static void addIntrinsicToSummary(
    const CallInst *CI, SetVector<GlobalValue::GUID> &TypeTests,
    SetVector<FunctionSummary::VFuncId> &TypeTestAssumeVCalls,
    SetVector<FunctionSummary::VFuncId> &TypeCheckedLoadVCalls,
    SetVector<FunctionSummary::ConstVCall> &TypeTestAssumeConstVCalls,
    SetVector<FunctionSummary::ConstVCall> &TypeCheckedLoadConstVCalls,
    DominatorTree &DT) {
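  // Note that llvm.type.test and llvm.public.type.test take the type
  // identifier metadata as their second operand, while llvm.type.checked.load
  // takes it as its third operand (after the pointer and byte offset), which
  // is why the argument indices below differ.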
  switch (CI->getCalledFunction()->getIntrinsicID()) {
  case Intrinsic::type_test:
  case Intrinsic::public_type_test: {
    auto *TypeMDVal = cast<MetadataAsValue>(CI->getArgOperand(1));
    auto *TypeId = dyn_cast<MDString>(TypeMDVal->getMetadata());
    if (!TypeId)
      break;
    GlobalValue::GUID Guid = GlobalValue::getGUID(TypeId->getString());

    // Produce a summary from type.test intrinsics. We only summarize type.test
    // intrinsics that are used other than by an llvm.assume intrinsic.
    // Intrinsics that are assumed are relevant only to the devirtualization
    // pass, not the type test lowering pass.
    bool HasNonAssumeUses = llvm::any_of(CI->uses(), [](const Use &CIU) {
      return !isa<AssumeInst>(CIU.getUser());
    });
    if (HasNonAssumeUses)
      TypeTests.insert(Guid);

    SmallVector<DevirtCallSite, 4> DevirtCalls;
    SmallVector<CallInst *, 4> Assumes;
    findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI, DT);
    for (auto &Call : DevirtCalls)
      addVCallToSet(Call, Guid, TypeTestAssumeVCalls,
                    TypeTestAssumeConstVCalls);

    break;
  }

  case Intrinsic::type_checked_load: {
    auto *TypeMDVal = cast<MetadataAsValue>(CI->getArgOperand(2));
    auto *TypeId = dyn_cast<MDString>(TypeMDVal->getMetadata());
    if (!TypeId)
      break;
    GlobalValue::GUID Guid = GlobalValue::getGUID(TypeId->getString());

    SmallVector<DevirtCallSite, 4> DevirtCalls;
    SmallVector<Instruction *, 4> LoadedPtrs;
    SmallVector<Instruction *, 4> Preds;
    bool HasNonCallUses = false;
    findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
                                               HasNonCallUses, CI, DT);
    // Any non-call uses of the result of llvm.type.checked.load will
    // prevent us from optimizing away the llvm.type.test.
    if (HasNonCallUses)
      TypeTests.insert(Guid);
    for (auto &Call : DevirtCalls)
      addVCallToSet(Call, Guid, TypeCheckedLoadVCalls,
                    TypeCheckedLoadConstVCalls);

    break;
  }
  default:
    break;
  }
}

static bool isNonVolatileLoad(const Instruction *I) {
  if (const auto *LI = dyn_cast<LoadInst>(I))
    return !LI->isVolatile();

  return false;
}

static bool isNonVolatileStore(const Instruction *I) {
  if (const auto *SI = dyn_cast<StoreInst>(I))
    return !SI->isVolatile();

  return false;
}

// Returns true if the function definition must be unreachable.
//
// Note if this helper function returns true, `F` is guaranteed
// to be unreachable; if it returns false, `F` might still
// be unreachable but not covered by this helper function.
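// For example, a definition whose entry block is just 'unreachable' is always
// unreachable, whereas one that only reaches 'unreachable' through a
// conditional branch is not detected here.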
static bool mustBeUnreachableFunction(const Function &F) {
  // A function must be unreachable if its entry block ends with an
  // 'unreachable'.
  assert(!F.isDeclaration());
  return isa<UnreachableInst>(F.getEntryBlock().getTerminator());
}

static void computeFunctionSummary(
    ModuleSummaryIndex &Index, const Module &M, const Function &F,
    BlockFrequencyInfo *BFI, ProfileSummaryInfo *PSI, DominatorTree &DT,
    bool HasLocalsInUsedOrAsm, DenseSet<GlobalValue::GUID> &CantBePromoted,
    bool IsThinLTO,
    std::function<const StackSafetyInfo *(const Function &F)> GetSSICallback) {
  // Summary not currently supported for anonymous functions, they should
  // have been named.
  assert(F.hasName());

  unsigned NumInsts = 0;
  // Map from callee ValueId to profile count. Used to accumulate profile
  // counts for all static calls to a given callee.
  MapVector<ValueInfo, CalleeInfo> CallGraphEdges;
  SetVector<ValueInfo> RefEdges, LoadRefEdges, StoreRefEdges;
  SetVector<GlobalValue::GUID> TypeTests;
  SetVector<FunctionSummary::VFuncId> TypeTestAssumeVCalls,
      TypeCheckedLoadVCalls;
  SetVector<FunctionSummary::ConstVCall> TypeTestAssumeConstVCalls,
      TypeCheckedLoadConstVCalls;
  ICallPromotionAnalysis ICallAnalysis;
  SmallPtrSet<const User *, 8> Visited;

  // Add personality function, prefix data and prologue data to function's ref
  // list.
  findRefEdges(Index, &F, RefEdges, Visited);
  std::vector<const Instruction *> NonVolatileLoads;
  std::vector<const Instruction *> NonVolatileStores;

  std::vector<CallsiteInfo> Callsites;
  std::vector<AllocInfo> Allocs;

  bool HasInlineAsmMaybeReferencingInternal = false;
  bool HasIndirBranchToBlockAddress = false;
  bool HasUnknownCall = false;
  bool MayThrow = false;
  for (const BasicBlock &BB : F) {
    // We don't allow inlining of functions with an indirect branch to a
    // blockaddress. If the blockaddress escapes the function, e.g., via a
    // global variable, inlining may lead to an invalid cross-function
    // reference, so we shouldn't import such functions either.
    if (BB.hasAddressTaken()) {
      for (User *U : BlockAddress::get(const_cast<BasicBlock *>(&BB))->users())
        if (!isa<CallBrInst>(*U)) {
          HasIndirBranchToBlockAddress = true;
          break;
        }
    }

    for (const Instruction &I : BB) {
      if (I.isDebugOrPseudoInst())
        continue;
      ++NumInsts;

      // A regular LTO module doesn't participate in ThinLTO importing, so no
      // reference from it can be marked read-only or write-only, since that
      // would require importing the variable as a local copy.
      if (IsThinLTO) {
        if (isNonVolatileLoad(&I)) {
          // Postpone processing of non-volatile load instructions.
          // See comments below.
          Visited.insert(&I);
          NonVolatileLoads.push_back(&I);
          continue;
        } else if (isNonVolatileStore(&I)) {
          Visited.insert(&I);
          NonVolatileStores.push_back(&I);
          // All references from the second operand of a store (the destination
          // address) can be considered write-only if they're not referenced by
          // any non-store instruction. References from the first operand of a
          // store (the stored value) can't be treated as either read-only or
          // write-only, so we add them to RefEdges as we do with all other
          // instructions except non-volatile loads.
          Value *Stored = I.getOperand(0);
          if (auto *GV = dyn_cast<GlobalValue>(Stored))
            // findRefEdges will try to examine GV operands, so instead
            // of calling it we should add GV to RefEdges directly.
            RefEdges.insert(Index.getOrInsertValueInfo(GV));
          else if (auto *U = dyn_cast<User>(Stored))
            findRefEdges(Index, U, RefEdges, Visited);
          continue;
        }
      }
      findRefEdges(Index, &I, RefEdges, Visited);
      const auto *CB = dyn_cast<CallBase>(&I);
      if (!CB) {
        if (I.mayThrow())
          MayThrow = true;
        continue;
      }

      const auto *CI = dyn_cast<CallInst>(&I);
      // Since we don't know exactly which local values are referenced in
      // inline assembly, conservatively mark the function as possibly
      // referencing a local value from inline assembly to ensure we don't
      // export a reference (which would require renaming and promotion of the
      // referenced value).
      if (HasLocalsInUsedOrAsm && CI && CI->isInlineAsm())
        HasInlineAsmMaybeReferencingInternal = true;

      auto *CalledValue = CB->getCalledOperand();
      auto *CalledFunction = CB->getCalledFunction();
      if (CalledValue && !CalledFunction) {
        CalledValue = CalledValue->stripPointerCasts();
        // Stripping pointer casts can reveal a called function.
        CalledFunction = dyn_cast<Function>(CalledValue);
      }
      // Check if this is an alias to a function. If so, get the
      // called aliasee for the checks below.
      if (auto *GA = dyn_cast<GlobalAlias>(CalledValue)) {
        assert(!CalledFunction &&
               "Expected null called function in callsite for alias");
        CalledFunction = dyn_cast<Function>(GA->getAliaseeObject());
      }
      // Check if this is a direct call to a known function or a known
      // intrinsic, or an indirect call with profile data.
      if (CalledFunction) {
        if (CI && CalledFunction->isIntrinsic()) {
          addIntrinsicToSummary(
              CI, TypeTests, TypeTestAssumeVCalls, TypeCheckedLoadVCalls,
              TypeTestAssumeConstVCalls, TypeCheckedLoadConstVCalls, DT);
          continue;
        }
        // We should have named any anonymous globals.
        assert(CalledFunction->hasName());
        auto ScaledCount = PSI->getProfileCount(*CB, BFI);
        auto Hotness = ScaledCount ? getHotness(*ScaledCount, PSI)
                                   : CalleeInfo::HotnessType::Unknown;
        if (ForceSummaryEdgesCold != FunctionSummary::FSHT_None)
          Hotness = CalleeInfo::HotnessType::Cold;

        // Use the original CalledValue, in case it was an alias. We want
        // to record the call edge to the alias in that case. Eventually
        // an alias summary will be created to associate the alias and
        // aliasee.
        auto &ValueInfo = CallGraphEdges[Index.getOrInsertValueInfo(
            cast<GlobalValue>(CalledValue))];
        ValueInfo.updateHotness(Hotness);
        // Add the relative block frequency to CalleeInfo if there is no
        // profile information.
        if (BFI != nullptr && Hotness == CalleeInfo::HotnessType::Unknown) {
          uint64_t BBFreq = BFI->getBlockFreq(&BB).getFrequency();
          uint64_t EntryFreq = BFI->getEntryFreq();
          ValueInfo.updateRelBlockFreq(BBFreq, EntryFreq);
        }
      } else {
        HasUnknownCall = true;
        // Skip inline assembly calls.
        if (CI && CI->isInlineAsm())
          continue;
        // Skip direct calls.
        if (!CalledValue || isa<Constant>(CalledValue))
          continue;

        // Check if the instruction has callees metadata. If so, add callees
        // to CallGraphEdges to reflect the references from the metadata, and
        // to enable importing for subsequent indirect call promotion and
        // inlining.
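        // For example, an indirect call annotated with !callees metadata
        // naming @f and @g produces summary edges to both f and g even though
        // the call itself has no statically known target.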
        if (auto *MD = I.getMetadata(LLVMContext::MD_callees)) {
          for (const auto &Op : MD->operands()) {
            Function *Callee = mdconst::extract_or_null<Function>(Op);
            if (Callee)
              CallGraphEdges[Index.getOrInsertValueInfo(Callee)];
          }
        }

        uint32_t NumVals, NumCandidates;
        uint64_t TotalCount;
        auto CandidateProfileData =
            ICallAnalysis.getPromotionCandidatesForInstruction(
                &I, NumVals, TotalCount, NumCandidates);
        for (const auto &Candidate : CandidateProfileData)
          CallGraphEdges[Index.getOrInsertValueInfo(Candidate.Value)]
              .updateHotness(getHotness(Candidate.Count, PSI));
      }

      // TODO: Skip indirect calls for now. Need to handle these better, likely
      // by creating multiple Callsites, one per target, then speculatively
      // devirtualize while applying clone info in the ThinLTO backends. This
      // will also be important because we will have a different set of clone
      // versions per target. This handling needs to match that in the ThinLTO
      // backend so we handle things consistently for matching of callsite
      // summaries to instructions.
      if (!CalledFunction)
        continue;

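      // Summarize memprof profile metadata. An allocation call carries
      // !memprof metadata (one MIB node per allocation context) together with
      // !callsite metadata, while interior calls within a profiled context
      // carry only !callsite metadata; both are captured here so the ThinLTO
      // backend can match summaries back to instructions.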
      // Compute the list of stack ids first (so we can trim them from the stack
      // ids on any MIBs).
      CallStack<MDNode, MDNode::op_iterator> InstCallsite(
          I.getMetadata(LLVMContext::MD_callsite));
      auto *MemProfMD = I.getMetadata(LLVMContext::MD_memprof);
      if (MemProfMD) {
        std::vector<MIBInfo> MIBs;
        for (auto &MDOp : MemProfMD->operands()) {
          auto *MIBMD = cast<const MDNode>(MDOp);
          MDNode *StackNode = getMIBStackNode(MIBMD);
          assert(StackNode);
          SmallVector<unsigned> StackIdIndices;
          CallStack<MDNode, MDNode::op_iterator> StackContext(StackNode);
          // Collapse out any frames on the allocation call itself (due to
          // inlining).
          for (auto ContextIter =
                   StackContext.beginAfterSharedPrefix(InstCallsite);
               ContextIter != StackContext.end(); ++ContextIter) {
            unsigned StackIdIdx = Index.addOrGetStackIdIndex(*ContextIter);
            // If this is a direct recursion, simply skip the duplicate
            // entries. If this is mutual recursion, handling is left to
            // the LTO link analysis client.
            if (StackIdIndices.empty() || StackIdIndices.back() != StackIdIdx)
              StackIdIndices.push_back(StackIdIdx);
          }
          MIBs.push_back(
              MIBInfo(getMIBAllocType(MIBMD), std::move(StackIdIndices)));
        }
        Allocs.push_back(AllocInfo(std::move(MIBs)));
      } else if (!InstCallsite.empty()) {
        SmallVector<unsigned> StackIdIndices;
        for (auto StackId : InstCallsite)
          StackIdIndices.push_back(Index.addOrGetStackIdIndex(StackId));
        // Use the original CalledValue, in case it was an alias. We want
        // to record the call edge to the alias in that case. Eventually
        // an alias summary will be created to associate the alias and
        // aliasee.
        auto CalleeValueInfo =
            Index.getOrInsertValueInfo(cast<GlobalValue>(CalledValue));
        Callsites.push_back({CalleeValueInfo, StackIdIndices});
      }
    }
  }
  Index.addBlockCount(F.size());

  std::vector<ValueInfo> Refs;
  if (IsThinLTO) {
    auto AddRefEdges = [&](const std::vector<const Instruction *> &Instrs,
                           SetVector<ValueInfo> &Edges,
                           SmallPtrSet<const User *, 8> &Cache) {
      for (const auto *I : Instrs) {
        Cache.erase(I);
        findRefEdges(Index, I, Edges, Cache);
      }
    };

    // By now we have processed all instructions in the function except
    // non-volatile loads and non-volatile value stores. Find the ref edges
    // for both instruction sets.
    AddRefEdges(NonVolatileLoads, LoadRefEdges, Visited);
    // We can add some values to the Visited set when processing load
    // instructions which are also used by stores in NonVolatileStores.
    // For example this can happen if we have the following code:
    //
    // store %Derived* @foo, %Derived** bitcast (%Base** @bar to %Derived**)
    // %42 = load %Derived*, %Derived** bitcast (%Base** @bar to %Derived**)
    //
    // After processing loads we'll add the bitcast to the Visited set, and if
    // we use the same set while processing stores, we'll never see the store
    // to @bar and @bar will be mistakenly treated as readonly.
    SmallPtrSet<const User *, 8> StoreCache;
    AddRefEdges(NonVolatileStores, StoreRefEdges, StoreCache);

    // If both a load and a store instruction reference the same variable
    // we won't be able to optimize it. Add all such reference edges
    // to the RefEdges set.
    for (const auto &VI : StoreRefEdges)
      if (LoadRefEdges.remove(VI))
        RefEdges.insert(VI);

    unsigned RefCnt = RefEdges.size();
    // All new reference edges inserted in the two loops below are either
    // read-only or write-only. They will be grouped at the end of the RefEdges
    // vector, so we can use a single integer value to identify them.
    for (const auto &VI : LoadRefEdges)
      RefEdges.insert(VI);

    unsigned FirstWORef = RefEdges.size();
    for (const auto &VI : StoreRefEdges)
      RefEdges.insert(VI);

    Refs = RefEdges.takeVector();
    for (; RefCnt < FirstWORef; ++RefCnt)
      Refs[RefCnt].setReadOnly();

    for (; RefCnt < Refs.size(); ++RefCnt)
      Refs[RefCnt].setWriteOnly();
  } else {
    Refs = RefEdges.takeVector();
  }
  // Explicitly add hot edges to enforce importing for designated GUIDs for
  // sample PGO, to enable the same inlines as the profiled optimized binary.
  for (auto &I : F.getImportGUIDs())
    CallGraphEdges[Index.getOrInsertValueInfo(I)].updateHotness(
        ForceSummaryEdgesCold == FunctionSummary::FSHT_All
            ? CalleeInfo::HotnessType::Cold
            : CalleeInfo::HotnessType::Critical);

  bool NonRenamableLocal = isNonRenamableLocal(F);
  bool NotEligibleForImport = NonRenamableLocal ||
                              HasInlineAsmMaybeReferencingInternal ||
                              HasIndirBranchToBlockAddress;
  GlobalValueSummary::GVFlags Flags(
      F.getLinkage(), F.getVisibility(), NotEligibleForImport,
      /* Live = */ false, F.isDSOLocal(), F.canBeOmittedFromSymbolTable());
  FunctionSummary::FFlags FunFlags{
      F.doesNotAccessMemory(), F.onlyReadsMemory() && !F.doesNotAccessMemory(),
      F.hasFnAttribute(Attribute::NoRecurse), F.returnDoesNotAlias(),
      // FIXME: refactor this to use the same code that the inliner is using.
      // Don't try to import functions with the noinline attribute.
      F.getAttributes().hasFnAttr(Attribute::NoInline),
      F.hasFnAttribute(Attribute::AlwaysInline),
      F.hasFnAttribute(Attribute::NoUnwind), MayThrow, HasUnknownCall,
      mustBeUnreachableFunction(F)};
  std::vector<FunctionSummary::ParamAccess> ParamAccesses;
  if (auto *SSI = GetSSICallback(F))
    ParamAccesses = SSI->getParamAccesses(Index);
  auto FuncSummary = std::make_unique<FunctionSummary>(
      Flags, NumInsts, FunFlags, /*EntryCount=*/0, std::move(Refs),
      CallGraphEdges.takeVector(), TypeTests.takeVector(),
      TypeTestAssumeVCalls.takeVector(), TypeCheckedLoadVCalls.takeVector(),
      TypeTestAssumeConstVCalls.takeVector(),
      TypeCheckedLoadConstVCalls.takeVector(), std::move(ParamAccesses),
      std::move(Callsites), std::move(Allocs));
  if (NonRenamableLocal)
    CantBePromoted.insert(F.getGUID());
  Index.addGlobalValueSummary(F, std::move(FuncSummary));
}

/// Find function pointers referenced within the given vtable initializer
/// (or subset of an initializer) \p I. The starting offset of \p I within
/// the vtable initializer is \p StartingOffset. Any discovered function
/// pointers are added to \p VTableFuncs along with their cumulative offset
/// within the initializer.
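/// This handles both conventional vtables (structs or arrays of function
/// pointers) and relative vtables, where each slot holds a 32-bit offset of
/// the form trunc(sub(ptrtoint(function), ptrtoint(location in the vtable))).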
static void findFuncPointers(const Constant *I, uint64_t StartingOffset,
                             const Module &M, ModuleSummaryIndex &Index,
                             VTableFuncList &VTableFuncs,
                             const GlobalVariable &OrigGV) {
  // First check if this is a function pointer.
  if (I->getType()->isPointerTy()) {
    auto C = I->stripPointerCasts();
    auto A = dyn_cast<GlobalAlias>(C);
    if (isa<Function>(C) || (A && isa<Function>(A->getAliasee()))) {
      auto GV = dyn_cast<GlobalValue>(C);
      assert(GV);
      // We can disregard __cxa_pure_virtual as a possible call target, as
      // calls to pure virtuals are UB.
      if (GV && GV->getName() != "__cxa_pure_virtual")
        VTableFuncs.push_back({Index.getOrInsertValueInfo(GV), StartingOffset});
      return;
    }
  }

  // Walk through the elements in the constant struct or array and recursively
  // look for virtual function pointers.
  const DataLayout &DL = M.getDataLayout();
  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    StructType *STy = dyn_cast<StructType>(C->getType());
    assert(STy);
    const StructLayout *SL = DL.getStructLayout(C->getType());

    for (auto EI : llvm::enumerate(STy->elements())) {
      auto Offset = SL->getElementOffset(EI.index());
      unsigned Op = SL->getElementContainingOffset(Offset);
      findFuncPointers(cast<Constant>(I->getOperand(Op)),
                       StartingOffset + Offset, M, Index, VTableFuncs, OrigGV);
    }
  } else if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *ATy = C->getType();
    Type *EltTy = ATy->getElementType();
    uint64_t EltSize = DL.getTypeAllocSize(EltTy);
    for (unsigned i = 0, e = ATy->getNumElements(); i != e; ++i) {
      findFuncPointers(cast<Constant>(I->getOperand(i)),
                       StartingOffset + i * EltSize, M, Index, VTableFuncs,
                       OrigGV);
    }
  } else if (const auto *CE = dyn_cast<ConstantExpr>(I)) {
    // For relative vtables, the next sub-component should be a trunc.
    if (CE->getOpcode() != Instruction::Trunc ||
        !(CE = dyn_cast<ConstantExpr>(CE->getOperand(0))))
      return;

    // If this constant can be reduced to the offset between a function and a
    // global, then we know this is a valid virtual function if the RHS is the
    // original vtable we're scanning through.
    if (CE->getOpcode() == Instruction::Sub) {
      GlobalValue *LHS, *RHS;
      APSInt LHSOffset, RHSOffset;
      if (IsConstantOffsetFromGlobal(CE->getOperand(0), LHS, LHSOffset, DL) &&
          IsConstantOffsetFromGlobal(CE->getOperand(1), RHS, RHSOffset, DL) &&
          RHS == &OrigGV &&

          // For relative vtables, this component should point to the callable
          // function without any offsets.
          LHSOffset == 0 &&

          // Also, the RHS should always point to somewhere within the vtable.
          RHSOffset <= static_cast<uint64_t>(
                           DL.getTypeAllocSize(OrigGV.getInitializer()->getType()))) {
        findFuncPointers(LHS, StartingOffset, M, Index, VTableFuncs, OrigGV);
      }
    }
  }
}

// Identify the function pointers referenced by vtable definition \p V.
static void computeVTableFuncs(ModuleSummaryIndex &Index,
                               const GlobalVariable &V, const Module &M,
                               VTableFuncList &VTableFuncs) {
  if (!V.isConstant())
    return;

  findFuncPointers(V.getInitializer(), /*StartingOffset=*/0, M, Index,
                   VTableFuncs, V);

#ifndef NDEBUG
  // Validate that the VTableFuncs list is ordered by offset.
  uint64_t PrevOffset = 0;
  for (auto &P : VTableFuncs) {
    // The findVFuncPointers traversal should have encountered the
    // functions in offset order. We need to use ">=" since PrevOffset
    // starts at 0.
    assert(P.VTableOffset >= PrevOffset);
    PrevOffset = P.VTableOffset;
  }
#endif
}

/// Record vtable definition \p V for each type metadata it references.
static void
recordTypeIdCompatibleVtableReferences(ModuleSummaryIndex &Index,
                                       const GlobalVariable &V,
                                       SmallVectorImpl<MDNode *> &Types) {
  for (MDNode *Type : Types) {
    auto TypeID = Type->getOperand(1).get();

    uint64_t Offset =
        cast<ConstantInt>(
            cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
            ->getZExtValue();

    if (auto *TypeId = dyn_cast<MDString>(TypeID))
      Index.getOrInsertTypeIdCompatibleVtableSummary(TypeId->getString())
          .push_back({Offset, Index.getOrInsertValueInfo(&V)});
  }
}

static void computeVariableSummary(ModuleSummaryIndex &Index,
                                   const GlobalVariable &V,
                                   DenseSet<GlobalValue::GUID> &CantBePromoted,
                                   const Module &M,
                                   SmallVectorImpl<MDNode *> &Types) {
  SetVector<ValueInfo> RefEdges;
  SmallPtrSet<const User *, 8> Visited;
  bool HasBlockAddress = findRefEdges(Index, &V, RefEdges, Visited);
  bool NonRenamableLocal = isNonRenamableLocal(V);
  GlobalValueSummary::GVFlags Flags(
      V.getLinkage(), V.getVisibility(), NonRenamableLocal,
      /* Live = */ false, V.isDSOLocal(), V.canBeOmittedFromSymbolTable());

  VTableFuncList VTableFuncs;
  // If splitting is not enabled, then we compute the summary information
  // necessary for index-based whole program devirtualization.
  if (!Index.enableSplitLTOUnit()) {
    Types.clear();
    V.getMetadata(LLVMContext::MD_type, Types);
    if (!Types.empty()) {
      // Identify the function pointers referenced by this vtable definition.
      computeVTableFuncs(Index, V, M, VTableFuncs);

      // Record this vtable definition for each type metadata it references.
      recordTypeIdCompatibleVtableReferences(Index, V, Types);
    }
  }

  // Don't mark variables we won't be able to internalize as read/write-only.
  bool CanBeInternalized =
      !V.hasComdat() && !V.hasAppendingLinkage() && !V.isInterposable() &&
      !V.hasAvailableExternallyLinkage() && !V.hasDLLExportStorageClass();
  bool Constant = V.isConstant();
  GlobalVarSummary::GVarFlags VarFlags(CanBeInternalized,
                                       Constant ? false : CanBeInternalized,
                                       Constant, V.getVCallVisibility());
  auto GVarSummary = std::make_unique<GlobalVarSummary>(Flags, VarFlags,
                                                        RefEdges.takeVector());
  if (NonRenamableLocal)
    CantBePromoted.insert(V.getGUID());
  if (HasBlockAddress)
    GVarSummary->setNotEligibleToImport();
  if (!VTableFuncs.empty())
    GVarSummary->setVTableFuncs(VTableFuncs);
  Index.addGlobalValueSummary(V, std::move(GVarSummary));
}

static void computeAliasSummary(ModuleSummaryIndex &Index, const GlobalAlias &A,
                                DenseSet<GlobalValue::GUID> &CantBePromoted) {
  // Skip summary for indirect function aliases as summary for aliasee will not
  // be emitted.
  const GlobalObject *Aliasee = A.getAliaseeObject();
  if (isa<GlobalIFunc>(Aliasee))
    return;
  bool NonRenamableLocal = isNonRenamableLocal(A);
  GlobalValueSummary::GVFlags Flags(
      A.getLinkage(), A.getVisibility(), NonRenamableLocal,
      /* Live = */ false, A.isDSOLocal(), A.canBeOmittedFromSymbolTable());
  auto AS = std::make_unique<AliasSummary>(Flags);
  auto AliaseeVI = Index.getValueInfo(Aliasee->getGUID());
  assert(AliaseeVI && "Alias expects aliasee summary to be available");
  assert(AliaseeVI.getSummaryList().size() == 1 &&
         "Expected a single entry per aliasee in per-module index");
  AS->setAliasee(AliaseeVI, AliaseeVI.getSummaryList()[0].get());
  if (NonRenamableLocal)
    CantBePromoted.insert(A.getGUID());
  Index.addGlobalValueSummary(A, std::move(AS));
}

// Set LiveRoot flag on entries matching the given value name.
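// For example, setLiveRoot(Index, "llvm.used") ensures that anything reachable
// from the llvm.used list survives index-based dead stripping.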
static void setLiveRoot(ModuleSummaryIndex &Index, StringRef Name) {
  if (ValueInfo VI = Index.getValueInfo(GlobalValue::getGUID(Name)))
    for (const auto &Summary : VI.getSummaryList())
      Summary->setLive(true);
}

ModuleSummaryIndex llvm::buildModuleSummaryIndex(
    const Module &M,
    std::function<BlockFrequencyInfo *(const Function &F)> GetBFICallback,
    ProfileSummaryInfo *PSI,
    std::function<const StackSafetyInfo *(const Function &F)> GetSSICallback) {
  assert(PSI);
  bool EnableSplitLTOUnit = false;
  if (auto *MD = mdconst::extract_or_null<ConstantInt>(
          M.getModuleFlag("EnableSplitLTOUnit")))
    EnableSplitLTOUnit = MD->getZExtValue();
  ModuleSummaryIndex Index(/*HaveGVs=*/true, EnableSplitLTOUnit);

  // Identify the local values in the llvm.used and llvm.compiler.used sets,
  // which should not be exported as they would then require renaming and
  // promotion, but we may have opaque uses e.g. in inline asm. We collect them
  // here because we use this information to mark functions containing inline
  // assembly calls as not importable.
  SmallPtrSet<GlobalValue *, 4> LocalsUsed;
  SmallVector<GlobalValue *, 4> Used;
  // First collect those in the llvm.used set.
  collectUsedGlobalVariables(M, Used, /*CompilerUsed=*/false);
  // Next collect those in the llvm.compiler.used set.
  collectUsedGlobalVariables(M, Used, /*CompilerUsed=*/true);
  DenseSet<GlobalValue::GUID> CantBePromoted;
  for (auto *V : Used) {
    if (V->hasLocalLinkage()) {
      LocalsUsed.insert(V);
      CantBePromoted.insert(V->getGUID());
    }
  }

  bool HasLocalInlineAsmSymbol = false;
  if (!M.getModuleInlineAsm().empty()) {
    // Collect the local values defined by module level asm, and set up
    // summaries for these symbols so that they can be marked as NoRename,
    // to prevent export of any use of them in regular IR that would require
    // renaming within the module level asm. Note we don't need to create a
    // summary for weak or global defs, as they don't need to be flagged as
    // NoRename, and defs in module level asm can't be imported anyway.
    // Also, any values used but not defined within module level asm should
    // be listed on the llvm.used or llvm.compiler.used global and marked as
    // referenced from there.
    ModuleSymbolTable::CollectAsmSymbols(
        M, [&](StringRef Name, object::BasicSymbolRef::Flags Flags) {
          // Symbols not marked as Weak or Global are local definitions.
          if (Flags & (object::BasicSymbolRef::SF_Weak |
                       object::BasicSymbolRef::SF_Global))
            return;
          HasLocalInlineAsmSymbol = true;
          GlobalValue *GV = M.getNamedValue(Name);
          if (!GV)
            return;
          assert(GV->isDeclaration() &&
                 "Def in module asm already has definition");
          GlobalValueSummary::GVFlags GVFlags(
              GlobalValue::InternalLinkage, GlobalValue::DefaultVisibility,
              /* NotEligibleToImport = */ true,
              /* Live = */ true,
              /* Local */ GV->isDSOLocal(), GV->canBeOmittedFromSymbolTable());
          CantBePromoted.insert(GV->getGUID());
          // Create the appropriate summary type.
          if (Function *F = dyn_cast<Function>(GV)) {
            std::unique_ptr<FunctionSummary> Summary =
                std::make_unique<FunctionSummary>(
                    GVFlags, /*InstCount=*/0,
                    FunctionSummary::FFlags{
                        F->hasFnAttribute(Attribute::ReadNone),
                        F->hasFnAttribute(Attribute::ReadOnly),
                        F->hasFnAttribute(Attribute::NoRecurse),
                        F->returnDoesNotAlias(),
                        /* NoInline = */ false,
                        F->hasFnAttribute(Attribute::AlwaysInline),
                        F->hasFnAttribute(Attribute::NoUnwind),
                        /* MayThrow */ true,
                        /* HasUnknownCall */ true,
                        /* MustBeUnreachable */ false},
                    /*EntryCount=*/0, ArrayRef<ValueInfo>{},
                    ArrayRef<FunctionSummary::EdgeTy>{},
                    ArrayRef<GlobalValue::GUID>{},
                    ArrayRef<FunctionSummary::VFuncId>{},
                    ArrayRef<FunctionSummary::VFuncId>{},
                    ArrayRef<FunctionSummary::ConstVCall>{},
                    ArrayRef<FunctionSummary::ConstVCall>{},
                    ArrayRef<FunctionSummary::ParamAccess>{},
                    ArrayRef<CallsiteInfo>{}, ArrayRef<AllocInfo>{});
            Index.addGlobalValueSummary(*GV, std::move(Summary));
          } else {
            std::unique_ptr<GlobalVarSummary> Summary =
                std::make_unique<GlobalVarSummary>(
                    GVFlags,
                    GlobalVarSummary::GVarFlags(
                        false, false, cast<GlobalVariable>(GV)->isConstant(),
                        GlobalObject::VCallVisibilityPublic),
                    ArrayRef<ValueInfo>{});
            Index.addGlobalValueSummary(*GV, std::move(Summary));
          }
        });
  }

  bool IsThinLTO = true;
  if (auto *MD =
          mdconst::extract_or_null<ConstantInt>(M.getModuleFlag("ThinLTO")))
    IsThinLTO = MD->getZExtValue();

  // Compute summaries for all functions defined in module, and save in the
  // index.
  for (const auto &F : M) {
    if (F.isDeclaration())
      continue;

    DominatorTree DT(const_cast<Function &>(F));
    BlockFrequencyInfo *BFI = nullptr;
    std::unique_ptr<BlockFrequencyInfo> BFIPtr;
    if (GetBFICallback)
      BFI = GetBFICallback(F);
    else if (F.hasProfileData()) {
      LoopInfo LI{DT};
      BranchProbabilityInfo BPI{F, LI};
      BFIPtr = std::make_unique<BlockFrequencyInfo>(F, BPI, LI);
      BFI = BFIPtr.get();
    }

    computeFunctionSummary(Index, M, F, BFI, PSI, DT,
                           !LocalsUsed.empty() || HasLocalInlineAsmSymbol,
                           CantBePromoted, IsThinLTO, GetSSICallback);
  }

  // Compute summaries for all variables defined in module, and save in the
  // index.
  SmallVector<MDNode *, 2> Types;
  for (const GlobalVariable &G : M.globals()) {
    if (G.isDeclaration())
      continue;
    computeVariableSummary(Index, G, CantBePromoted, M, Types);
  }

  // Compute summaries for all aliases defined in module, and save in the
  // index.
  for (const GlobalAlias &A : M.aliases())
    computeAliasSummary(Index, A, CantBePromoted);

  // Iterate through ifuncs, set their resolvers all alive.
  for (const GlobalIFunc &I : M.ifuncs()) {
    I.applyAlongResolverPath([&Index](const GlobalValue &GV) {
      Index.getGlobalValueSummary(GV)->setLive(true);
    });
  }

  for (auto *V : LocalsUsed) {
    auto *Summary = Index.getGlobalValueSummary(*V);
    assert(Summary && "Missing summary for global value");
    Summary->setNotEligibleToImport();
  }

  // The linker doesn't know about these LLVM produced values, so we need
  // to flag them as live in the index to ensure index-based dead value
  // analysis treats them as live roots of the analysis.
  setLiveRoot(Index, "llvm.used");
  setLiveRoot(Index, "llvm.compiler.used");
  setLiveRoot(Index, "llvm.global_ctors");
  setLiveRoot(Index, "llvm.global_dtors");
  setLiveRoot(Index, "llvm.global.annotations");

  for (auto &GlobalList : Index) {
    // Ignore entries for references that are undefined in the current module.
    if (GlobalList.second.SummaryList.empty())
      continue;

    assert(GlobalList.second.SummaryList.size() == 1 &&
           "Expected module's index to have one summary per GUID");
    auto &Summary = GlobalList.second.SummaryList[0];
    if (!IsThinLTO) {
      Summary->setNotEligibleToImport();
      continue;
    }

    bool AllRefsCanBeExternallyReferenced =
        llvm::all_of(Summary->refs(), [&](const ValueInfo &VI) {
          return !CantBePromoted.count(VI.getGUID());
        });
    if (!AllRefsCanBeExternallyReferenced) {
      Summary->setNotEligibleToImport();
      continue;
    }

    if (auto *FuncSummary = dyn_cast<FunctionSummary>(Summary.get())) {
      bool AllCallsCanBeExternallyReferenced = llvm::all_of(
          FuncSummary->calls(), [&](const FunctionSummary::EdgeTy &Edge) {
            return !CantBePromoted.count(Edge.first.getGUID());
          });
      if (!AllCallsCanBeExternallyReferenced)
        Summary->setNotEligibleToImport();
    }
  }

  if (!ModuleSummaryDotFile.empty()) {
    std::error_code EC;
    raw_fd_ostream OSDot(ModuleSummaryDotFile, EC, sys::fs::OpenFlags::OF_None);
    if (EC)
      report_fatal_error(Twine("Failed to open dot file ") +
                         ModuleSummaryDotFile + ": " + EC.message() + "\n");
    Index.exportToDot(OSDot, {});
  }

  return Index;
}

AnalysisKey ModuleSummaryIndexAnalysis::Key;

ModuleSummaryIndex
ModuleSummaryIndexAnalysis::run(Module &M, ModuleAnalysisManager &AM) {
  ProfileSummaryInfo &PSI = AM.getResult<ProfileSummaryAnalysis>(M);
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  bool NeedSSI = needsParamAccessSummary(M);
  return buildModuleSummaryIndex(
      M,
      [&FAM](const Function &F) {
        return &FAM.getResult<BlockFrequencyAnalysis>(
            *const_cast<Function *>(&F));
      },
      &PSI,
      [&FAM, NeedSSI](const Function &F) -> const StackSafetyInfo * {
        return NeedSSI ? &FAM.getResult<StackSafetyAnalysis>(
                             const_cast<Function &>(F))
                       : nullptr;
      });
}

char ModuleSummaryIndexWrapperPass::ID = 0;

INITIALIZE_PASS_BEGIN(ModuleSummaryIndexWrapperPass, "module-summary-analysis",
                      "Module Summary Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(BlockFrequencyInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(ProfileSummaryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(StackSafetyInfoWrapperPass)
INITIALIZE_PASS_END(ModuleSummaryIndexWrapperPass, "module-summary-analysis",
                    "Module Summary Analysis", false, true)

ModulePass *llvm::createModuleSummaryIndexWrapperPass() {
  return new ModuleSummaryIndexWrapperPass();
}

ModuleSummaryIndexWrapperPass::ModuleSummaryIndexWrapperPass()
    : ModulePass(ID) {
  initializeModuleSummaryIndexWrapperPassPass(*PassRegistry::getPassRegistry());
}

bool ModuleSummaryIndexWrapperPass::runOnModule(Module &M) {
  auto *PSI = &getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  bool NeedSSI = needsParamAccessSummary(M);
  Index.emplace(buildModuleSummaryIndex(
      M,
      [this](const Function &F) {
        return &(this->getAnalysis<BlockFrequencyInfoWrapperPass>(
                         *const_cast<Function *>(&F))
                     .getBFI());
      },
      PSI,
      [&](const Function &F) -> const StackSafetyInfo * {
        return NeedSSI ? &getAnalysis<StackSafetyInfoWrapperPass>(
                              const_cast<Function &>(F))
                              .getResult()
                       : nullptr;
      }));
  return false;
}

bool ModuleSummaryIndexWrapperPass::doFinalization(Module &M) {
  Index.reset();
  return false;
}

void ModuleSummaryIndexWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<BlockFrequencyInfoWrapperPass>();
  AU.addRequired<ProfileSummaryInfoWrapperPass>();
  AU.addRequired<StackSafetyInfoWrapperPass>();
}

char ImmutableModuleSummaryIndexWrapperPass::ID = 0;

ImmutableModuleSummaryIndexWrapperPass::ImmutableModuleSummaryIndexWrapperPass(
    const ModuleSummaryIndex *Index)
    : ImmutablePass(ID), Index(Index) {
  initializeImmutableModuleSummaryIndexWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

void ImmutableModuleSummaryIndexWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  AU.setPreservesAll();
}

ImmutablePass *llvm::createImmutableModuleSummaryIndexWrapperPass(
    const ModuleSummaryIndex *Index) {
  return new ImmutableModuleSummaryIndexWrapperPass(Index);
}

INITIALIZE_PASS(ImmutableModuleSummaryIndexWrapperPass, "module-summary-info",
                "Module summary info", false, true)