//===- ValueEnumerator.cpp - Number values and types for bitcode writer ---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the ValueEnumerator class.
//
//===----------------------------------------------------------------------===//

#include "ValueEnumerator.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalIFunc.h"
#include "llvm/IR/GlobalObject.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cstddef>
#include <iterator>
#include <tuple>

using namespace llvm;

namespace {

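// Per-value ordering info used only for use-list-order prediction.  Each
// Value maps to a (1-based ID, already-predicted) pair: an ID of zero means
// the value has not been indexed yet, and the bool is set once
// predictValueUseListOrder() has handled the value.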
struct OrderMap {
  DenseMap<const Value *, std::pair<unsigned, bool>> IDs;
  unsigned LastGlobalValueID = 0;

  OrderMap() = default;

  bool isGlobalValue(unsigned ID) const {
    return ID <= LastGlobalValueID;
  }

  unsigned size() const { return IDs.size(); }
  std::pair<unsigned, bool> &operator[](const Value *V) { return IDs[V]; }

  std::pair<unsigned, bool> lookup(const Value *V) const {
    return IDs.lookup(V);
  }

  void index(const Value *V) {
    // Explicitly sequence get-size and insert-value operations to avoid UB.
    unsigned ID = IDs.size() + 1;
    IDs[V].first = ID;
  }
};

} // end anonymous namespace

static void orderValue(const Value *V, OrderMap &OM) {
  if (OM.lookup(V).first)
    return;

  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (C->getNumOperands()) {
      for (const Value *Op : C->operands())
        if (!isa<BasicBlock>(Op) && !isa<GlobalValue>(Op))
          orderValue(Op, OM);
      if (auto *CE = dyn_cast<ConstantExpr>(C))
        if (CE->getOpcode() == Instruction::ShuffleVector)
          orderValue(CE->getShuffleMaskForBitcode(), OM);
    }
  }

  // Note: we cannot cache this lookup above, since inserting into the map
  // changes the map's size, and thus affects the other IDs.
  OM.index(V);
}

static OrderMap orderModule(const Module &M) {
  // This needs to match the order used by ValueEnumerator::ValueEnumerator()
  // and ValueEnumerator::incorporateFunction().
  OrderMap OM;

  // Initializers of GlobalValues are processed in
  // BitcodeReader::ResolveGlobalAndAliasInits().  Match the order there rather
  // than ValueEnumerator, and match the code in predictValueUseListOrderImpl()
  // by giving IDs in reverse order.
  //
  // Since GlobalValues never reference each other directly (just through
  // initializers), their relative IDs only matter for determining order of
  // uses in their initializers.
  for (const GlobalVariable &G : reverse(M.globals()))
    orderValue(&G, OM);
  for (const GlobalAlias &A : reverse(M.aliases()))
    orderValue(&A, OM);
  for (const GlobalIFunc &I : reverse(M.ifuncs()))
    orderValue(&I, OM);
  for (const Function &F : reverse(M))
    orderValue(&F, OM);
  OM.LastGlobalValueID = OM.size();

  auto orderConstantValue = [&OM](const Value *V) {
    if (isa<Constant>(V) || isa<InlineAsm>(V))
      orderValue(V, OM);
  };

  for (const Function &F : M) {
    if (F.isDeclaration())
      continue;
    // Here we need to match the union of ValueEnumerator::incorporateFunction()
    // and WriteFunction().  Basic blocks are implicitly declared before
    // anything else (by declaring their size).
    for (const BasicBlock &BB : F)
      orderValue(&BB, OM);

    // Metadata used by instructions is decoded before the actual instructions,
    // so visit any constants used by it beforehand.
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        auto OrderConstantFromMetadata = [&](Metadata *MD) {
          if (const auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
            orderConstantValue(VAM->getValue());
          } else if (const auto *AL = dyn_cast<DIArgList>(MD)) {
            for (const auto *VAM : AL->getArgs())
              orderConstantValue(VAM->getValue());
          }
        };

        for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange())) {
          OrderConstantFromMetadata(DVR.getRawLocation());
          if (DVR.isDbgAssign())
            OrderConstantFromMetadata(DVR.getRawAddress());
        }

        for (const Value *V : I.operands()) {
          if (const auto *MAV = dyn_cast<MetadataAsValue>(V))
            OrderConstantFromMetadata(MAV->getMetadata());
        }
      }

    for (const Argument &A : F.args())
      orderValue(&A, OM);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        for (const Value *Op : I.operands())
          orderConstantValue(Op);
        if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
          orderValue(SVI->getShuffleMaskForBitcode(), OM);
        orderValue(&I, OM);
      }
  }
  return OM;
}

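// Predict the use-list order the bitcode reader will reconstruct for V and,
// when it differs from the current in-memory order, push a shuffle onto the
// stack.  The comparator below encodes the expected pattern: uses by users
// with IDs greater than V's come first in decreasing ID order, followed by
// uses by earlier users in increasing ID order (hence the "7 6 5 1 2 3"
// example below), except that GlobalValue uses are not reversed.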
static void predictValueUseListOrderImpl(const Value *V, const Function *F,
                                         unsigned ID, const OrderMap &OM,
                                         UseListOrderStack &Stack) {
  // Predict use-list order for this one.
  using Entry = std::pair<const Use *, unsigned>;
  SmallVector<Entry, 64> List;
  for (const Use &U : V->uses())
    // Check if this user will be serialized.
    if (OM.lookup(U.getUser()).first)
      List.push_back(std::make_pair(&U, List.size()));

  if (List.size() < 2)
    // We may have lost some users.
    return;

  bool IsGlobalValue = OM.isGlobalValue(ID);
  llvm::sort(List, [&](const Entry &L, const Entry &R) {
    const Use *LU = L.first;
    const Use *RU = R.first;
    if (LU == RU)
      return false;

    auto LID = OM.lookup(LU->getUser()).first;
    auto RID = OM.lookup(RU->getUser()).first;

    // If ID is 4, then expect: 7 6 5 1 2 3.
    if (LID < RID) {
      if (RID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return true;
      return false;
    }
    if (RID < LID) {
      if (LID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return false;
      return true;
    }

    // LID and RID are equal, so we have different operands of the same user.
    // Assume operands are added in order for all instructions.
    if (LID <= ID)
      if (!IsGlobalValue) // GlobalValue uses don't get reversed.
        return LU->getOperandNo() < RU->getOperandNo();
    return LU->getOperandNo() > RU->getOperandNo();
  });

  if (llvm::is_sorted(List, llvm::less_second()))
    // Order is already correct.
    return;

  // Store the shuffle.
  Stack.emplace_back(V, F, List.size());
  assert(List.size() == Stack.back().Shuffle.size() && "Wrong size");
  for (size_t I = 0, E = List.size(); I != E; ++I)
    Stack.back().Shuffle[I] = List[I].second;
}

static void predictValueUseListOrder(const Value *V, const Function *F,
                                     OrderMap &OM, UseListOrderStack &Stack) {
  auto &IDPair = OM[V];
  assert(IDPair.first && "Unmapped value");
  if (IDPair.second)
    // Already predicted.
    return;

  // Do the actual prediction.
  IDPair.second = true;
  if (!V->use_empty() && std::next(V->use_begin()) != V->use_end())
    predictValueUseListOrderImpl(V, F, IDPair.first, OM, Stack);

  // Recursive descent into constants.
  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (C->getNumOperands()) { // Visit GlobalValues.
      for (const Value *Op : C->operands())
        if (isa<Constant>(Op)) // Visit GlobalValues.
          predictValueUseListOrder(Op, F, OM, Stack);
      if (auto *CE = dyn_cast<ConstantExpr>(C))
        if (CE->getOpcode() == Instruction::ShuffleVector)
          predictValueUseListOrder(CE->getShuffleMaskForBitcode(), F, OM,
                                   Stack);
    }
  }
}

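// Walk the module in roughly the same order the writer will emit it and
// predict a use-list order for every value that has more than one tracked
// use.  The result feeds UseListOrders when use-list order preservation is
// requested.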
static UseListOrderStack predictUseListOrder(const Module &M) {
  OrderMap OM = orderModule(M);

  // Use-list orders need to be serialized after all the users have been added
  // to a value, or else the shuffles will be incomplete.  Store them per
  // function in a stack.
  //
  // Aside from function order, the order of values doesn't matter much here.
  UseListOrderStack Stack;

  // We want to visit the functions backward now so we can list function-local
  // constants in the last Function they're used in.  Module-level constants
  // have already been visited above.
  for (const Function &F : llvm::reverse(M)) {
    auto PredictValueOrderFromMetadata = [&](Metadata *MD) {
      if (const auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
        predictValueUseListOrder(VAM->getValue(), &F, OM, Stack);
      } else if (const auto *AL = dyn_cast<DIArgList>(MD)) {
        for (const auto *VAM : AL->getArgs())
          predictValueUseListOrder(VAM->getValue(), &F, OM, Stack);
      }
    };
    if (F.isDeclaration())
      continue;
    for (const BasicBlock &BB : F)
      predictValueUseListOrder(&BB, &F, OM, Stack);
    for (const Argument &A : F.args())
      predictValueUseListOrder(&A, &F, OM, Stack);
    for (const BasicBlock &BB : F) {
      for (const Instruction &I : BB) {
        for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange())) {
          PredictValueOrderFromMetadata(DVR.getRawLocation());
          if (DVR.isDbgAssign())
            PredictValueOrderFromMetadata(DVR.getRawAddress());
        }
        for (const Value *Op : I.operands()) {
          if (isa<Constant>(*Op) || isa<InlineAsm>(*Op)) // Visit GlobalValues.
            predictValueUseListOrder(Op, &F, OM, Stack);
          if (const auto *MAV = dyn_cast<MetadataAsValue>(Op))
            PredictValueOrderFromMetadata(MAV->getMetadata());
        }
        if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
          predictValueUseListOrder(SVI->getShuffleMaskForBitcode(), &F, OM,
                                   Stack);
        predictValueUseListOrder(&I, &F, OM, Stack);
      }
    }
  }

  // Visit globals last, since the module-level use-list block will be seen
  // before the function bodies are processed.
  for (const GlobalVariable &G : M.globals())
    predictValueUseListOrder(&G, nullptr, OM, Stack);
  for (const Function &F : M)
    predictValueUseListOrder(&F, nullptr, OM, Stack);
  for (const GlobalAlias &A : M.aliases())
    predictValueUseListOrder(&A, nullptr, OM, Stack);
  for (const GlobalIFunc &I : M.ifuncs())
    predictValueUseListOrder(&I, nullptr, OM, Stack);
  for (const GlobalVariable &G : M.globals())
    if (G.hasInitializer())
      predictValueUseListOrder(G.getInitializer(), nullptr, OM, Stack);
  for (const GlobalAlias &A : M.aliases())
    predictValueUseListOrder(A.getAliasee(), nullptr, OM, Stack);
  for (const GlobalIFunc &I : M.ifuncs())
    predictValueUseListOrder(I.getResolver(), nullptr, OM, Stack);
  for (const Function &F : M) {
    for (const Use &U : F.operands())
      predictValueUseListOrder(U.get(), nullptr, OM, Stack);
  }

  return Stack;
}

static bool isIntOrIntVectorValue(const std::pair<const Value*, unsigned> &V) {
  return V.first->getType()->isIntOrIntVectorTy();
}

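// Module-level enumeration.  The order below (global variables, functions,
// aliases, ifuncs, then initializers/aliasees/resolvers, then metadata) is
// significant: orderModule() above mirrors it, and the bitcode reader relies
// on the same ordering when resolving references.
//
// Rough sketch of how the bitcode writer drives this class (names simplified;
// the real driver lives in BitcodeWriter.cpp):
//
//   ValueEnumerator VE(M, /*ShouldPreserveUseListOrder=*/true);
//   for (const Function &F : M) {
//     if (F.isDeclaration())
//       continue;
//     VE.incorporateFunction(F);
//     // ... emit the function block using VE.getValueID(), etc. ...
//     VE.purgeFunction();
//   }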
ValueEnumerator::ValueEnumerator(const Module &M,
                                 bool ShouldPreserveUseListOrder)
    : ShouldPreserveUseListOrder(ShouldPreserveUseListOrder) {
  if (ShouldPreserveUseListOrder)
    UseListOrders = predictUseListOrder(M);

  // Enumerate the global variables.
  for (const GlobalVariable &GV : M.globals()) {
    EnumerateValue(&GV);
    EnumerateType(GV.getValueType());
  }

  // Enumerate the functions.
  for (const Function &F : M) {
    EnumerateValue(&F);
    EnumerateType(F.getValueType());
    EnumerateAttributes(F.getAttributes());
  }

  // Enumerate the aliases.
  for (const GlobalAlias &GA : M.aliases()) {
    EnumerateValue(&GA);
    EnumerateType(GA.getValueType());
  }

  // Enumerate the ifuncs.
  for (const GlobalIFunc &GIF : M.ifuncs()) {
    EnumerateValue(&GIF);
    EnumerateType(GIF.getValueType());
  }

  // Remember the cutoff between GlobalValues and other constants.
  unsigned FirstConstant = Values.size();

  // Enumerate the global variable initializers and attributes.
  for (const GlobalVariable &GV : M.globals()) {
    if (GV.hasInitializer())
      EnumerateValue(GV.getInitializer());
    if (GV.hasAttributes())
      EnumerateAttributes(GV.getAttributesAsList(AttributeList::FunctionIndex));
  }

  // Enumerate the aliasees.
  for (const GlobalAlias &GA : M.aliases())
    EnumerateValue(GA.getAliasee());

  // Enumerate the ifunc resolvers.
  for (const GlobalIFunc &GIF : M.ifuncs())
    EnumerateValue(GIF.getResolver());

  // Enumerate any optional Function data.
  for (const Function &F : M)
    for (const Use &U : F.operands())
      EnumerateValue(U.get());

  // Enumerate the metadata type.
  //
  // TODO: Move this to ValueEnumerator::EnumerateOperandType() once bitcode
  // only encodes the metadata type when it's used as a value.
  EnumerateType(Type::getMetadataTy(M.getContext()));

  // Insert constants and metadata that are named at module level into the slot
  // pool so that the module symbol table can refer to them...
  EnumerateValueSymbolTable(M.getValueSymbolTable());
  EnumerateNamedMetadata(M);

  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  for (const GlobalVariable &GV : M.globals()) {
    MDs.clear();
    GV.getAllMetadata(MDs);
    for (const auto &I : MDs)
      // FIXME: Pass GV to EnumerateMetadata and arrange for the bitcode writer
      // to write metadata to the global variable's own metadata block
      // (PR28134).
      EnumerateMetadata(nullptr, I.second);
  }

  // Enumerate types used by function bodies and argument lists.
  for (const Function &F : M) {
    for (const Argument &A : F.args())
      EnumerateType(A.getType());

    // Enumerate metadata attached to this function.
    MDs.clear();
    F.getAllMetadata(MDs);
    for (const auto &I : MDs)
      EnumerateMetadata(F.isDeclaration() ? nullptr : &F, I.second);

    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        // Local metadata is enumerated during function-incorporation, but
        // any ConstantAsMetadata arguments in a DIArgList should be examined
        // now.
        auto EnumerateNonLocalValuesFromMetadata = [&](Metadata *MD) {
          assert(MD && "Metadata unexpectedly null");
          if (const auto *AL = dyn_cast<DIArgList>(MD)) {
            for (const auto *VAM : AL->getArgs()) {
              if (isa<ConstantAsMetadata>(VAM))
                EnumerateMetadata(&F, VAM);
            }
            return;
          }

          if (!isa<LocalAsMetadata>(MD))
            EnumerateMetadata(&F, MD);
        };

        for (DbgRecord &DR : I.getDbgRecordRange()) {
          if (DbgLabelRecord *DLR = dyn_cast<DbgLabelRecord>(&DR)) {
            EnumerateMetadata(&F, DLR->getLabel());
            EnumerateMetadata(&F, &*DLR->getDebugLoc());
            continue;
          }
          // Enumerate non-local location metadata.
          DbgVariableRecord &DVR = cast<DbgVariableRecord>(DR);
          EnumerateNonLocalValuesFromMetadata(DVR.getRawLocation());
          EnumerateMetadata(&F, DVR.getExpression());
          EnumerateMetadata(&F, DVR.getVariable());
          EnumerateMetadata(&F, &*DVR.getDebugLoc());
          if (DVR.isDbgAssign()) {
            EnumerateNonLocalValuesFromMetadata(DVR.getRawAddress());
            EnumerateMetadata(&F, DVR.getAssignID());
            EnumerateMetadata(&F, DVR.getAddressExpression());
          }
        }
        for (const Use &Op : I.operands()) {
          auto *MD = dyn_cast<MetadataAsValue>(&Op);
          if (!MD) {
            EnumerateOperandType(Op);
            continue;
          }

          EnumerateNonLocalValuesFromMetadata(MD->getMetadata());
        }
        if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
          EnumerateType(SVI->getShuffleMaskForBitcode()->getType());
        if (auto *GEP = dyn_cast<GetElementPtrInst>(&I))
          EnumerateType(GEP->getSourceElementType());
        if (auto *AI = dyn_cast<AllocaInst>(&I))
          EnumerateType(AI->getAllocatedType());
        EnumerateType(I.getType());
        if (const auto *Call = dyn_cast<CallBase>(&I)) {
          EnumerateAttributes(Call->getAttributes());
          EnumerateType(Call->getFunctionType());
        }

        // Enumerate metadata attached with this instruction.
        MDs.clear();
        I.getAllMetadataOtherThanDebugLoc(MDs);
        for (unsigned i = 0, e = MDs.size(); i != e; ++i)
          EnumerateMetadata(&F, MDs[i].second);

        // Don't enumerate the location directly -- it has a special record
        // type -- but enumerate its operands.
        if (DILocation *L = I.getDebugLoc())
          for (const Metadata *Op : L->operands())
            EnumerateMetadata(&F, Op);
      }
  }

  // Optimize constant ordering.
  OptimizeConstants(FirstConstant, Values.size());

  // Organize metadata ordering.
  organizeMetadata();
}

unsigned ValueEnumerator::getInstructionID(const Instruction *Inst) const {
  InstructionMapType::const_iterator I = InstructionMap.find(Inst);
  assert(I != InstructionMap.end() && "Instruction is not mapped!");
  return I->second;
}

unsigned ValueEnumerator::getComdatID(const Comdat *C) const {
  unsigned ComdatID = Comdats.idFor(C);
  assert(ComdatID && "Comdat not found!");
  return ComdatID;
}

void ValueEnumerator::setInstructionID(const Instruction *I) {
  InstructionMap[I] = InstructionCount++;
}

unsigned ValueEnumerator::getValueID(const Value *V) const {
  if (auto *MD = dyn_cast<MetadataAsValue>(V))
    return getMetadataID(MD->getMetadata());

  ValueMapType::const_iterator I = ValueMap.find(V);
  assert(I != ValueMap.end() && "Value not in slotcalculator!");
  return I->second-1;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void ValueEnumerator::dump() const {
  print(dbgs(), ValueMap, "Default");
  dbgs() << '\n';
  print(dbgs(), MetadataMap, "MetaData");
  dbgs() << '\n';
}
#endif

void ValueEnumerator::print(raw_ostream &OS, const ValueMapType &Map,
                            const char *Name) const {
  OS << "Map Name: " << Name << "\n";
  OS << "Size: " << Map.size() << "\n";
  for (const auto &I : Map) {
    const Value *V = I.first;
    if (V->hasName())
      OS << "Value: " << V->getName();
    else
      OS << "Value: [null]\n";
    V->print(errs());
    errs() << '\n';

    OS << " Uses(" << V->getNumUses() << "):";
    for (const Use &U : V->uses()) {
      if (&U != &*V->use_begin())
        OS << ",";
      if (U->hasName())
        OS << " " << U->getName();
      else
        OS << " [null]";
    }
    OS << "\n\n";
  }
}

void ValueEnumerator::print(raw_ostream &OS, const MetadataMapType &Map,
                            const char *Name) const {
  OS << "Map Name: " << Name << "\n";
  OS << "Size: " << Map.size() << "\n";
  for (const auto &I : Map) {
    const Metadata *MD = I.first;
    OS << "Metadata: slot = " << I.second.ID << "\n";
    OS << "Metadata: function = " << I.second.F << "\n";
    MD->print(OS);
    OS << "\n";
  }
}

/// OptimizeConstants - Reorder constant pool for denser encoding.
void ValueEnumerator::OptimizeConstants(unsigned CstStart, unsigned CstEnd) {
  if (CstStart == CstEnd || CstStart+1 == CstEnd) return;

  if (ShouldPreserveUseListOrder)
    // Optimizing constants makes the use-list order difficult to predict.
    // Disable it for now when trying to preserve the order.
    return;

  std::stable_sort(Values.begin() + CstStart, Values.begin() + CstEnd,
                   [this](const std::pair<const Value *, unsigned> &LHS,
                          const std::pair<const Value *, unsigned> &RHS) {
    // Sort by plane.
    if (LHS.first->getType() != RHS.first->getType())
      return getTypeID(LHS.first->getType()) < getTypeID(RHS.first->getType());
    // Then by frequency.
    return LHS.second > RHS.second;
  });

  // Ensure that integer and vector of integer constants are at the start of
  // the constant pool.  This is important so that GEP structure indices come
  // before gep constant exprs.
  std::stable_partition(Values.begin() + CstStart, Values.begin() + CstEnd,
                        isIntOrIntVectorValue);

  // Rebuild the modified portion of ValueMap.
  for (; CstStart != CstEnd; ++CstStart)
    ValueMap[Values[CstStart].first] = CstStart+1;
}

/// EnumerateValueSymbolTable - Insert all of the values in the specified symbol
/// table into the values table.
void ValueEnumerator::EnumerateValueSymbolTable(const ValueSymbolTable &VST) {
  for (ValueSymbolTable::const_iterator VI = VST.begin(), VE = VST.end();
       VI != VE; ++VI)
    EnumerateValue(VI->getValue());
}

/// Insert all of the values referenced by named metadata in the specified
/// module.
void ValueEnumerator::EnumerateNamedMetadata(const Module &M) {
  for (const auto &I : M.named_metadata())
    EnumerateNamedMDNode(&I);
}

void ValueEnumerator::EnumerateNamedMDNode(const NamedMDNode *MD) {
  for (unsigned i = 0, e = MD->getNumOperands(); i != e; ++i)
    EnumerateMetadata(nullptr, MD->getOperand(i));
}

unsigned ValueEnumerator::getMetadataFunctionID(const Function *F) const {
  return F ? getValueID(F) + 1 : 0;
}

void ValueEnumerator::EnumerateMetadata(const Function *F, const Metadata *MD) {
  EnumerateMetadata(getMetadataFunctionID(F), MD);
}

void ValueEnumerator::EnumerateFunctionLocalMetadata(
    const Function &F, const LocalAsMetadata *Local) {
  EnumerateFunctionLocalMetadata(getMetadataFunctionID(&F), Local);
}

void ValueEnumerator::EnumerateFunctionLocalListMetadata(
    const Function &F, const DIArgList *ArgList) {
  EnumerateFunctionLocalListMetadata(getMetadataFunctionID(&F), ArgList);
}

void ValueEnumerator::dropFunctionFromMetadata(
    MetadataMapType::value_type &FirstMD) {
  SmallVector<const MDNode *, 64> Worklist;
  auto push = [&Worklist](MetadataMapType::value_type &MD) {
    auto &Entry = MD.second;

    // Nothing to do if this metadata isn't tagged.
    if (!Entry.F)
      return;

    // Drop the function tag.
    Entry.F = 0;

    // If this has an ID and is an MDNode, then its operands have entries as
    // well.  We need to drop the function from them too.
    if (Entry.ID)
      if (auto *N = dyn_cast<MDNode>(MD.first))
        Worklist.push_back(N);
  };
  push(FirstMD);
  while (!Worklist.empty())
    for (const Metadata *Op : Worklist.pop_back_val()->operands()) {
      if (!Op)
        continue;
      auto MD = MetadataMap.find(Op);
      if (MD != MetadataMap.end())
        push(*MD);
    }
}

void ValueEnumerator::EnumerateMetadata(unsigned F, const Metadata *MD) {
  // It's vital for reader efficiency that uniqued subgraphs are done in
  // post-order; it's expensive when their operands have forward references.
  // If a distinct node is referenced from a uniqued node, it'll be delayed
  // until the uniqued subgraph has been completely traversed.
  SmallVector<const MDNode *, 32> DelayedDistinctNodes;

  // Start by enumerating MD, and then work through its transitive operands in
  // post-order.  This requires a depth-first search.
  SmallVector<std::pair<const MDNode *, MDNode::op_iterator>, 32> Worklist;
  if (const MDNode *N = enumerateMetadataImpl(F, MD))
    Worklist.push_back(std::make_pair(N, N->op_begin()));

  while (!Worklist.empty()) {
    const MDNode *N = Worklist.back().first;

    // Enumerate operands until we hit a new node.  We need to traverse these
    // nodes' operands before visiting the rest of N's operands.
    MDNode::op_iterator I = std::find_if(
        Worklist.back().second, N->op_end(),
        [&](const Metadata *MD) { return enumerateMetadataImpl(F, MD); });
    if (I != N->op_end()) {
      auto *Op = cast<MDNode>(*I);
      Worklist.back().second = ++I;

      // Delay traversing Op if it's a distinct node and N is uniqued.
      if (Op->isDistinct() && !N->isDistinct())
        DelayedDistinctNodes.push_back(Op);
      else
        Worklist.push_back(std::make_pair(Op, Op->op_begin()));
      continue;
    }

    // All the operands have been visited.  Now assign an ID.
    Worklist.pop_back();
    MDs.push_back(N);
    MetadataMap[N].ID = MDs.size();

    // Flush out any delayed distinct nodes; these are all the distinct nodes
    // that are leaves in the last uniqued subgraph.
    if (Worklist.empty() || Worklist.back().first->isDistinct()) {
      for (const MDNode *N : DelayedDistinctNodes)
        Worklist.push_back(std::make_pair(N, N->op_begin()));
      DelayedDistinctNodes.clear();
    }
  }
}

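// Enumerate a single metadata node.  Returns MD as an MDNode when it was newly
// inserted and still needs its operands visited (and an ID assigned) by the
// worklist loop above; returns nullptr for null metadata, already-seen
// metadata, and leaf metadata (strings and constants), which get their IDs
// immediately.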
const MDNode *ValueEnumerator::enumerateMetadataImpl(unsigned F,
                                                     const Metadata *MD) {
  if (!MD)
    return nullptr;

  assert(
      (isa<MDNode>(MD) || isa<MDString>(MD) || isa<ConstantAsMetadata>(MD)) &&
      "Invalid metadata kind");

  auto Insertion = MetadataMap.insert(std::make_pair(MD, MDIndex(F)));
  MDIndex &Entry = Insertion.first->second;
  if (!Insertion.second) {
    // Already mapped.  If F doesn't match the function tag, drop it.
    if (Entry.hasDifferentFunction(F))
      dropFunctionFromMetadata(*Insertion.first);
    return nullptr;
  }

  // Don't assign IDs to metadata nodes.
  if (auto *N = dyn_cast<MDNode>(MD))
    return N;

  // Save the metadata.
  MDs.push_back(MD);
  Entry.ID = MDs.size();

  // Enumerate the constant, if any.
  if (auto *C = dyn_cast<ConstantAsMetadata>(MD))
    EnumerateValue(C->getValue());

  return nullptr;
}

/// EnumerateFunctionLocalMetadata - Incorporate function-local metadata
/// information reachable from the metadata.
void ValueEnumerator::EnumerateFunctionLocalMetadata(
    unsigned F, const LocalAsMetadata *Local) {
  assert(F && "Expected a function");

  // Check to see if it's already in!
  MDIndex &Index = MetadataMap[Local];
  if (Index.ID) {
    assert(Index.F == F && "Expected the same function");
    return;
  }

  MDs.push_back(Local);
  Index.F = F;
  Index.ID = MDs.size();

  EnumerateValue(Local->getValue());
}

/// EnumerateFunctionLocalListMetadata - Incorporate function-local metadata
/// information reachable from the metadata.
void ValueEnumerator::EnumerateFunctionLocalListMetadata(
    unsigned F, const DIArgList *ArgList) {
  assert(F && "Expected a function");

  // Check to see if it's already in!
  MDIndex &Index = MetadataMap[ArgList];
  if (Index.ID) {
    assert(Index.F == F && "Expected the same function");
    return;
  }

  for (ValueAsMetadata *VAM : ArgList->getArgs()) {
    if (isa<LocalAsMetadata>(VAM)) {
      assert(MetadataMap.count(VAM) &&
             "LocalAsMetadata should be enumerated before DIArgList");
      assert(MetadataMap[VAM].F == F &&
             "Expected LocalAsMetadata in the same function");
    } else {
      assert(isa<ConstantAsMetadata>(VAM) &&
             "Expected LocalAsMetadata or ConstantAsMetadata");
      assert(ValueMap.count(VAM->getValue()) &&
             "Constant should be enumerated before DIArgList");
      EnumerateMetadata(F, VAM);
    }
  }

  MDs.push_back(ArgList);
  Index.F = F;
  Index.ID = MDs.size();
}

static unsigned getMetadataTypeOrder(const Metadata *MD) {
  // Strings are emitted in bulk and must come first.
  if (isa<MDString>(MD))
    return 0;

  // ConstantAsMetadata doesn't reference anything.  We may as well shuffle it
  // to the front since we can detect it.
  auto *N = dyn_cast<MDNode>(MD);
  if (!N)
    return 1;

  // The reader is fast at forward references for distinct node operands, but
  // slow when uniqued operands are unresolved.
  return N->isDistinct() ? 2 : 3;
}

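// Reorder MDs so that module-level metadata (strings first) forms a prefix of
// the list, move per-function metadata into FunctionMDs with its range
// recorded in FunctionMDInfo, and rewrite the IDs in MetadataMap to match the
// new order.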
void ValueEnumerator::organizeMetadata() {
  assert(MetadataMap.size() == MDs.size() &&
         "Metadata map and vector out of sync");

  if (MDs.empty())
    return;

  // Copy out the index information from MetadataMap in order to choose a new
  // order.
  SmallVector<MDIndex, 64> Order;
  Order.reserve(MetadataMap.size());
  for (const Metadata *MD : MDs)
    Order.push_back(MetadataMap.lookup(MD));

  // Partition:
  //   - by function, then
  //   - by isa<MDString>
  // and then sort by the original/current ID.  Since the IDs are guaranteed to
  // be unique, the result of llvm::sort will be deterministic.  There's no need
  // for std::stable_sort.
  llvm::sort(Order, [this](MDIndex LHS, MDIndex RHS) {
    return std::make_tuple(LHS.F, getMetadataTypeOrder(LHS.get(MDs)), LHS.ID) <
           std::make_tuple(RHS.F, getMetadataTypeOrder(RHS.get(MDs)), RHS.ID);
  });

  // Rebuild MDs, index the metadata ranges for each function in FunctionMDs,
  // and fix up MetadataMap.
  std::vector<const Metadata *> OldMDs;
  MDs.swap(OldMDs);
  MDs.reserve(OldMDs.size());
  for (unsigned I = 0, E = Order.size(); I != E && !Order[I].F; ++I) {
    auto *MD = Order[I].get(OldMDs);
    MDs.push_back(MD);
    MetadataMap[MD].ID = I + 1;
    if (isa<MDString>(MD))
      ++NumMDStrings;
  }

  // Return early if there's nothing for the functions.
  if (MDs.size() == Order.size())
    return;

  // Build the function metadata ranges.
  MDRange R;
  FunctionMDs.reserve(OldMDs.size());
  unsigned PrevF = 0;
  for (unsigned I = MDs.size(), E = Order.size(), ID = MDs.size(); I != E;
       ++I) {
    unsigned F = Order[I].F;
    if (!PrevF) {
      PrevF = F;
    } else if (PrevF != F) {
      R.Last = FunctionMDs.size();
      std::swap(R, FunctionMDInfo[PrevF]);
      R.First = FunctionMDs.size();

      ID = MDs.size();
      PrevF = F;
    }

    auto *MD = Order[I].get(OldMDs);
    FunctionMDs.push_back(MD);
    MetadataMap[MD].ID = ++ID;
    if (isa<MDString>(MD))
      ++R.NumStrings;
  }
  R.Last = FunctionMDs.size();
  FunctionMDInfo[PrevF] = R;
}

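// Splice F's slice of FunctionMDs (recorded by organizeMetadata()) onto the
// end of MDs so the function block sees module-level metadata followed by its
// own; purgeFunction() later truncates MDs back to NumModuleMDs.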
void ValueEnumerator::incorporateFunctionMetadata(const Function &F) {
  NumModuleMDs = MDs.size();

  auto R = FunctionMDInfo.lookup(getValueID(&F) + 1);
  NumMDStrings = R.NumStrings;
  MDs.insert(MDs.end(), FunctionMDs.begin() + R.First,
             FunctionMDs.begin() + R.Last);
}

void ValueEnumerator::EnumerateValue(const Value *V) {
  assert(!V->getType()->isVoidTy() && "Can't insert void values!");
  assert(!isa<MetadataAsValue>(V) && "EnumerateValue doesn't handle Metadata!");

  // Check to see if it's already in!
  unsigned &ValueID = ValueMap[V];
  if (ValueID) {
    // Increment use count.
    Values[ValueID-1].second++;
    return;
  }

  if (auto *GO = dyn_cast<GlobalObject>(V))
    if (const Comdat *C = GO->getComdat())
      Comdats.insert(C);

  // Enumerate the type of this value.
  EnumerateType(V->getType());

  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (isa<GlobalValue>(C)) {
      // Initializers for globals are handled explicitly elsewhere.
    } else if (C->getNumOperands()) {
      // If a constant has operands, enumerate them.  This makes sure that if a
      // constant has uses (for example an array of const ints), they are
      // inserted as well.

      // We prefer to enumerate them with values before we enumerate the user
      // itself.  This makes it more likely that we can avoid forward references
      // in the reader.  We know that there can be no cycles in the constants
      // graph that don't go through a global variable.
      for (User::const_op_iterator I = C->op_begin(), E = C->op_end();
           I != E; ++I)
        if (!isa<BasicBlock>(*I)) // Don't enumerate BB operand to BlockAddress.
          EnumerateValue(*I);
      if (auto *CE = dyn_cast<ConstantExpr>(C)) {
        if (CE->getOpcode() == Instruction::ShuffleVector)
          EnumerateValue(CE->getShuffleMaskForBitcode());
        if (auto *GEP = dyn_cast<GEPOperator>(CE))
          EnumerateType(GEP->getSourceElementType());
      }

      // Finally, add the value.  Doing this could make the ValueID reference be
      // dangling, don't reuse it.
      Values.push_back(std::make_pair(V, 1U));
      ValueMap[V] = Values.size();
      return;
    }
  }

  // Add the value.
  Values.push_back(std::make_pair(V, 1U));
  ValueID = Values.size();
}

void ValueEnumerator::EnumerateType(Type *Ty) {
  unsigned *TypeID = &TypeMap[Ty];

  // We've already seen this type.
  if (*TypeID)
    return;

  // If it is a non-anonymous struct, mark the type as being visited so that we
  // don't recursively visit it.  This is safe because we allow forward
  // references of these in the bitcode reader.
  if (StructType *STy = dyn_cast<StructType>(Ty))
    if (!STy->isLiteral())
      *TypeID = ~0U;

  // Enumerate all of the subtypes before we enumerate this type.  This ensures
  // that the type will be enumerated in an order that can be directly built.
  for (Type *SubTy : Ty->subtypes())
    EnumerateType(SubTy);

  // Refresh the TypeID pointer in case the table rehashed.
  TypeID = &TypeMap[Ty];

  // Check to see if we got the pointer another way.  This can happen when
  // enumerating recursive types that hit the base case deeper than they start.
  //
  // If this is actually a struct that we are treating as forward ref'able,
  // then emit the definition now that all of its contents are available.
  if (*TypeID && *TypeID != ~0U)
    return;

  // Add this type now that its contents are all happily enumerated.
  Types.push_back(Ty);

  *TypeID = Types.size();
}

// Enumerate the types for the specified value.  If the value is a constant,
// walk through it, enumerating the types of the constant.
void ValueEnumerator::EnumerateOperandType(const Value *V) {
  EnumerateType(V->getType());

  assert(!isa<MetadataAsValue>(V) && "Unexpected metadata operand");

  const Constant *C = dyn_cast<Constant>(V);
  if (!C)
    return;

  // If this constant is already enumerated, ignore it, we know its type must
  // be enumerated.
  if (ValueMap.count(C))
    return;

  // This constant may have operands, make sure to enumerate the types in
  // them.
  for (const Value *Op : C->operands()) {
    // Don't enumerate basic blocks here, this happens as operands to
    // blockaddress.
    if (isa<BasicBlock>(Op))
      continue;

    EnumerateOperandType(Op);
  }
  if (auto *CE = dyn_cast<ConstantExpr>(C)) {
    if (CE->getOpcode() == Instruction::ShuffleVector)
      EnumerateOperandType(CE->getShuffleMaskForBitcode());
    if (CE->getOpcode() == Instruction::GetElementPtr)
      EnumerateType(cast<GEPOperator>(CE)->getSourceElementType());
  }
}

void ValueEnumerator::EnumerateAttributes(AttributeList PAL) {
  if (PAL.isEmpty()) return;  // null is always 0.

  // Do a lookup.
  unsigned &Entry = AttributeListMap[PAL];
  if (Entry == 0) {
    // Never saw this before, add it.
    AttributeLists.push_back(PAL);
    Entry = AttributeLists.size();
  }

  // Do lookups for all attribute groups.
  for (unsigned i : PAL.indexes()) {
    AttributeSet AS = PAL.getAttributes(i);
    if (!AS.hasAttributes())
      continue;
    IndexAndAttrSet Pair = {i, AS};
    unsigned &Entry = AttributeGroupMap[Pair];
    if (Entry == 0) {
      AttributeGroups.push_back(Pair);
      Entry = AttributeGroups.size();

      for (Attribute Attr : AS) {
        if (Attr.isTypeAttribute())
          EnumerateType(Attr.getValueAsType());
      }
    }
  }
}

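// incorporateFunction() extends the module-level tables with everything local
// to F: arguments, function-level constants, basic blocks, instructions, and
// function-local metadata.  purgeFunction() below undoes exactly these
// additions so the enumerator can be reused for the next function.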
void ValueEnumerator::incorporateFunction(const Function &F) {
  InstructionCount = 0;
  NumModuleValues = Values.size();

  // Add global metadata to the function block.  This doesn't include
  // LocalAsMetadata.
  incorporateFunctionMetadata(F);

  // Adding function arguments to the value table.
  for (const auto &I : F.args()) {
    EnumerateValue(&I);
    if (I.hasAttribute(Attribute::ByVal))
      EnumerateType(I.getParamByValType());
    else if (I.hasAttribute(Attribute::StructRet))
      EnumerateType(I.getParamStructRetType());
    else if (I.hasAttribute(Attribute::ByRef))
      EnumerateType(I.getParamByRefType());
  }
  FirstFuncConstantID = Values.size();

  // Add all function-level constants to the value table.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB) {
      for (const Use &OI : I.operands()) {
        if ((isa<Constant>(OI) && !isa<GlobalValue>(OI)) || isa<InlineAsm>(OI))
          EnumerateValue(OI);
      }
      if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
        EnumerateValue(SVI->getShuffleMaskForBitcode());
    }
    BasicBlocks.push_back(&BB);
    ValueMap[&BB] = BasicBlocks.size();
  }

  // Optimize the constant layout.
  OptimizeConstants(FirstFuncConstantID, Values.size());

  // Add the function's parameter attributes so they are available for use in
  // the function's instructions.
  EnumerateAttributes(F.getAttributes());

  FirstInstID = Values.size();

  SmallVector<LocalAsMetadata *, 8> FnLocalMDVector;
  SmallVector<DIArgList *, 8> ArgListMDVector;

  auto AddFnLocalMetadata = [&](Metadata *MD) {
    if (!MD)
      return;
    if (auto *Local = dyn_cast<LocalAsMetadata>(MD)) {
      // Enumerate metadata after the instructions they might refer to.
      FnLocalMDVector.push_back(Local);
    } else if (auto *ArgList = dyn_cast<DIArgList>(MD)) {
      ArgListMDVector.push_back(ArgList);
      for (ValueAsMetadata *VMD : ArgList->getArgs()) {
        if (auto *Local = dyn_cast<LocalAsMetadata>(VMD)) {
          // Enumerate metadata after the instructions they might refer to.
          FnLocalMDVector.push_back(Local);
        }
      }
    }
  };

  // Add all of the instructions.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB) {
      for (const Use &OI : I.operands()) {
        if (auto *MD = dyn_cast<MetadataAsValue>(&OI))
          AddFnLocalMetadata(MD->getMetadata());
      }
      /// RemoveDIs: Add non-instruction function-local metadata uses.
      for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange())) {
        assert(DVR.getRawLocation() &&
               "DbgVariableRecord location unexpectedly null");
        AddFnLocalMetadata(DVR.getRawLocation());
        if (DVR.isDbgAssign()) {
          assert(DVR.getRawAddress() &&
                 "DbgVariableRecord location unexpectedly null");
          AddFnLocalMetadata(DVR.getRawAddress());
        }
      }
      if (!I.getType()->isVoidTy())
        EnumerateValue(&I);
    }
  }

  // Add all of the function-local metadata.
  for (unsigned i = 0, e = FnLocalMDVector.size(); i != e; ++i) {
    // At this point, all local values have been incorporated, so we shouldn't
    // have a metadata operand that references a value that hasn't been seen.
    assert(ValueMap.count(FnLocalMDVector[i]->getValue()) &&
           "Missing value for metadata operand");
    EnumerateFunctionLocalMetadata(F, FnLocalMDVector[i]);
  }
  // DIArgList entries must come after function-local metadata, as it is not
  // possible to forward-reference them.
  for (const DIArgList *ArgList : ArgListMDVector)
    EnumerateFunctionLocalListMetadata(F, ArgList);
}

void ValueEnumerator::purgeFunction() {
  /// Remove purged values from the ValueMap.
  for (unsigned i = NumModuleValues, e = Values.size(); i != e; ++i)
    ValueMap.erase(Values[i].first);
  for (const Metadata *MD : llvm::drop_begin(MDs, NumModuleMDs))
    MetadataMap.erase(MD);
  for (const BasicBlock *BB : BasicBlocks)
    ValueMap.erase(BB);

  Values.resize(NumModuleValues);
  MDs.resize(NumModuleMDs);
  BasicBlocks.clear();
  NumMDStrings = 0;
}

static void IncorporateFunctionInfoGlobalBBIDs(
    const Function *F, DenseMap<const BasicBlock *, unsigned> &IDMap) {
  unsigned Counter = 0;
  for (const BasicBlock &BB : *F)
    IDMap[&BB] = ++Counter;
}

/// getGlobalBasicBlockID - This returns the function-specific ID for the
/// specified basic block.  This is relatively expensive information, so it
/// should only be used by rare constructs such as address-of-label.
unsigned ValueEnumerator::getGlobalBasicBlockID(const BasicBlock *BB) const {
  unsigned &Idx = GlobalBasicBlockIDs[BB];
  if (Idx != 0)
    return Idx-1;

  IncorporateFunctionInfoGlobalBBIDs(BB->getParent(), GlobalBasicBlockIDs);
  return getGlobalBasicBlockID(BB);
}

uint64_t ValueEnumerator::computeBitsRequiredForTypeIndicies() const {
  return Log2_32_Ceil(getTypes().size() + 1);
}