93#define DEBUG_TYPE "asan"
99 std::numeric_limits<uint64_t>::max();
140 "__asan_unregister_image_globals";
153 "__asan_stack_malloc_always_";
167 "__asan_option_detect_stack_use_after_return";
170 "__asan_shadow_memory_dynamic_address";
194 "asan-kernel",
cl::desc(
"Enable KernelAddressSanitizer instrumentation"),
199 cl::desc(
"Enable recovery mode (continue-after-error)."),
203 "asan-guard-against-version-mismatch",
204 cl::desc(
"Guard against compiler/runtime version mismatch."),
209 cl::desc(
"instrument read instructions"),
213 "asan-instrument-writes",
cl::desc(
"instrument write instructions"),
222 "asan-instrument-atomics",
232 "asan-always-slow-path",
237 "asan-force-dynamic-shadow",
238 cl::desc(
"Load shadow address into a local variable for each function"),
243 cl::desc(
"Access dynamic shadow through an ifunc global on "
244 "platforms that support this"),
248 "asan-with-ifunc-suppress-remat",
249 cl::desc(
"Suppress rematerialization of dynamic shadow address by passing "
250 "it through inline asm in prologue."),
258 "asan-max-ins-per-bb",
cl::init(10000),
259 cl::desc(
"maximal number of instructions to instrument in any given BB"),
266 "asan-max-inline-poisoning-size",
268 "Inline shadow poisoning for blocks up to the given size in bytes."),
272 "asan-use-after-return",
273 cl::desc(
"Sets the mode of detection for stack-use-after-return."),
275 clEnumValN(AsanDetectStackUseAfterReturnMode::Never,
"never",
276 "Never detect stack use after return."),
278 AsanDetectStackUseAfterReturnMode::Runtime,
"runtime",
279 "Detect stack use after return if "
280 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
281 clEnumValN(AsanDetectStackUseAfterReturnMode::Always,
"always",
282 "Always detect stack use after return.")),
286 cl::desc(
"Create redzones for byval "
287 "arguments (extra copy "
292 cl::desc(
"Check stack-use-after-scope"),
301 cl::desc(
"Handle C++ initializer order"),
305 "asan-detect-invalid-pointer-pair",
310 "asan-detect-invalid-pointer-cmp",
315 "asan-detect-invalid-pointer-sub",
320 "asan-realign-stack",
321 cl::desc(
"Realign stack to the value of this flag (power of two)"),
325 "asan-instrumentation-with-call-threshold",
327 "If the function being instrumented contains more than "
328 "this number of memory accesses, use callbacks instead of "
329 "inline checks (-1 means never use callbacks)."),
333 "asan-memory-access-callback-prefix",
338 "asan-kernel-mem-intrinsic-prefix",
344 cl::desc(
"instrument dynamic allocas"),
348 "asan-skip-promotable-allocas",
353 "asan-constructor-kind",
354 cl::desc(
"Sets the ASan constructor kind"),
357 "Use global constructors")),
364 cl::desc(
"scale of asan shadow mapping"),
369 cl::desc(
"offset of asan shadow mapping [EXPERIMENTAL]"),
383 "asan-opt-same-temp",
cl::desc(
"Instrument the same temp just once"),
387 cl::desc(
"Don't instrument scalar globals"),
391 "asan-opt-stack",
cl::desc(
"Don't instrument scalar stack variables"),
395 "asan-stack-dynamic-alloca",
400 "asan-force-experiment",
406 cl::desc(
"Use private aliases for global variables"),
411 cl::desc(
"Use odr indicators to improve ODR reporting"),
416 cl::desc(
"Use linker features to support dead "
417 "code stripping of globals"),
424 cl::desc(
"Place ASan constructors in comdat sections"),
428 "asan-destructor-kind",
429 cl::desc(
"Sets the ASan destructor kind. The default is to use the value "
430 "provided to the pass constructor"),
433 "Use global destructors")),
// NOTE(review): the leading "453" is residue of an embedded original line
// number from the extraction — the real macro is plain STATISTIC(...).
// LLVM STATISTIC counter: number of load instructions that received an
// ASan shadow-memory check (incremented in instrumentMop, see the
// `NumInstrumentedReads++` site later in this file).
453STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
// NOTE(review): the leading "454" is residue of an embedded original line
// number from the extraction — the real macro is plain STATISTIC(...).
// LLVM STATISTIC counter: number of store instructions that received an
// ASan shadow-memory check (incremented in instrumentMop, see the
// `NumInstrumentedWrites++` site later in this file).
454STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
456 "Number of optimized accesses to global vars");
458 "Number of optimized accesses to stack vars");
467struct ShadowMapping {
478 bool IsAndroid = TargetTriple.
isAndroid();
481 bool IsMacOS = TargetTriple.
isMacOSX();
484 bool IsPS = TargetTriple.
isPS();
491 bool IsMIPS32 = TargetTriple.
isMIPS32();
492 bool IsMIPS64 = TargetTriple.
isMIPS64();
493 bool IsArmOrThumb = TargetTriple.
isARM() || TargetTriple.
isThumb();
500 bool IsAMDGPU = TargetTriple.
isAMDGPU();
502 ShadowMapping Mapping;
509 if (LongSize == 32) {
512 else if (IsMIPSN32ABI)
524 else if (IsEmscripten)
537 else if (IsFreeBSD && IsAArch64)
539 else if (IsFreeBSD && !IsMIPS64) {
544 }
else if (IsNetBSD) {
551 else if (IsLinux && IsX86_64) {
557 }
else if (IsWindows && IsX86_64) {
563 else if (IsMacOS && IsAArch64)
567 else if (IsLoongArch64)
591 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
592 !IsRISCV64 && !IsLoongArch64 &&
593 !(Mapping.Offset & (Mapping.Offset - 1)) &&
595 bool IsAndroidWithIfuncSupport =
597 Mapping.InGlobal =
ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;
605 int *MappingScale,
bool *OrShadowOffset) {
607 *ShadowBase = Mapping.Offset;
608 *MappingScale = Mapping.Scale;
609 *OrShadowOffset = Mapping.OrShadowOffset;
619 uint8_t AccessSizeIndex)
623 AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
624 CompileKernel(CompileKernel) {}
631 return std::max(32U, 1U << MappingScale);
645struct AddressSanitizer {
647 bool CompileKernel =
false,
bool Recover =
false,
648 bool UseAfterScope =
false,
650 AsanDetectStackUseAfterReturnMode::Runtime)
658 C = &(
M.getContext());
659 LongSize =
M.getDataLayout().getPointerSizeInBits();
663 TargetTriple =
Triple(
M.getTargetTriple());
667 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
675 bool isInterestingAlloca(
const AllocaInst &AI);
678 void getInterestingMemoryOperands(
684 void instrumentPointerComparisonOrSubtraction(
Instruction *
I);
690 uint32_t TypeStoreSize,
bool IsWrite,
691 Value *SizeArgument);
694 TypeSize TypeStoreSize,
bool IsWrite,
695 Value *SizeArgument,
bool UseCalls,
700 bool IsWrite,
size_t AccessSizeIndex,
704 bool suppressInstrumentationSiteForDebug(
int &Instrumented);
706 bool maybeInsertAsanInitAtFunctionEntry(
Function &
F);
707 bool maybeInsertDynamicShadowAtFunctionEntry(
Function &
F);
708 void markEscapedLocalAllocas(
Function &
F);
711 friend struct FunctionStackPoisoner;
721 struct FunctionStateRAII {
722 AddressSanitizer *
Pass;
724 FunctionStateRAII(AddressSanitizer *
Pass) :
Pass(
Pass) {
726 "last pass forgot to clear cache");
730 ~FunctionStateRAII() {
731 Pass->LocalDynamicShadow =
nullptr;
732 Pass->ProcessedAllocas.clear();
746 ShadowMapping Mapping;
760 Value *LocalDynamicShadow =
nullptr;
768class ModuleAddressSanitizer {
770 ModuleAddressSanitizer(
Module &M,
bool CompileKernel =
false,
771 bool Recover =
false,
bool UseGlobalsGC =
true,
772 bool UseOdrIndicator =
true,
778 UseGlobalsGC(UseGlobalsGC &&
ClUseGlobalsGC && !this->CompileKernel),
793 UseCtorComdat(UseGlobalsGC &&
ClWithComdat && !this->CompileKernel),
794 DestructorKind(DestructorKind),
795 ConstructorKind(ConstructorKind) {
796 C = &(
M.getContext());
797 int LongSize =
M.getDataLayout().getPointerSizeInBits();
799 TargetTriple =
Triple(
M.getTargetTriple());
804 assert(this->DestructorKind != AsanDtorKind::Invalid);
807 bool instrumentModule(
Module &);
810 void initializeCallbacks(
Module &M);
819 const std::string &UniqueModuleId);
836 bool ShouldUseMachOGlobalsSection()
const;
837 StringRef getGlobalMetadataSection()
const;
840 uint64_t getMinRedzoneSizeForGlobal()
const {
844 int GetAsanVersion(
const Module &M)
const;
849 bool UsePrivateAlias;
850 bool UseOdrIndicator;
857 ShadowMapping Mapping;
867 Function *AsanCtorFunction =
nullptr;
868 Function *AsanDtorFunction =
nullptr;
880struct FunctionStackPoisoner :
public InstVisitor<FunctionStackPoisoner> {
882 AddressSanitizer &ASan;
887 ShadowMapping Mapping;
896 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
900 struct AllocaPoisonCall {
908 bool HasUntracedLifetimeIntrinsic =
false;
915 bool HasInlineAsm =
false;
916 bool HasReturnsTwiceCall =
false;
919 FunctionStackPoisoner(
Function &F, AddressSanitizer &ASan)
921 C(ASan.
C), IntptrTy(ASan.IntptrTy),
922 IntptrPtrTy(
PointerType::
get(IntptrTy, 0)), Mapping(ASan.Mapping),
931 copyArgsPassedByValToAllocas();
936 if (AllocaVec.
empty() && DynamicAllocaVec.
empty())
return false;
938 initializeCallbacks(*
F.getParent());
940 if (HasUntracedLifetimeIntrinsic) {
944 StaticAllocaPoisonCallVec.
clear();
945 DynamicAllocaPoisonCallVec.
clear();
948 processDynamicAllocas();
949 processStaticAllocas();
960 void copyArgsPassedByValToAllocas();
965 void processStaticAllocas();
966 void processDynamicAllocas();
968 void createDynamicAllocasInitStorage();
986 void unpoisonDynamicAllocasBeforeInst(
Instruction *InstBefore,
989 Value *DynamicAreaPtr = IRB.CreatePtrToInt(SavedStack, IntptrTy);
994 if (!isa<ReturnInst>(InstBefore)) {
996 InstBefore->
getModule(), Intrinsic::get_dynamic_area_offset,
999 Value *DynamicAreaOffset = IRB.CreateCall(DynamicAreaOffsetFunc, {});
1001 DynamicAreaPtr = IRB.CreateAdd(IRB.CreatePtrToInt(SavedStack, IntptrTy),
1006 AsanAllocasUnpoisonFunc,
1007 {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1011 void unpoisonDynamicAllocas() {
1013 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1015 for (
Instruction *StackRestoreInst : StackRestoreVec)
1016 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1017 StackRestoreInst->getOperand(0));
1030 void handleDynamicAllocaCall(
AllocaInst *AI);
1035 if (!ASan.isInterestingAlloca(AI) ||
1040 if (AllocaVec.
empty())
1058 if (
ID == Intrinsic::stackrestore) StackRestoreVec.
push_back(&II);
1059 if (
ID == Intrinsic::localescape) LocalEscapeCall = &II;
1060 if (!ASan.UseAfterScope)
1067 if (
Size->isMinusOne())
return;
1070 const uint64_t SizeValue =
Size->getValue().getLimitedValue();
1071 if (SizeValue == ~0ULL ||
1079 HasUntracedLifetimeIntrinsic =
true;
1083 if (!ASan.isInterestingAlloca(*AI))
1085 bool DoPoison = (
ID == Intrinsic::lifetime_end);
1086 AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
1088 StaticAllocaPoisonCallVec.
push_back(APC);
1090 DynamicAllocaPoisonCallVec.
push_back(APC);
1094 if (
CallInst *CI = dyn_cast<CallInst>(&CB)) {
1095 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1096 HasReturnsTwiceCall |= CI->canReturnTwice();
1101 void initializeCallbacks(
Module &M);
1128 OS, MapClassName2PassName);
1140 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1145 ModuleAddressSanitizer ModuleSanitizer(M, Options.
CompileKernel,
1147 UseOdrIndicator, DestructorKind,
1154 AddressSanitizer FunctionSanitizer(M, SSGI, Options.
CompileKernel,
1158 Modified |= FunctionSanitizer.instrumentFunction(
F, &TLI);
1160 Modified |= ModuleSanitizer.instrumentModule(M);
1181 if (
G->getName().startswith(
"llvm.") ||
1183 G->getName().startswith(
"__llvm_gcov_ctr") ||
1185 G->getName().startswith(
"__llvm_rtti_proxy"))
1198 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1200 if (AddrSpace == 3 || AddrSpace == 5)
1207 Shadow = IRB.
CreateLShr(Shadow, Mapping.Scale);
1208 if (Mapping.Offset == 0)
return Shadow;
1211 if (LocalDynamicShadow)
1212 ShadowBase = LocalDynamicShadow;
1215 if (Mapping.OrShadowOffset)
1216 return IRB.
CreateOr(Shadow, ShadowBase);
1218 return IRB.
CreateAdd(Shadow, ShadowBase);
1224 if (isa<MemTransferInst>(
MI)) {
1226 isa<MemMoveInst>(
MI) ? AsanMemmove : AsanMemcpy,
1230 }
else if (isa<MemSetInst>(
MI)) {
1237 MI->eraseFromParent();
1241bool AddressSanitizer::isInterestingAlloca(
const AllocaInst &AI) {
1242 auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
1244 if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
1245 return PreviouslySeenAllocaInfo->getSecond();
1247 bool IsInteresting =
1260 !(SSGI && SSGI->
isSafe(AI)));
1262 ProcessedAllocas[&AI] = IsInteresting;
1263 return IsInteresting;
1268 Type *PtrTy = cast<PointerType>(
Ptr->getType()->getScalarType());
1277 if (
Ptr->isSwiftError())
1283 if (
auto AI = dyn_cast_or_null<AllocaInst>(
Ptr))
1294void AddressSanitizer::getInterestingMemoryOperands(
1297 if (LocalDynamicShadow ==
I)
1300 if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
1303 Interesting.
emplace_back(
I, LI->getPointerOperandIndex(),
false,
1304 LI->getType(), LI->getAlign());
1305 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(
I)) {
1309 SI->getValueOperand()->getType(),
SI->getAlign());
1313 Interesting.
emplace_back(
I, RMW->getPointerOperandIndex(),
true,
1314 RMW->getValOperand()->getType(), std::nullopt);
1318 Interesting.
emplace_back(
I, XCHG->getPointerOperandIndex(),
true,
1319 XCHG->getCompareOperand()->getType(),
1321 }
else if (
auto CI = dyn_cast<CallInst>(
I)) {
1322 if (CI->getIntrinsicID() == Intrinsic::masked_load ||
1323 CI->getIntrinsicID() == Intrinsic::masked_store) {
1324 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_store;
1326 unsigned OpOffset = IsWrite ? 1 : 0;
1330 auto BasePtr = CI->getOperand(OpOffset);
1331 if (ignoreAccess(
I, BasePtr))
1333 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1336 if (
auto *Op = dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
1337 Alignment =
Op->getMaybeAlignValue();
1338 Value *
Mask = CI->getOperand(2 + OpOffset);
1339 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, Mask);
1341 for (
unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1343 ignoreAccess(
I, CI->getArgOperand(ArgNo)))
1345 Type *Ty = CI->getParamByValType(ArgNo);
1353 return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
1360 if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(
I)) {
1361 if (!Cmp->isRelational())
1375 if (BO->getOpcode() != Instruction::Sub)
1388 if (!
G->hasInitializer())
1391 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().IsDynInit)
1397void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1400 FunctionCallee F = isa<ICmpInst>(
I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
1401 Value *
Param[2] = {
I->getOperand(0),
I->getOperand(1)};
1402 for (
Value *&i : Param) {
1403 if (i->getType()->isPointerTy())
1412 TypeSize TypeStoreSize,
bool IsWrite,
1413 Value *SizeArgument,
bool UseCalls,
1419 switch (FixedSize) {
1425 if (!Alignment || *Alignment >= Granularity ||
1426 *Alignment >= FixedSize / 8)
1427 return Pass->instrumentAddress(
I, InsertBefore,
Addr, FixedSize,
1428 IsWrite,
nullptr, UseCalls, Exp);
1431 Pass->instrumentUnusualSizeOrAlignment(
I, InsertBefore,
Addr, TypeStoreSize,
1432 IsWrite,
nullptr, UseCalls, Exp);
1439 unsigned Granularity,
Type *OpType,
1440 bool IsWrite,
Value *SizeArgument,
1442 auto *VTy = cast<VectorType>(OpType);
1444 TypeSize ElemTypeSize =
DL.getTypeStoreSizeInBits(VTy->getScalarType());
1449 Value *MaskElem = IRB.CreateExtractElement(Mask, Index);
1450 if (auto *MaskElemC = dyn_cast<ConstantInt>(MaskElem)) {
1451 if (MaskElemC->isZero())
1457 Instruction *ThenTerm = SplitBlockAndInsertIfThen(MaskElem, &*IRB.GetInsertPoint(), false);
1458 IRB.SetInsertPoint(ThenTerm);
1463 Granularity, ElemTypeSize, IsWrite, SizeArgument,
1491 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1492 NumOptimizedAccessesToGlobalVar++;
1500 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1501 NumOptimizedAccessesToStackVar++;
1507 NumInstrumentedWrites++;
1509 NumInstrumentedReads++;
1511 unsigned Granularity = 1 << Mapping.Scale;
1514 Addr,
O.Alignment, Granularity,
O.OpType,
1515 O.IsWrite,
nullptr, UseCalls, Exp);
1518 Granularity,
O.TypeStoreSize,
O.IsWrite,
nullptr, UseCalls,
1525 size_t AccessSizeIndex,
1526 Value *SizeArgument,
1534 {
Addr, SizeArgument});
1537 {
Addr, SizeArgument, ExpVal});
1541 IRB.
CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex],
Addr);
1543 Call = IRB.
CreateCall(AsanErrorCallback[IsWrite][1][AccessSizeIndex],
1547 Call->setCannotMerge();
1554 size_t Granularity =
static_cast<size_t>(1) << Mapping.Scale;
1556 Value *LastAccessedByte =
1559 if (TypeStoreSize / 8 > 1)
1569Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1571 uint32_t TypeStoreSize,
bool IsWrite,
Value *SizeArgument) {
1575 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1578 return InsertBefore;
1584 Value *IsSharedOrPrivate = IRB.
CreateOr(IsShared, IsPrivate);
1586 Value *AddrSpaceZeroLanding =
1588 InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
1589 return InsertBefore;
1592void AddressSanitizer::instrumentAddress(
Instruction *OrigIns,
1594 uint32_t TypeStoreSize,
bool IsWrite,
1595 Value *SizeArgument,
bool UseCalls,
1597 if (TargetTriple.isAMDGPU()) {
1598 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore,
Addr,
1599 TypeStoreSize, IsWrite, SizeArgument);
1606 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1609 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1621 IRB.
CreateCall(AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex],
1624 IRB.
CreateCall(AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1632 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1633 Value *ShadowValue =
1637 size_t Granularity = 1ULL << Mapping.Scale;
1645 assert(cast<BranchInst>(CheckTerm)->isUnconditional());
1648 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1662 Instruction *Crash = generateCrashCode(CrashTerm, AddrLong, IsWrite,
1663 AccessSizeIndex, SizeArgument, Exp);
1671void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1673 bool IsWrite,
Value *SizeArgument,
bool UseCalls,
uint32_t Exp) {
1681 IRB.
CreateCall(AsanMemoryAccessCallbackSized[IsWrite][0],
1684 IRB.
CreateCall(AsanMemoryAccessCallbackSized[IsWrite][1],
1691 instrumentAddress(
I, InsertBefore,
Addr, 8, IsWrite,
Size,
false, Exp);
1692 instrumentAddress(
I, InsertBefore, LastByte, 8, IsWrite,
Size,
false, Exp);
1696void ModuleAddressSanitizer::poisonOneInitializer(
Function &GlobalInit,
1704 IRB.
CreateCall(AsanPoisonGlobals, ModuleNameAddr);
1707 for (
auto &BB : GlobalInit)
1708 if (
ReturnInst *RI = dyn_cast<ReturnInst>(BB.getTerminator()))
1712void ModuleAddressSanitizer::createInitializerPoisonCalls(
1723 if (isa<ConstantAggregateZero>(
OP))
continue;
1729 auto *Priority = cast<ConstantInt>(CS->
getOperand(0));
1739ModuleAddressSanitizer::getExcludedAliasedGlobal(
const GlobalAlias &GA)
const {
1744 assert(CompileKernel &&
"Only expecting to be called when compiling kernel");
1751 return dyn_cast<GlobalVariable>(
C->stripPointerCastsAndAliases());
1756bool ModuleAddressSanitizer::shouldInstrumentGlobal(
GlobalVariable *
G)
const {
1757 Type *Ty =
G->getValueType();
1760 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().NoAddress)
1762 if (!Ty->
isSized())
return false;
1763 if (!
G->hasInitializer())
return false;
1765 if (
G->getAddressSpace() &&
1772 if (
G->isThreadLocal())
return false;
1774 if (
G->getAlign() && *
G->getAlign() > getMinRedzoneSizeForGlobal())
return false;
1780 if (!TargetTriple.isOSBinFormatCOFF()) {
1781 if (!
G->hasExactDefinition() ||
G->hasComdat())
1785 if (
G->isInterposable())
1792 switch (
C->getSelectionKind()) {
1803 if (
G->hasSection()) {
1813 if (Section ==
"llvm.metadata")
return false;
1820 if (
Section.startswith(
".preinit_array") ||
1821 Section.startswith(
".init_array") ||
1822 Section.startswith(
".fini_array")) {
1828 if (TargetTriple.isOSBinFormatELF()) {
1830 [](
char c) {
return llvm::isAlnum(c) || c ==
'_'; }))
1842 if (TargetTriple.isOSBinFormatCOFF() &&
Section.contains(
'$')) {
1843 LLVM_DEBUG(
dbgs() <<
"Ignoring global in sorted section (contains '$'): "
1848 if (TargetTriple.isOSBinFormatMachO()) {
1850 unsigned TAA = 0, StubSize = 0;
1853 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
1858 if (ParsedSegment ==
"__OBJC" ||
1859 (ParsedSegment ==
"__DATA" && ParsedSection.
startswith(
"__objc_"))) {
1871 if (ParsedSegment ==
"__DATA" && ParsedSection ==
"__cfstring") {
1884 if (CompileKernel) {
1887 if (
G->getName().startswith(
"__"))
1897bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection()
const {
1898 if (!TargetTriple.isOSBinFormatMachO())
1901 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
1903 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
1905 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
1907 if (TargetTriple.isDriverKit())
1913StringRef ModuleAddressSanitizer::getGlobalMetadataSection()
const {
1914 switch (TargetTriple.getObjectFormat()) {
1924 "ModuleAddressSanitizer not implemented for object file format");
1931void ModuleAddressSanitizer::initializeCallbacks(
Module &M) {
1937 AsanUnpoisonGlobals =
1941 AsanRegisterGlobals =
M.getOrInsertFunction(
1943 AsanUnregisterGlobals =
M.getOrInsertFunction(
1948 AsanRegisterImageGlobals =
M.getOrInsertFunction(
1950 AsanUnregisterImageGlobals =
M.getOrInsertFunction(
1953 AsanRegisterElfGlobals =
1955 IntptrTy, IntptrTy, IntptrTy);
1956 AsanUnregisterElfGlobals =
1958 IntptrTy, IntptrTy, IntptrTy);
1963void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
1968 if (!
G->hasName()) {
1975 if (!InternalSuffix.
empty() &&
G->hasLocalLinkage()) {
1976 std::string
Name = std::string(
G->getName());
1977 Name += InternalSuffix;
1978 C =
M.getOrInsertComdat(
Name);
1980 C =
M.getOrInsertComdat(
G->getName());
1986 if (TargetTriple.isOSBinFormatCOFF()) {
1988 if (
G->hasPrivateLinkage())
2001ModuleAddressSanitizer::CreateMetadataGlobal(
Module &M,
Constant *Initializer,
2003 auto Linkage = TargetTriple.isOSBinFormatMachO()
2007 M, Initializer->
getType(),
false, Linkage, Initializer,
2009 Metadata->setSection(getGlobalMetadataSection());
2017 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2025void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2029 auto &
DL =
M.getDataLayout();
2032 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2033 Constant *Initializer = MetadataInitializers[i];
2036 CreateMetadataGlobal(M, Initializer,
G->getName());
2038 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2044 unsigned SizeOfGlobalStruct =
DL.getTypeAllocSize(Initializer->
getType());
2046 "global metadata will not be padded appropriately");
2049 SetComdatForGlobalMetadata(
G,
Metadata,
"");
2054 if (!MetadataGlobals.empty())
2058void ModuleAddressSanitizer::InstrumentGlobalsELF(
2061 const std::string &UniqueModuleId) {
2068 bool UseComdatForGlobalsGC = UseOdrIndicator;
2071 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2074 CreateMetadataGlobal(M, MetadataInitializers[i],
G->getName());
2076 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2079 if (UseComdatForGlobalsGC)
2080 SetComdatForGlobalMetadata(
G,
Metadata, UniqueModuleId);
2085 if (!MetadataGlobals.empty())
2102 "__start_" + getGlobalMetadataSection());
2106 "__stop_" + getGlobalMetadataSection());
2120 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2127void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2138 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2139 Constant *Initializer = MetadataInitializers[i];
2142 CreateMetadataGlobal(M, Initializer,
G->getName());
2146 auto LivenessBinder =
2151 Twine(
"__asan_binder_") +
G->getName());
2152 Liveness->
setSection(
"__DATA,__asan_liveness,regular,live_support");
2153 LivenessGlobals[i] = Liveness;
2160 if (!LivenessGlobals.empty())
2182 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2187void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2191 unsigned N = ExtendedGlobals.
size();
2201 if (Mapping.Scale > 3)
2202 AllGlobals->setAlignment(
Align(1ULL << Mapping.Scale));
2213 IrbDtor.CreateCall(AsanUnregisterGlobals,
2226 *CtorComdat =
false;
2231 if (CompileKernel) {
2232 for (
auto &GA :
M.aliases()) {
2234 AliasedGlobalExclusions.
insert(GV);
2239 for (
auto &
G :
M.globals()) {
2240 if (!AliasedGlobalExclusions.
count(&
G) && shouldInstrumentGlobal(&
G))
2244 size_t n = GlobalsToChange.
size();
2250 auto &
DL =
M.getDataLayout();
2264 IntptrTy, IntptrTy, IntptrTy);
2268 bool HasDynamicallyInitializedGlobals =
false;
2275 for (
size_t i = 0; i < n; i++) {
2279 if (
G->hasSanitizerMetadata())
2280 MD =
G->getSanitizerMetadata();
2285 std::string NameForGlobal =
G->getName().str();
2290 Type *Ty =
G->getValueType();
2291 const uint64_t SizeInBytes =
DL.getTypeAllocSize(Ty);
2292 const uint64_t RightRedzoneSize = getRedzoneSizeForGlobal(SizeInBytes);
2304 M, NewTy,
G->isConstant(), Linkage, NewInitializer,
"",
G,
2305 G->getThreadLocalMode(),
G->getAddressSpace());
2315 if (TargetTriple.isOSBinFormatMachO() && !
G->hasSection() &&
2317 auto Seq = dyn_cast<ConstantDataSequential>(
G->getInitializer());
2318 if (Seq && Seq->isCString())
2319 NewGlobal->
setSection(
"__TEXT,__asan_cstring,regular");
2330 G->replaceAllUsesWith(
2333 G->eraseFromParent();
2334 NewGlobals[i] = NewGlobal;
2339 bool CanUsePrivateAliases =
2340 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2341 TargetTriple.isOSBinFormatWasm();
2342 if (CanUsePrivateAliases && UsePrivateAlias) {
2345 InstrumentedGlobal =
2353 }
else if (UseOdrIndicator) {
2356 auto *ODRIndicatorSym =
2365 ODRIndicatorSym->setAlignment(
Align(1));
2366 ODRIndicator = ODRIndicatorSym;
2381 HasDynamicallyInitializedGlobals =
true;
2385 Initializers[i] = Initializer;
2391 for (
size_t i = 0; i < n; i++) {
2393 if (
G->getName().empty())
continue;
2398 std::string ELFUniqueModuleId =
2402 if (!ELFUniqueModuleId.empty()) {
2403 InstrumentGlobalsELF(IRB, M, NewGlobals, Initializers, ELFUniqueModuleId);
2405 }
else if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2406 InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
2407 }
else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2408 InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
2410 InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
2414 if (HasDynamicallyInitializedGlobals)
2422ModuleAddressSanitizer::getRedzoneSizeForGlobal(
uint64_t SizeInBytes)
const {
2423 constexpr uint64_t kMaxRZ = 1 << 18;
2424 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2427 if (SizeInBytes <= MinRZ / 2) {
2431 RZ = MinRZ - SizeInBytes;
2434 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2437 if (SizeInBytes % MinRZ)
2438 RZ += MinRZ - (SizeInBytes % MinRZ);
2441 assert((RZ + SizeInBytes) % MinRZ == 0);
2446int ModuleAddressSanitizer::GetAsanVersion(
const Module &M)
const {
2447 int LongSize =
M.getDataLayout().getPointerSizeInBits();
2452 Version += (LongSize == 32 && isAndroid);
2456bool ModuleAddressSanitizer::instrumentModule(
Module &M) {
2457 initializeCallbacks(M);
2462 if (CompileKernel) {
2467 std::string AsanVersion = std::to_string(GetAsanVersion(M));
2468 std::string VersionCheckName =
2470 std::tie(AsanCtorFunction, std::ignore) =
2473 {}, VersionCheckName);
2477 bool CtorComdat =
true;
2480 if (AsanCtorFunction) {
2481 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2482 InstrumentGlobals(IRB, M, &CtorComdat);
2485 InstrumentGlobals(IRB, M, &CtorComdat);
2494 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2495 if (AsanCtorFunction) {
2499 if (AsanDtorFunction) {
2504 if (AsanCtorFunction)
2506 if (AsanDtorFunction)
2517 for (
int Exp = 0;
Exp < 2;
Exp++) {
2518 for (
size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2519 const std::string TypeStr = AccessIsWrite ?
"store" :
"load";
2520 const std::string ExpStr =
Exp ?
"exp_" :
"";
2521 const std::string EndingStr = Recover ?
"_noabort" :
"";
2530 Args1.push_back(ExpType);
2531 if (
auto AK = TLI->getExtAttrForI32Param(
false)) {
2536 AsanErrorCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2540 AsanMemoryAccessCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2545 AccessSizeIndex++) {
2546 const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
2547 AsanErrorCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2548 M.getOrInsertFunction(
2552 AsanMemoryAccessCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2553 M.getOrInsertFunction(
2560 const std::string MemIntrinCallbackPrefix =
2564 AsanMemmove =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memmove",
2567 AsanMemcpy =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memcpy",
2570 AsanMemset =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memset",
2575 AsanHandleNoReturnFunc =
2578 AsanPtrCmpFunction =
2580 AsanPtrSubFunction =
2582 if (Mapping.InGlobal)
2583 AsanShadowGlobal =
M.getOrInsertGlobal(
"__asan_shadow",
2586 AMDGPUAddressShared =
M.getOrInsertFunction(
2588 AMDGPUAddressPrivate =
M.getOrInsertFunction(
2592bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(
Function &
F) {
2600 if (
F.getName().find(
" load]") != std::string::npos) {
2610bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(
Function &
F) {
2616 if (Mapping.InGlobal) {
2624 LocalDynamicShadow =
2625 IRB.
CreateCall(Asm, {AsanShadowGlobal},
".asan.shadow");
2627 LocalDynamicShadow =
2631 Value *GlobalDynamicAddress =
F.getParent()->getOrInsertGlobal(
2633 LocalDynamicShadow = IRB.
CreateLoad(IntptrTy, GlobalDynamicAddress);
2638void AddressSanitizer::markEscapedLocalAllocas(
Function &
F) {
2643 assert(ProcessedAllocas.empty() &&
"must process localescape before allocas");
2647 if (!
F.getParent()->getFunction(
"llvm.localescape"))
return;
2656 AllocaInst *AI = dyn_cast<AllocaInst>(
Arg->stripPointerCasts());
2658 "non-static alloca arg to localescape");
2659 ProcessedAllocas[AI] =
false;
2666bool AddressSanitizer::suppressInstrumentationSiteForDebug(
int &Instrumented) {
2667 bool ShouldInstrument =
2671 return !ShouldInstrument;
2674bool AddressSanitizer::instrumentFunction(
Function &
F,
2680 if (
F.getName().startswith(
"__asan_"))
return false;
2682 bool FunctionModified =
false;
2687 if (maybeInsertAsanInitAtFunctionEntry(
F))
2688 FunctionModified =
true;
2691 if (!
F.hasFnAttribute(Attribute::SanitizeAddress))
return FunctionModified;
2693 if (
F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
2694 return FunctionModified;
2698 initializeCallbacks(*
F.getParent(), TLI);
2700 FunctionStateRAII CleanupObj(
this);
2702 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(
F);
2706 markEscapedLocalAllocas(
F);
2718 for (
auto &BB :
F) {
2720 TempsToInstrument.
clear();
2721 int NumInsnsPerBB = 0;
2722 for (
auto &Inst : BB) {
2723 if (LooksLikeCodeInBug11395(&Inst))
return false;
2728 getInterestingMemoryOperands(&Inst, InterestingOperands);
2730 if (!InterestingOperands.
empty()) {
2731 for (
auto &Operand : InterestingOperands) {
2737 if (Operand.MaybeMask) {
2741 if (!TempsToInstrument.
insert(
Ptr).second)
2745 OperandsToInstrument.
push_back(Operand);
2752 PointerComparisonsOrSubtracts.
push_back(&Inst);
2758 if (
auto *CB = dyn_cast<CallBase>(&Inst)) {
2760 TempsToInstrument.
clear();
2764 if (
CallInst *CI = dyn_cast<CallInst>(&Inst))
2772 OperandsToInstrument.
size() + IntrinToInstrument.
size() >
2780 int NumInstrumented = 0;
2781 for (
auto &Operand : OperandsToInstrument) {
2782 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
2783 instrumentMop(ObjSizeVis, Operand, UseCalls,
2784 F.getParent()->getDataLayout());
2785 FunctionModified =
true;
2787 for (
auto *Inst : IntrinToInstrument) {
2788 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
2789 instrumentMemIntrinsic(Inst);
2790 FunctionModified =
true;
2793 FunctionStackPoisoner FSP(
F, *
this);
2794 bool ChangedStack = FSP.runOnFunction();
2798 for (
auto *CI : NoReturnCalls) {
2803 for (
auto *Inst : PointerComparisonsOrSubtracts) {
2804 instrumentPointerComparisonOrSubtraction(Inst);
2805 FunctionModified =
true;
2808 if (ChangedStack || !NoReturnCalls.empty())
2809 FunctionModified =
true;
2811 LLVM_DEBUG(
dbgs() <<
"ASAN done instrumenting: " << FunctionModified <<
" "
2814 return FunctionModified;
2820bool AddressSanitizer::LooksLikeCodeInBug11395(
Instruction *
I) {
2821 if (LongSize != 32)
return false;
2830void FunctionStackPoisoner::initializeCallbacks(
Module &M) {
2834 const char *MallocNameTemplate =
2839 std::string Suffix = itostr(
Index);
2840 AsanStackMallocFunc[
Index] =
M.getOrInsertFunction(
2841 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
2842 AsanStackFreeFunc[
Index] =
2847 if (ASan.UseAfterScope) {
2848 AsanPoisonStackMemoryFunc =
M.getOrInsertFunction(
2850 AsanUnpoisonStackMemoryFunc =
M.getOrInsertFunction(
2854 for (
size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
2855 0xf3, 0xf5, 0xf8}) {
2856 std::ostringstream
Name;
2858 Name << std::setw(2) << std::setfill(
'0') << std::hex << Val;
2859 AsanSetShadowFunc[Val] =
2860 M.getOrInsertFunction(
Name.str(), IRB.
getVoidTy(), IntptrTy, IntptrTy);
2863 AsanAllocaPoisonFunc =
M.getOrInsertFunction(
2865 AsanAllocasUnpoisonFunc =
M.getOrInsertFunction(
2871 size_t Begin,
size_t End,
2873 Value *ShadowBase) {
2877 const size_t LargestStoreSizeInBytes =
2878 std::min<size_t>(
sizeof(
uint64_t), ASan.LongSize / 8);
2880 const bool IsLittleEndian =
F.getParent()->getDataLayout().isLittleEndian();
2886 for (
size_t i = Begin; i < End;) {
2887 if (!ShadowMask[i]) {
2893 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
2895 while (StoreSizeInBytes > End - i)
2896 StoreSizeInBytes /= 2;
2899 for (
size_t j = StoreSizeInBytes - 1;
j && !ShadowMask[i +
j]; --
j) {
2900 while (j <= StoreSizeInBytes / 2)
2901 StoreSizeInBytes /= 2;
2905 for (
size_t j = 0;
j < StoreSizeInBytes;
j++) {
2907 Val |= (
uint64_t)ShadowBytes[i + j] << (8 * j);
2909 Val = (Val << 8) | ShadowBytes[i + j];
2913 Value *Poison = IRB.
getIntN(StoreSizeInBytes * 8, Val);
2918 i += StoreSizeInBytes;
2925 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.
size(), IRB, ShadowBase);
2930 size_t Begin,
size_t End,
2933 size_t Done = Begin;
2934 for (
size_t i = Begin, j = Begin + 1; i < End; i =
j++) {
2935 if (!ShadowMask[i]) {
2939 uint8_t Val = ShadowBytes[i];
2940 if (!AsanSetShadowFunc[Val])
2944 for (;
j < End && ShadowMask[
j] && Val == ShadowBytes[
j]; ++
j) {
2948 copyToShadowInline(ShadowMask, ShadowBytes,
Done, i, IRB, ShadowBase);
2956 copyToShadowInline(ShadowMask, ShadowBytes,
Done, End, IRB, ShadowBase);
2964 for (
int i = 0;; i++, MaxSize *= 2)
2965 if (LocalStackSize <= MaxSize)
return i;
2969void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
2971 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
2979 if (
Arg.hasByValAttr()) {
2980 Type *Ty =
Arg.getParamByValType();
2981 const Align Alignment =
2982 DL.getValueOrABITypeAlignment(
Arg.getParamAlign(), Ty);
2986 (
Arg.hasName() ?
Arg.getName() :
"Arg" +
Twine(
Arg.getArgNo())) +
2989 Arg.replaceAllUsesWith(AI);
2991 uint64_t AllocSize =
DL.getTypeAllocSize(Ty);
3000 Value *ValueIfFalse) {
3003 PHI->addIncoming(ValueIfFalse, CondBlock);
3005 PHI->addIncoming(ValueIfTrue, ThenBlock);
3009Value *FunctionStackPoisoner::createAllocaForLayout(
3018 nullptr,
"MyAlloca");
3027void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3030 DynamicAllocaLayout = IRB.
CreateAlloca(IntptrTy,
nullptr);
3035void FunctionStackPoisoner::processDynamicAllocas() {
3042 for (
const auto &APC : DynamicAllocaPoisonCallVec) {
3045 assert(ASan.isInterestingAlloca(*APC.AI));
3046 assert(!APC.AI->isStaticAlloca());
3049 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3056 createDynamicAllocasInitStorage();
3057 for (
auto &AI : DynamicAllocaVec)
3058 handleDynamicAllocaCall(AI);
3059 unpoisonDynamicAllocas();
3071 for (
Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
3081 if (isa<AllocaInst>(It) || isa<CastInst>(It))
3083 if (
auto *Store = dyn_cast<StoreInst>(It)) {
3087 auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
3088 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3091 Value *Val = Store->getValueOperand();
3092 bool IsDirectArgInit = isa<Argument>(Val);
3093 bool IsArgInitViaCast =
3094 isa<CastInst>(Val) &&
3095 isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
3098 Val == It->getPrevNonDebugInstruction();
3099 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3103 if (IsArgInitViaCast)
3104 InitInsts.
push_back(cast<Instruction>(Val));
3115void FunctionStackPoisoner::processStaticAllocas() {
3116 if (AllocaVec.
empty()) {
3121 int StackMallocIdx = -1;
3123 if (
auto SP =
F.getSubprogram())
3124 EntryDebugLocation =
3133 auto InsBeforeB = InsBefore->
getParent();
3134 assert(InsBeforeB == &
F.getEntryBlock());
3135 for (
auto *AI : StaticAllocasToMoveUp)
3146 ArgInitInst->moveBefore(InsBefore);
3149 if (LocalEscapeCall) LocalEscapeCall->
moveBefore(InsBefore);
3155 ASan.getAllocaSizeInBytes(*AI),
3166 uint64_t Granularity = 1ULL << Mapping.Scale;
3167 uint64_t MinHeaderSize = std::max((
uint64_t)ASan.LongSize / 2, Granularity);
3173 for (
auto &Desc : SVD)
3174 AllocaToSVDMap[Desc.AI] = &Desc;
3177 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3180 assert(ASan.isInterestingAlloca(*APC.AI));
3181 assert(APC.AI->isStaticAlloca());
3186 if (
const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3187 if (LifetimeLoc->getFile() == FnLoc->getFile())
3188 if (
unsigned Line = LifetimeLoc->getLine())
3189 Desc.
Line = std::min(Desc.
Line ? Desc.
Line : Line, Line);
3195 LLVM_DEBUG(
dbgs() << DescriptionString <<
" --- " <<
L.FrameSize <<
"\n");
3197 bool DoStackMalloc =
3207 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3208 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3210 Value *StaticAlloca =
3211 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L,
false);
3214 Value *LocalStackBase;
3215 Value *LocalStackBaseAlloca;
3218 if (DoStackMalloc) {
3219 LocalStackBaseAlloca =
3220 IRB.
CreateAlloca(IntptrTy,
nullptr,
"asan_local_stack_base");
3227 Constant *OptionDetectUseAfterReturn =
F.getParent()->getOrInsertGlobal(
3237 Value *FakeStackValue =
3238 IRBIf.CreateCall(AsanStackMallocFunc[StackMallocIdx],
3241 FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
3249 FakeStack = IRB.
CreateCall(AsanStackMallocFunc[StackMallocIdx],
3252 Value *NoFakeStack =
3257 Value *AllocaValue =
3258 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L,
true) : StaticAlloca;
3261 LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
3262 IRB.
CreateStore(LocalStackBase, LocalStackBaseAlloca);
3269 DoDynamicAlloca ? createAllocaForLayout(IRB, L,
true) : StaticAlloca;
3270 LocalStackBaseAlloca = LocalStackBase;
3276 Value *LocalStackBaseAllocaPtr =
3277 isa<PtrToIntInst>(LocalStackBaseAlloca)
3278 ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
3279 : LocalStackBaseAlloca;
3280 assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
3281 "Variable descriptions relative to ASan stack base will be dropped");
3284 for (
const auto &Desc : SVD) {
3319 Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
3322 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3324 if (!StaticAllocaPoisonCallVec.empty()) {
3328 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3331 size_t Begin = Desc.
Offset /
L.Granularity;
3332 size_t End = Begin + (APC.Size +
L.Granularity - 1) /
L.Granularity;
3335 copyToShadow(ShadowAfterScope,
3336 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
3350 if (DoStackMalloc) {
3351 assert(StackMallocIdx >= 0);
3368 if (StackMallocIdx <= 4) {
3370 ShadowAfterReturn.
resize(ClassSize /
L.Granularity,
3372 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3374 Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
3377 Value *SavedFlagPtr = IRBPoison.CreateLoad(
3378 IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
3379 IRBPoison.CreateStore(
3381 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
3384 IRBPoison.CreateCall(
3385 AsanStackFreeFunc[StackMallocIdx],
3390 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3392 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3397 for (
auto *AI : AllocaVec)
3407 DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3408 {AddrArg, SizeArg});
3419void FunctionStackPoisoner::handleDynamicAllocaCall(
AllocaInst *AI) {
3433 const unsigned ElementSize =
3468 IRB.
CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize});
3493 if (!ObjSizeVis.
bothKnown(SizeOffset))
return false;
3495 int64_t
Offset = SizeOffset.second.getSExtValue();
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
amdgpu Simplify well known AMD library false FunctionCallee Value * Arg
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
static const uint64_t kDefaultShadowScale
constexpr size_t kAccessSizeIndexMask
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
static const uint64_t kFreeBSD_ShadowOffset32
constexpr size_t kIsWriteShift
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
static bool isInterestingPointerSubtraction(Instruction *I)
const char kAMDGPUAddressSharedName[]
const char kAsanStackFreeNameTemplate[]
constexpr size_t kCompileKernelMask
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
const char kAsanStackMallocNameTemplate[]
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
static const uint64_t kEmscriptenShadowOffset
const char kAsanGenPrefix[]
constexpr size_t kIsWriteMask
static uint64_t getRedzoneSizeForScale(int MappingScale)
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp)
static const uint64_t kDefaultShadowOffset64
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
static const uint64_t kAsanCtorAndDtorPriority
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(false), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanUnpoisonGlobalsName[]
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
const char kAsanRegisterImageGlobalsName[]
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
const char kODRGenPrefix[]
static const uint64_t kSystemZ_ShadowOffset64
static const uint64_t kDefaultShadowOffset32
const char kAsanShadowMemoryDynamicAddress[]
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
const char kAsanStackMallocAlwaysNameTemplate[]
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static bool isPointerOperand(Value *V)
static const uint64_t kFreeBSD_ShadowOffset64
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
static const uint64_t kFreeBSDKasan_ShadowOffset64
const char kAsanModuleDtorName[]
static const uint64_t kDynamicShadowSentinel
static bool isInterestingPointerComparison(Instruction *I)
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
static const uint64_t kLinuxKasan_ShadowOffset64
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
const char kAsanUnpoisonStackMemoryName[]
static const uint64_t kLoongArch64_ShadowOffset64
const char kAsanRegisterGlobalsName[]
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
const char kAsanGlobalsRegisteredFlagName[]
static const size_t kMaxStackMallocSize
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
const char kAsanUnregisterImageGlobalsName[]
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
static const uint64_t kPPC64_ShadowOffset64
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kMIPS32_ShadowOffset32
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
static const uint64_t kFreeBSDAArch64_ShadowOffset64
static const uint64_t kSmallX86_64ShadowOffsetBase
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
static size_t TypeStoreSizeToSizeIndex(uint32_t TypeSize)
const char kAsanAllocaPoison[]
constexpr size_t kCompileKernelShift
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
const char kAMDGPUAddressPrivateName[]
static const uint64_t kNetBSDKasan_ShadowOffset64
static void instrumentMaskedLoadOrStore(AddressSanitizer *Pass, const DataLayout &DL, Type *IntptrTy, Value *Mask, Instruction *I, Value *Addr, MaybeAlign Alignment, unsigned Granularity, Type *OpType, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp)
const char kAsanRegisterElfGlobalsName[]
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
const char kAsanPoisonGlobalsName[]
const char kAsanHandleNoReturnName[]
static const size_t kMinStackMallocSize
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
static const uint64_t kAArch64_ShadowOffset64
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
This file contains the simple types necessary to represent the attributes associated with functions a...
static const Function * getParent(const Value *V)
SmallVector< MachineOperand, 4 > Cond
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
Module.h This file contains the declarations for the Module class.
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
This defines the Use class.
AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
static ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
AttributeList addParamAttribute(LLVMContext &C, unsigned ArgNo, Attribute::AttrKind Kind) const
Add an argument attribute to the list.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
const CallInst * getTerminatingMustTailCall() const
Returns the call instruction marked 'musttail' prior to the terminating return instruction of this ba...
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
Value * getArgOperand(unsigned i) const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getIntToPtr(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, bool InBounds=false, std::optional< unsigned > InRangeIndex=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
static bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static Constant * get(Type *Ty, uint64_t V, bool IsSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
A parsed version of the target data layout string in and methods for querying it.
DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
static FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags applied.
const Constant * getAliasee() const
static GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalObject.
void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
void setComdat(Comdat *C)
void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1',...
ThreadLocalMode getThreadLocalMode() const
Module * getParent()
Get the module that this global value is contained inside of...
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) fro...
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
Common base class shared among various IRBuilders.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
BasicBlock::iterator GetInsertPoint() const
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateTypeSize(Type *DstType, TypeSize Size)
Create an expression which evaluates to the number of units in Size at runtime.
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool...
PointerType * getInt8PtrTy(unsigned AddrSpace=0)
Fetch the type representing a pointer to an 8-bit integer value.
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args=std::nullopt, const Twine &Name="", MDNode *FPMathTag=nullptr)
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", bool IsInBounds=false)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, MDNode *TBAATag=nullptr, MDNode *TBAAStructTag=nullptr, MDNode *ScopeTag=nullptr, MDNode *NoAliasTag=nullptr)
Create and insert a memcpy between the specified pointers.
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
static InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
An analysis over an "outer" IR unit that provides access to an analysis manager over an "inner" IR un...
Base class for instruction visitors.
RetTy visitCallBase(CallBase &I)
RetTy visitCleanupReturnInst(CleanupReturnInst &I)
RetTy visitIntrinsicInst(IntrinsicInst &I)
void visit(Iterator Start, Iterator End)
RetTy visitReturnInst(ReturnInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitResumeInst(ResumeInst &I)
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
const Module * getModule() const
Return the module owning the function this instruction belongs to or nullptr if the function does not...
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
const BasicBlock * getParent() const
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
SymbolTableList< Instruction >::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
void moveBefore(Instruction *MovePos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos ...
static IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
Intrinsic::ID getIntrinsicID() const
Return the intrinsic ID of this intrinsic.
This is an important class for using LLVM in a threaded context.
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
MDNode * createBranchWeights(uint32_t TrueWeight, uint32_t FalseWeight)
Return metadata containing two branch weights.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Evaluate the size and offset of an object pointed to by a Value* statically.
static bool bothKnown(const SizeOffsetType &SizeOffset)
SizeOffsetType compute(Value *V)
Pass interface - Implemented by all 'passes'.
static PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
void abandon()
Mark an analysis as abandoned.
Resume the propagation of an exception.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, Instruction *InsertBefore=nullptr)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
constexpr bool empty() const
empty - Check if the string is empty.
bool startswith(StringRef Prefix) const
const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Class to represent struct types.
static StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
Triple - Helper class for working with autoconf configuration names.
bool isAndroidVersionLT(unsigned Major) const
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
EnvironmentType getEnvironment() const
Get the parsed environment type of this triple.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
PointerType * getPointerTo(unsigned AddrSpace=0) const
Return a pointer to the current type.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
static Type * getVoidTy(LLVMContext &C)
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static PointerType * getInt8PtrTy(LLVMContext &C, unsigned AS=0)
static IntegerType * getInt32Ty(LLVMContext &C)
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
This file contains the declaration of the Comdat class, which represents a single COMDAT in LLVM.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
@ C
The default llvm calling convention, compatible with C.
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=std::nullopt)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
@ S_CSTRING_LITERALS
S_CSTRING_LITERALS - Section with literal C strings.
ValuesClass values(OptsTy... Options)
Helper to build a ValuesClass by forwarding a variable number of arguments as an initializer list to ...
initializer< Ty > init(const Ty &Val)
Linkage
Describes symbol linkage.
uint64_t getAllocaSizeInBytes(const AllocaInst &AI)
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
std::string demangle(const std::string &MangledName)
Attempt to demangle a string using different demangling schemes.
std::pair< APInt, APInt > SizeOffsetType
AllocaInst * findAllocaForValue(Value *V, bool OffsetZero=false)
Returns unique alloca where the value comes from, or nullptr.
Function * createSanitizerCtor(Module &M, StringRef CtorName)
Creates sanitizer constructor function.
AsanDetectStackUseAfterReturnMode
Mode of ASan detect stack use after return.
@ Always
Always detect stack use after return.
@ Never
Never detect stack use after return.
@ Runtime
Detect stack use after return if not disabled at runtime with (ASAN_OPTIONS=detect_stack_use_after_retur...
GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging, const char *NamePrefix="")
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments and pointer casts from the specified value,...
void SplitBlockAndInsertIfThenElse(Value *Cond, Instruction *SplitBefore, Instruction **ThenTerm, Instruction **ElseTerm, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr)
SplitBlockAndInsertIfThenElse is similar to SplitBlockAndInsertIfThen, but also creates the ElseBlock...
bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
int countr_zero(T Val)
Count number of 0's from the least significant bit to the most significant bit, stopping at the first 1.
FunctionCallee declareSanitizerInitFunction(Module &M, StringRef InitName, ArrayRef< Type * > InitArgTypes, bool Weak=false)
std::string getUniqueModuleId(Module *M)
Produce a unique identifier for this module by taking the MD5 sum of the names of the module's strong...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
std::pair< Function *, FunctionCallee > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef(), bool Weak=false)
Creates sanitizer constructor function, and calls sanitizer's init function from it.
decltype(auto) get(const PointerIntPair< PointerTy, IntBits, IntType, PtrTraits, Info > &Pair)
void SplitBlockAndInsertForEachLane(ElementCount EC, Type *IndexTy, Instruction *InsertBefore, std::function< void(IRBuilderBase &, Value *)> Func)
Utility function for performing a given action on each lane of a vector with EC elements.
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
AsanDtorKind
Types of ASan module destructors supported.
@ None
Do not emit any destructors for ASan.
ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, uint64_t Granularity, uint64_t MinHeaderSize)
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
void appendToCompilerUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.compiler.used list.
static const int kAsanStackUseAfterReturnMagic
@ Dynamic
Denotes mode unknown at compile time.
void appendToGlobalCtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Append F to the list of global ctors of module M with the given Priority.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
iterator_range< df_iterator< T > > depth_first(const T &G)
Instruction * SplitBlockAndInsertIfThen(Value *Cond, Instruction *SplitBefore, bool Unreachable, MDNode *BranchWeights, DominatorTree *DT, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
AsanCtorKind
Types of ASan module constructors supported.
void maybeMarkSanitizerLibraryCallNoBuiltin(CallInst *CI, const TargetLibraryInfo *TLI)
Given a CallInst, check if it calls a string function known to CodeGen, and mark it with NoBuiltin if...
void appendToUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.used list.
void appendToGlobalDtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Same as appendToGlobalCtors(), but for global dtors.
void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize, bool IsKasan, uint64_t *ShadowBase, int *MappingScale, bool *OrShadowOffset)
bool replaceDbgDeclare(Value *Address, Value *NewAddress, DIBuilder &Builder, uint8_t DIExprFlags, int Offset)
Replaces llvm.dbg.declare instruction when the address it describes is replaced with a new value.
ASanAccessInfo(int32_t Packed)
AsanDetectStackUseAfterReturnMode UseAfterReturn
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
Various options to control the behavior of getObjectSize.
bool RoundToAlign
Whether to round the result up to the alignment of allocas, byval arguments, and global variables.
A CRTP mix-in to automatically provide informational APIs needed for passes.