94#define DEBUG_TYPE "asan"
100 std::numeric_limits<uint64_t>::max();
141 "__asan_unregister_image_globals";
154 "__asan_stack_malloc_always_";
168 "__asan_option_detect_stack_use_after_return";
171 "__asan_shadow_memory_dynamic_address";
197 "asan-kernel",
cl::desc(
"Enable KernelAddressSanitizer instrumentation"),
202 cl::desc(
"Enable recovery mode (continue-after-error)."),
206 "asan-guard-against-version-mismatch",
212 cl::desc(
"instrument read instructions"),
216 "asan-instrument-writes",
cl::desc(
"instrument write instructions"),
225 "asan-instrument-atomics",
235 "asan-always-slow-path",
240 "asan-force-dynamic-shadow",
241 cl::desc(
"Load shadow address into a local variable for each function"),
246 cl::desc(
"Access dynamic shadow through an ifunc global on "
247 "platforms that support this"),
251 "asan-with-ifunc-suppress-remat",
252 cl::desc(
"Suppress rematerialization of dynamic shadow address by passing "
253 "it through inline asm in prologue."),
261 "asan-max-ins-per-bb",
cl::init(10000),
262 cl::desc(
"maximal number of instructions to instrument in any given BB"),
269 "asan-max-inline-poisoning-size",
271 "Inline shadow poisoning for blocks up to the given size in bytes."),
275 "asan-use-after-return",
276 cl::desc(
"Sets the mode of detection for stack-use-after-return."),
278 clEnumValN(AsanDetectStackUseAfterReturnMode::Never,
"never",
279 "Never detect stack use after return."),
281 AsanDetectStackUseAfterReturnMode::Runtime,
"runtime",
282 "Detect stack use after return if "
283 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
284 clEnumValN(AsanDetectStackUseAfterReturnMode::Always,
"always",
285 "Always detect stack use after return.")),
289 cl::desc(
"Create redzones for byval "
290 "arguments (extra copy "
295 cl::desc(
"Check stack-use-after-scope"),
304 cl::desc(
"Handle C++ initializer order"),
308 "asan-detect-invalid-pointer-pair",
313 "asan-detect-invalid-pointer-cmp",
318 "asan-detect-invalid-pointer-sub",
323 "asan-realign-stack",
324 cl::desc(
"Realign stack to the value of this flag (power of two)"),
328 "asan-instrumentation-with-call-threshold",
329 cl::desc(
"If the function being instrumented contains more than "
330 "this number of memory accesses, use callbacks instead of "
331 "inline checks (-1 means never use callbacks)."),
335 "asan-memory-access-callback-prefix",
340 "asan-kernel-mem-intrinsic-prefix",
346 cl::desc(
"instrument dynamic allocas"),
350 "asan-skip-promotable-allocas",
355 "asan-constructor-kind",
356 cl::desc(
"Sets the ASan constructor kind"),
359 "Use global constructors")),
366 cl::desc(
"scale of asan shadow mapping"),
371 cl::desc(
"offset of asan shadow mapping [EXPERIMENTAL]"),
385 "asan-opt-same-temp",
cl::desc(
"Instrument the same temp just once"),
389 cl::desc(
"Don't instrument scalar globals"),
393 "asan-opt-stack",
cl::desc(
"Don't instrument scalar stack variables"),
397 "asan-stack-dynamic-alloca",
402 "asan-force-experiment",
408 cl::desc(
"Use private aliases for global variables"),
413 cl::desc(
"Use odr indicators to improve ODR reporting"),
418 cl::desc(
"Use linker features to support dead "
419 "code stripping of globals"),
426 cl::desc(
"Place ASan constructors in comdat sections"),
430 "asan-destructor-kind",
431 cl::desc(
"Sets the ASan destructor kind. The default is to use the value "
432 "provided to the pass constructor"),
435 "Use global destructors")),
455STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
456STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
458 "Number of optimized accesses to global vars");
460 "Number of optimized accesses to stack vars");
469struct ShadowMapping {
480 bool IsAndroid = TargetTriple.
isAndroid();
483 bool IsMacOS = TargetTriple.
isMacOSX();
486 bool IsPS = TargetTriple.
isPS();
493 bool IsMIPS32 = TargetTriple.
isMIPS32();
494 bool IsMIPS64 = TargetTriple.
isMIPS64();
495 bool IsArmOrThumb = TargetTriple.
isARM() || TargetTriple.
isThumb();
503 bool IsAMDGPU = TargetTriple.
isAMDGPU();
505 ShadowMapping Mapping;
512 if (LongSize == 32) {
515 else if (IsMIPSN32ABI)
527 else if (IsEmscripten)
540 else if (IsFreeBSD && IsAArch64)
542 else if (IsFreeBSD && !IsMIPS64) {
547 }
else if (IsNetBSD) {
554 else if (IsLinux && IsX86_64) {
560 }
else if (IsWindows && IsX86_64) {
566 else if (IsMacOS && IsAArch64)
570 else if (IsLoongArch64)
594 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
595 !IsRISCV64 && !IsLoongArch64 &&
596 !(Mapping.Offset & (Mapping.Offset - 1)) &&
598 bool IsAndroidWithIfuncSupport =
600 Mapping.InGlobal =
ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;
608 int *MappingScale,
bool *OrShadowOffset) {
610 *ShadowBase = Mapping.Offset;
611 *MappingScale = Mapping.Scale;
612 *OrShadowOffset = Mapping.OrShadowOffset;
622 uint8_t AccessSizeIndex)
626 AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
627 CompileKernel(CompileKernel) {}
634 return std::max(32U, 1U << MappingScale);
649class RuntimeCallInserter {
651 bool TrackInsertedCalls =
false;
655 RuntimeCallInserter(
Function &Fn) : OwnerFn(&Fn) {
659 TrackInsertedCalls =
true;
663 ~RuntimeCallInserter() {
664 if (InsertedCalls.
empty())
666 assert(TrackInsertedCalls &&
"Calls were wrongly tracked");
669 for (
CallInst *CI : InsertedCalls) {
671 assert(BB &&
"Instruction doesn't belong to a BasicBlock");
673 "Instruction doesn't belong to the expected Function!");
681 if (Colors.
size() != 1) {
683 "Instruction's BasicBlock is not monochromatic");
690 if (EHPad && EHPad->
isEHPad()) {
695 NewCall->copyMetadata(*CI);
696 CI->replaceAllUsesWith(NewCall);
697 CI->eraseFromParent();
708 if (TrackInsertedCalls)
709 InsertedCalls.push_back(Inst);
715struct AddressSanitizer {
717 int InstrumentationWithCallsThreshold,
718 uint32_t MaxInlinePoisoningSize,
bool CompileKernel =
false,
719 bool Recover =
false,
bool UseAfterScope =
false,
721 AsanDetectStackUseAfterReturnMode::Runtime)
729 InstrumentationWithCallsThreshold(
732 : InstrumentationWithCallsThreshold),
735 : MaxInlinePoisoningSize) {
736 C = &(
M.getContext());
737 DL = &
M.getDataLayout();
738 LongSize =
M.getDataLayout().getPointerSizeInBits();
740 PtrTy = PointerType::getUnqual(*C);
742 TargetTriple =
Triple(
M.getTargetTriple());
746 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
754 bool isInterestingAlloca(
const AllocaInst &AI);
757 void getInterestingMemoryOperands(
762 const DataLayout &DL, RuntimeCallInserter &RTCI);
763 void instrumentPointerComparisonOrSubtraction(
Instruction *
I,
764 RuntimeCallInserter &RTCI);
767 uint32_t TypeStoreSize,
bool IsWrite,
769 RuntimeCallInserter &RTCI);
772 uint32_t TypeStoreSize,
bool IsWrite,
773 Value *SizeArgument);
778 TypeSize TypeStoreSize,
bool IsWrite,
779 Value *SizeArgument,
bool UseCalls,
781 RuntimeCallInserter &RTCI);
782 void instrumentMaskedLoadOrStore(AddressSanitizer *
Pass,
const DataLayout &DL,
786 Type *OpType,
bool IsWrite,
787 Value *SizeArgument,
bool UseCalls,
788 uint32_t Exp, RuntimeCallInserter &RTCI);
792 bool IsWrite,
size_t AccessSizeIndex,
794 RuntimeCallInserter &RTCI);
795 void instrumentMemIntrinsic(
MemIntrinsic *
MI, RuntimeCallInserter &RTCI);
797 bool suppressInstrumentationSiteForDebug(
int &Instrumented);
799 bool maybeInsertAsanInitAtFunctionEntry(
Function &
F);
800 bool maybeInsertDynamicShadowAtFunctionEntry(
Function &
F);
801 void markEscapedLocalAllocas(
Function &
F);
804 friend struct FunctionStackPoisoner;
814 struct FunctionStateRAII {
815 AddressSanitizer *
Pass;
817 FunctionStateRAII(AddressSanitizer *
Pass) :
Pass(
Pass) {
819 "last pass forgot to clear cache");
823 ~FunctionStateRAII() {
824 Pass->LocalDynamicShadow =
nullptr;
825 Pass->ProcessedAllocas.clear();
840 ShadowMapping Mapping;
854 Value *LocalDynamicShadow =
nullptr;
860 int InstrumentationWithCallsThreshold;
864class ModuleAddressSanitizer {
866 ModuleAddressSanitizer(
Module &M,
bool InsertVersionCheck,
867 bool CompileKernel =
false,
bool Recover =
false,
868 bool UseGlobalsGC =
true,
bool UseOdrIndicator =
true,
875 : InsertVersionCheck),
877 UseGlobalsGC(UseGlobalsGC &&
ClUseGlobalsGC && !this->CompileKernel),
892 UseCtorComdat(UseGlobalsGC &&
ClWithComdat && !this->CompileKernel),
893 DestructorKind(DestructorKind),
897 C = &(
M.getContext());
898 int LongSize =
M.getDataLayout().getPointerSizeInBits();
900 PtrTy = PointerType::getUnqual(*C);
901 TargetTriple =
Triple(
M.getTargetTriple());
906 assert(this->DestructorKind != AsanDtorKind::Invalid);
909 bool instrumentModule(
Module &);
912 void initializeCallbacks(
Module &M);
921 const std::string &UniqueModuleId);
938 bool ShouldUseMachOGlobalsSection()
const;
939 StringRef getGlobalMetadataSection()
const;
942 uint64_t getMinRedzoneSizeForGlobal()
const {
946 int GetAsanVersion(
const Module &M)
const;
949 bool InsertVersionCheck;
952 bool UsePrivateAlias;
953 bool UseOdrIndicator;
961 ShadowMapping Mapping;
971 Function *AsanCtorFunction =
nullptr;
972 Function *AsanDtorFunction =
nullptr;
984struct FunctionStackPoisoner :
public InstVisitor<FunctionStackPoisoner> {
986 AddressSanitizer &ASan;
987 RuntimeCallInserter &RTCI;
992 ShadowMapping Mapping;
1001 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
1005 struct AllocaPoisonCall {
1013 bool HasUntracedLifetimeIntrinsic =
false;
1020 bool HasInlineAsm =
false;
1021 bool HasReturnsTwiceCall =
false;
1024 FunctionStackPoisoner(
Function &F, AddressSanitizer &ASan,
1025 RuntimeCallInserter &RTCI)
1026 :
F(
F), ASan(ASan), RTCI(RTCI),
1028 IntptrTy(ASan.IntptrTy), IntptrPtrTy(
PointerType::
get(IntptrTy, 0)),
1029 Mapping(ASan.Mapping),
1038 copyArgsPassedByValToAllocas();
1043 if (AllocaVec.
empty() && DynamicAllocaVec.
empty())
return false;
1045 initializeCallbacks(*
F.getParent());
1047 if (HasUntracedLifetimeIntrinsic) {
1051 StaticAllocaPoisonCallVec.
clear();
1052 DynamicAllocaPoisonCallVec.
clear();
1055 processDynamicAllocas();
1056 processStaticAllocas();
1067 void copyArgsPassedByValToAllocas();
1072 void processStaticAllocas();
1073 void processDynamicAllocas();
1075 void createDynamicAllocasInitStorage();
1093 void unpoisonDynamicAllocasBeforeInst(
Instruction *InstBefore,
1094 Value *SavedStack) {
1101 if (!isa<ReturnInst>(InstBefore)) {
1103 InstBefore->
getModule(), Intrinsic::get_dynamic_area_offset,
1112 RTCI.createRuntimeCall(
1113 IRB, AsanAllocasUnpoisonFunc,
1114 {IRB.
CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1118 void unpoisonDynamicAllocas() {
1120 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1122 for (
Instruction *StackRestoreInst : StackRestoreVec)
1123 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1124 StackRestoreInst->getOperand(0));
1137 void handleDynamicAllocaCall(
AllocaInst *AI);
1142 if (!ASan.isInterestingAlloca(AI) ||
1147 if (AllocaVec.
empty())
1165 if (
ID == Intrinsic::stackrestore) StackRestoreVec.
push_back(&II);
1166 if (
ID == Intrinsic::localescape) LocalEscapeCall = &II;
1167 if (!ASan.UseAfterScope)
1174 if (
Size->isMinusOne())
return;
1177 const uint64_t SizeValue =
Size->getValue().getLimitedValue();
1178 if (SizeValue == ~0ULL ||
1186 HasUntracedLifetimeIntrinsic =
true;
1190 if (!ASan.isInterestingAlloca(*AI))
1192 bool DoPoison = (
ID == Intrinsic::lifetime_end);
1193 AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
1195 StaticAllocaPoisonCallVec.
push_back(APC);
1197 DynamicAllocaPoisonCallVec.
push_back(APC);
1201 if (
CallInst *CI = dyn_cast<CallInst>(&CB)) {
1202 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1203 HasReturnsTwiceCall |= CI->canReturnTwice();
1208 void initializeCallbacks(
Module &M);
1235 OS, MapClassName2PassName);
1247 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1248 ConstructorKind(ConstructorKind) {}
1252 ModuleAddressSanitizer ModuleSanitizer(
1254 UseGlobalGC, UseOdrIndicator, DestructorKind, ConstructorKind);
1260 AddressSanitizer FunctionSanitizer(
1265 Modified |= FunctionSanitizer.instrumentFunction(
F, &TLI);
1267 Modified |= ModuleSanitizer.instrumentModule(M);
1288 if (
G->getName().starts_with(
"llvm.") ||
1290 G->getName().starts_with(
"__llvm_gcov_ctr") ||
1292 G->getName().starts_with(
"__llvm_rtti_proxy"))
1305 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1307 if (AddrSpace == 3 || AddrSpace == 5)
1314 Shadow = IRB.
CreateLShr(Shadow, Mapping.Scale);
1315 if (Mapping.Offset == 0)
return Shadow;
1318 if (LocalDynamicShadow)
1319 ShadowBase = LocalDynamicShadow;
1321 ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
1322 if (Mapping.OrShadowOffset)
1323 return IRB.
CreateOr(Shadow, ShadowBase);
1325 return IRB.
CreateAdd(Shadow, ShadowBase);
1330 RuntimeCallInserter &RTCI) {
1332 if (isa<MemTransferInst>(
MI)) {
1333 RTCI.createRuntimeCall(
1334 IRB, isa<MemMoveInst>(
MI) ? AsanMemmove : AsanMemcpy,
1338 }
else if (isa<MemSetInst>(
MI)) {
1339 RTCI.createRuntimeCall(
1345 MI->eraseFromParent();
1349bool AddressSanitizer::isInterestingAlloca(
const AllocaInst &AI) {
1350 auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
1352 if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
1353 return PreviouslySeenAllocaInfo->getSecond();
1355 bool IsInteresting =
1368 !(SSGI && SSGI->
isSafe(AI)));
1370 ProcessedAllocas[&AI] = IsInteresting;
1371 return IsInteresting;
1376 Type *PtrTy = cast<PointerType>(
Ptr->getType()->getScalarType());
1385 if (
Ptr->isSwiftError())
1391 if (
auto AI = dyn_cast_or_null<AllocaInst>(
Ptr))
1402void AddressSanitizer::getInterestingMemoryOperands(
1405 if (LocalDynamicShadow ==
I)
1408 if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
1411 Interesting.
emplace_back(
I, LI->getPointerOperandIndex(),
false,
1412 LI->getType(), LI->getAlign());
1413 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(
I)) {
1417 SI->getValueOperand()->getType(),
SI->getAlign());
1421 Interesting.
emplace_back(
I, RMW->getPointerOperandIndex(),
true,
1422 RMW->getValOperand()->getType(), std::nullopt);
1426 Interesting.
emplace_back(
I, XCHG->getPointerOperandIndex(),
true,
1427 XCHG->getCompareOperand()->getType(),
1429 }
else if (
auto CI = dyn_cast<CallInst>(
I)) {
1430 switch (CI->getIntrinsicID()) {
1431 case Intrinsic::masked_load:
1432 case Intrinsic::masked_store:
1433 case Intrinsic::masked_gather:
1434 case Intrinsic::masked_scatter: {
1435 bool IsWrite = CI->getType()->isVoidTy();
1437 unsigned OpOffset = IsWrite ? 1 : 0;
1441 auto BasePtr = CI->getOperand(OpOffset);
1442 if (ignoreAccess(
I, BasePtr))
1444 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1447 if (
auto *
Op = dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
1448 Alignment =
Op->getMaybeAlignValue();
1449 Value *
Mask = CI->getOperand(2 + OpOffset);
1450 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, Mask);
1453 case Intrinsic::masked_expandload:
1454 case Intrinsic::masked_compressstore: {
1455 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_compressstore;
1456 unsigned OpOffset = IsWrite ? 1 : 0;
1459 auto BasePtr = CI->getOperand(OpOffset);
1460 if (ignoreAccess(
I, BasePtr))
1463 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1466 Value *
Mask = CI->getOperand(1 + OpOffset);
1469 Value *ExtMask =
IB.CreateZExt(Mask, ExtTy);
1470 Value *EVL =
IB.CreateAddReduce(ExtMask);
1471 Value *TrueMask = ConstantInt::get(
Mask->getType(), 1);
1472 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, TrueMask,
1476 case Intrinsic::vp_load:
1477 case Intrinsic::vp_store:
1478 case Intrinsic::experimental_vp_strided_load:
1479 case Intrinsic::experimental_vp_strided_store: {
1480 auto *VPI = cast<VPIntrinsic>(CI);
1481 unsigned IID = CI->getIntrinsicID();
1482 bool IsWrite = CI->getType()->isVoidTy();
1485 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1486 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1487 MaybeAlign Alignment = VPI->getOperand(PtrOpNo)->getPointerAlignment(*
DL);
1488 Value *Stride =
nullptr;
1489 if (IID == Intrinsic::experimental_vp_strided_store ||
1490 IID == Intrinsic::experimental_vp_strided_load) {
1491 Stride = VPI->getOperand(PtrOpNo + 1);
1496 if (!isa<ConstantInt>(Stride) ||
1497 cast<ConstantInt>(Stride)->getZExtValue() % PointerAlign != 0)
1498 Alignment =
Align(1);
1500 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1501 VPI->getMaskParam(), VPI->getVectorLengthParam(),
1505 case Intrinsic::vp_gather:
1506 case Intrinsic::vp_scatter: {
1507 auto *VPI = cast<VPIntrinsic>(CI);
1508 unsigned IID = CI->getIntrinsicID();
1509 bool IsWrite = IID == Intrinsic::vp_scatter;
1512 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1513 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1514 MaybeAlign Alignment = VPI->getPointerAlignment();
1515 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1516 VPI->getMaskParam(),
1517 VPI->getVectorLengthParam());
1521 for (
unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1523 ignoreAccess(
I, CI->getArgOperand(ArgNo)))
1525 Type *Ty = CI->getParamByValType(ArgNo);
1533 return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
1540 if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(
I)) {
1541 if (!Cmp->isRelational())
1555 if (BO->getOpcode() != Instruction::Sub)
1568 if (!
G->hasInitializer())
1571 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().IsDynInit)
1577void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1580 FunctionCallee F = isa<ICmpInst>(
I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
1581 Value *
Param[2] = {
I->getOperand(0),
I->getOperand(1)};
1582 for (
Value *&i : Param) {
1583 if (i->getType()->isPointerTy())
1586 RTCI.createRuntimeCall(IRB,
F, Param);
1592 TypeSize TypeStoreSize,
bool IsWrite,
1593 Value *SizeArgument,
bool UseCalls,
1594 uint32_t Exp, RuntimeCallInserter &RTCI) {
1599 switch (FixedSize) {
1605 if (!Alignment || *Alignment >= Granularity ||
1606 *Alignment >= FixedSize / 8)
1607 return Pass->instrumentAddress(
I, InsertBefore,
Addr, Alignment,
1608 FixedSize, IsWrite,
nullptr, UseCalls,
1612 Pass->instrumentUnusualSizeOrAlignment(
I, InsertBefore,
Addr, TypeStoreSize,
1613 IsWrite,
nullptr, UseCalls, Exp, RTCI);
1616void AddressSanitizer::instrumentMaskedLoadOrStore(
1619 MaybeAlign Alignment,
unsigned Granularity,
Type *OpType,
bool IsWrite,
1621 RuntimeCallInserter &RTCI) {
1622 auto *VTy = cast<VectorType>(OpType);
1623 TypeSize ElemTypeSize =
DL.getTypeStoreSizeInBits(VTy->getScalarType());
1624 auto Zero = ConstantInt::get(IntptrTy, 0);
1632 Value *IsEVLZero =
IB.CreateICmpNE(EVL, ConstantInt::get(EVLType, 0));
1634 IB.SetInsertPoint(LoopInsertBefore);
1636 EVL =
IB.CreateZExtOrTrunc(EVL, IntptrTy);
1639 Value *
EC =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1640 EVL =
IB.CreateBinaryIntrinsic(Intrinsic::umin, EVL, EC);
1642 EVL =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1647 Stride =
IB.CreateZExtOrTrunc(Stride, IntptrTy);
1652 if (
auto *MaskElemC = dyn_cast<ConstantInt>(MaskElem)) {
1653 if (MaskElemC->isZero())
1664 Value *InstrumentedAddress;
1665 if (isa<VectorType>(
Addr->getType())) {
1667 cast<VectorType>(
Addr->getType())->getElementType()->isPointerTy() &&
1668 "Expected vector of pointer.");
1670 }
else if (Stride) {
1677 Alignment, Granularity, ElemTypeSize, IsWrite,
1678 SizeArgument, UseCalls, Exp, RTCI);
1685 RuntimeCallInserter &RTCI) {
1706 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1707 NumOptimizedAccessesToGlobalVar++;
1715 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1716 NumOptimizedAccessesToStackVar++;
1722 NumInstrumentedWrites++;
1724 NumInstrumentedReads++;
1726 unsigned Granularity = 1 << Mapping.Scale;
1728 instrumentMaskedLoadOrStore(
this,
DL, IntptrTy,
O.MaybeMask,
O.MaybeEVL,
1729 O.MaybeStride,
O.getInsn(),
Addr,
O.Alignment,
1730 Granularity,
O.OpType,
O.IsWrite,
nullptr,
1731 UseCalls, Exp, RTCI);
1734 Granularity,
O.TypeStoreSize,
O.IsWrite,
nullptr,
1735 UseCalls, Exp, RTCI);
1741 size_t AccessSizeIndex,
1742 Value *SizeArgument,
1744 RuntimeCallInserter &RTCI) {
1750 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][0],
1751 {
Addr, SizeArgument});
1753 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][1],
1754 {
Addr, SizeArgument, ExpVal});
1757 Call = RTCI.createRuntimeCall(
1758 IRB, AsanErrorCallback[IsWrite][0][AccessSizeIndex],
Addr);
1760 Call = RTCI.createRuntimeCall(
1761 IRB, AsanErrorCallback[IsWrite][1][AccessSizeIndex], {
Addr, ExpVal});
1764 Call->setCannotMerge();
1771 size_t Granularity =
static_cast<size_t>(1) << Mapping.Scale;
1773 Value *LastAccessedByte =
1774 IRB.
CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
1776 if (TypeStoreSize / 8 > 1)
1778 LastAccessedByte, ConstantInt::get(IntptrTy, TypeStoreSize / 8 - 1));
1786Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1788 uint32_t TypeStoreSize,
bool IsWrite,
Value *SizeArgument) {
1792 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1795 return InsertBefore;
1800 Value *IsSharedOrPrivate = IRB.
CreateOr(IsShared, IsPrivate);
1802 Value *AddrSpaceZeroLanding =
1804 InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
1805 return InsertBefore;
1821 Trm->getParent()->setName(
"asan.report");
1832void AddressSanitizer::instrumentAddress(
Instruction *OrigIns,
1835 uint32_t TypeStoreSize,
bool IsWrite,
1836 Value *SizeArgument,
bool UseCalls,
1838 RuntimeCallInserter &RTCI) {
1839 if (TargetTriple.isAMDGPU()) {
1840 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore,
Addr,
1841 TypeStoreSize, IsWrite, SizeArgument);
1848 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1851 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1856 ConstantInt::get(
Int32Ty, AccessInfo.Packed)});
1863 RTCI.createRuntimeCall(
1864 IRB, AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex], AddrLong);
1866 RTCI.createRuntimeCall(
1867 IRB, AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1868 {AddrLong, ConstantInt::get(IRB.
getInt32Ty(), Exp)});
1875 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1877 std::max<uint64_t>(Alignment.
valueOrOne().
value() >> Mapping.Scale, 1);
1882 size_t Granularity = 1ULL << Mapping.Scale;
1885 bool GenSlowPath = (
ClAlwaysSlowPath || (TypeStoreSize < 8 * Granularity));
1887 if (TargetTriple.isAMDGCN()) {
1889 auto *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1892 CrashTerm = genAMDGPUReportBlock(IRB, Cmp, Recover);
1893 }
else if (GenSlowPath) {
1898 assert(cast<BranchInst>(CheckTerm)->isUnconditional());
1901 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1916 CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument, Exp, RTCI);
1925void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1927 TypeSize TypeStoreSize,
bool IsWrite,
Value *SizeArgument,
bool UseCalls,
1928 uint32_t Exp, RuntimeCallInserter &RTCI) {
1936 RTCI.createRuntimeCall(IRB, AsanMemoryAccessCallbackSized[IsWrite][0],
1939 RTCI.createRuntimeCall(
1940 IRB, AsanMemoryAccessCallbackSized[IsWrite][1],
1947 instrumentAddress(
I, InsertBefore,
Addr, {}, 8, IsWrite,
Size,
false,
Exp,
1949 instrumentAddress(
I, InsertBefore, LastByte, {}, 8, IsWrite,
Size,
false,
1954void ModuleAddressSanitizer::poisonOneInitializer(
Function &GlobalInit,
1962 IRB.
CreateCall(AsanPoisonGlobals, ModuleNameAddr);
1965 for (
auto &BB : GlobalInit)
1970void ModuleAddressSanitizer::createInitializerPoisonCalls(
1981 if (isa<ConstantAggregateZero>(
OP))
continue;
1987 auto *Priority = cast<ConstantInt>(CS->
getOperand(0));
1997ModuleAddressSanitizer::getExcludedAliasedGlobal(
const GlobalAlias &GA)
const {
2002 assert(CompileKernel &&
"Only expecting to be called when compiling kernel");
2009 return dyn_cast<GlobalVariable>(
C->stripPointerCastsAndAliases());
2014bool ModuleAddressSanitizer::shouldInstrumentGlobal(
GlobalVariable *
G)
const {
2015 Type *Ty =
G->getValueType();
2018 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().NoAddress)
2020 if (!Ty->
isSized())
return false;
2021 if (!
G->hasInitializer())
return false;
2023 if (
G->getAddressSpace() &&
2030 if (
G->isThreadLocal())
return false;
2032 if (
G->getAlign() && *
G->getAlign() > getMinRedzoneSizeForGlobal())
return false;
2038 if (!TargetTriple.isOSBinFormatCOFF()) {
2039 if (!
G->hasExactDefinition() ||
G->hasComdat())
2043 if (
G->isInterposable())
2047 if (
G->hasAvailableExternallyLinkage())
2054 switch (
C->getSelectionKind()) {
2065 if (
G->hasSection()) {
2075 if (Section ==
"llvm.metadata")
return false;
2082 if (
Section.starts_with(
".preinit_array") ||
2083 Section.starts_with(
".init_array") ||
2084 Section.starts_with(
".fini_array")) {
2090 if (TargetTriple.isOSBinFormatELF()) {
2092 [](
char c) {
return llvm::isAlnum(c) || c ==
'_'; }))
2104 if (TargetTriple.isOSBinFormatCOFF() &&
Section.contains(
'$')) {
2105 LLVM_DEBUG(
dbgs() <<
"Ignoring global in sorted section (contains '$'): "
2110 if (TargetTriple.isOSBinFormatMachO()) {
2112 unsigned TAA = 0, StubSize = 0;
2115 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
2120 if (ParsedSegment ==
"__OBJC" ||
2121 (ParsedSegment ==
"__DATA" && ParsedSection.
starts_with(
"__objc_"))) {
2133 if (ParsedSegment ==
"__DATA" && ParsedSection ==
"__cfstring") {
2146 if (CompileKernel) {
2149 if (
G->getName().starts_with(
"__"))
2159bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection()
const {
2160 if (!TargetTriple.isOSBinFormatMachO())
2163 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
2165 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
2167 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
2169 if (TargetTriple.isDriverKit())
2171 if (TargetTriple.isXROS())
2177StringRef ModuleAddressSanitizer::getGlobalMetadataSection()
const {
2178 switch (TargetTriple.getObjectFormat()) {
2188 "ModuleAddressSanitizer not implemented for object file format");
2195void ModuleAddressSanitizer::initializeCallbacks(
Module &M) {
2201 AsanUnpoisonGlobals =
2205 AsanRegisterGlobals =
M.getOrInsertFunction(
2207 AsanUnregisterGlobals =
M.getOrInsertFunction(
2212 AsanRegisterImageGlobals =
M.getOrInsertFunction(
2214 AsanUnregisterImageGlobals =
M.getOrInsertFunction(
2217 AsanRegisterElfGlobals =
2219 IntptrTy, IntptrTy, IntptrTy);
2220 AsanUnregisterElfGlobals =
2222 IntptrTy, IntptrTy, IntptrTy);
2227void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
2232 if (!
G->hasName()) {
2239 if (!InternalSuffix.
empty() &&
G->hasLocalLinkage()) {
2240 std::string
Name = std::string(
G->getName());
2241 Name += InternalSuffix;
2242 C =
M.getOrInsertComdat(
Name);
2244 C =
M.getOrInsertComdat(
G->getName());
2250 if (TargetTriple.isOSBinFormatCOFF()) {
2252 if (
G->hasPrivateLinkage())
2265ModuleAddressSanitizer::CreateMetadataGlobal(
Module &M,
Constant *Initializer,
2267 auto Linkage = TargetTriple.isOSBinFormatMachO()
2271 M, Initializer->
getType(),
false, Linkage, Initializer,
2273 Metadata->setSection(getGlobalMetadataSection());
2284 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2292void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2296 auto &
DL =
M.getDataLayout();
2299 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2300 Constant *Initializer = MetadataInitializers[i];
2303 CreateMetadataGlobal(M, Initializer,
G->getName());
2305 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2311 unsigned SizeOfGlobalStruct =
DL.getTypeAllocSize(Initializer->
getType());
2313 "global metadata will not be padded appropriately");
2316 SetComdatForGlobalMetadata(
G,
Metadata,
"");
2321 if (!MetadataGlobals.empty())
2325void ModuleAddressSanitizer::instrumentGlobalsELF(
2328 const std::string &UniqueModuleId) {
2335 bool UseComdatForGlobalsGC = UseOdrIndicator && !UniqueModuleId.empty();
2338 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2341 CreateMetadataGlobal(M, MetadataInitializers[i],
G->getName());
2343 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2346 if (UseComdatForGlobalsGC)
2347 SetComdatForGlobalMetadata(
G,
Metadata, UniqueModuleId);
2352 if (!MetadataGlobals.empty())
2369 "__start_" + getGlobalMetadataSection());
2373 "__stop_" + getGlobalMetadataSection());
2387 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2394void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2405 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2406 Constant *Initializer = MetadataInitializers[i];
2409 CreateMetadataGlobal(M, Initializer,
G->getName());
2413 auto LivenessBinder =
2418 Twine(
"__asan_binder_") +
G->getName());
2419 Liveness->
setSection(
"__DATA,__asan_liveness,regular,live_support");
2420 LivenessGlobals[i] = Liveness;
2427 if (!LivenessGlobals.empty())
2449 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2454void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2458 unsigned N = ExtendedGlobals.
size();
2468 if (Mapping.Scale > 3)
2469 AllGlobals->setAlignment(
Align(1ULL << Mapping.Scale));
2474 ConstantInt::get(IntptrTy,
N)});
2480 IrbDtor.CreateCall(AsanUnregisterGlobals,
2482 ConstantInt::get(IntptrTy,
N)});
2496 if (CompileKernel) {
2497 for (
auto &GA :
M.aliases()) {
2499 AliasedGlobalExclusions.
insert(GV);
2504 for (
auto &
G :
M.globals()) {
2505 if (!AliasedGlobalExclusions.
count(&
G) && shouldInstrumentGlobal(&
G))
2509 size_t n = GlobalsToChange.
size();
2510 auto &
DL =
M.getDataLayout();
2524 IntptrTy, IntptrTy, IntptrTy);
2528 bool HasDynamicallyInitializedGlobals =
false;
2538 for (
size_t i = 0; i < n; i++) {
2542 if (
G->hasSanitizerMetadata())
2543 MD =
G->getSanitizerMetadata();
2548 std::string NameForGlobal =
G->getName().str();
2553 Type *Ty =
G->getValueType();
2554 const uint64_t SizeInBytes =
DL.getTypeAllocSize(Ty);
2555 const uint64_t RightRedzoneSize = getRedzoneSizeForGlobal(SizeInBytes);
2567 M, NewTy,
G->isConstant(), Linkage, NewInitializer,
"",
G,
2568 G->getThreadLocalMode(),
G->getAddressSpace());
2578 if (TargetTriple.isOSBinFormatMachO() && !
G->hasSection() &&
2580 auto Seq = dyn_cast<ConstantDataSequential>(
G->getInitializer());
2581 if (Seq && Seq->isCString())
2582 NewGlobal->
setSection(
"__TEXT,__asan_cstring,regular");
2593 G->replaceAllUsesWith(
2596 G->eraseFromParent();
2597 NewGlobals[i] = NewGlobal;
2602 bool CanUsePrivateAliases =
2603 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2604 TargetTriple.isOSBinFormatWasm();
2605 if (CanUsePrivateAliases && UsePrivateAlias) {
2608 InstrumentedGlobal =
2616 }
else if (UseOdrIndicator) {
2619 auto *ODRIndicatorSym =
2628 ODRIndicatorSym->setAlignment(
Align(1));
2629 ODRIndicator = ODRIndicatorSym;
2635 ConstantInt::get(IntptrTy, SizeInBytes),
2636 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
2639 ConstantInt::get(IntptrTy, MD.
IsDynInit),
2644 HasDynamicallyInitializedGlobals =
true;
2648 Initializers[i] = Initializer;
2654 for (
size_t i = 0; i < n; i++) {
2656 if (
G->getName().empty())
continue;
2661 if (UseGlobalsGC && TargetTriple.isOSBinFormatELF()) {
2667 instrumentGlobalsELF(IRB, M, NewGlobals, Initializers,
2669 }
else if (n == 0) {
2672 *CtorComdat = TargetTriple.isOSBinFormatELF();
2674 *CtorComdat =
false;
2675 if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2676 InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
2677 }
else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2678 InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
2680 InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
2685 if (HasDynamicallyInitializedGlobals)
2692ModuleAddressSanitizer::getRedzoneSizeForGlobal(
uint64_t SizeInBytes)
const {
2693 constexpr uint64_t kMaxRZ = 1 << 18;
2694 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2697 if (SizeInBytes <= MinRZ / 2) {
2701 RZ = MinRZ - SizeInBytes;
2704 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2707 if (SizeInBytes % MinRZ)
2708 RZ += MinRZ - (SizeInBytes % MinRZ);
2711 assert((RZ + SizeInBytes) % MinRZ == 0);
2716int ModuleAddressSanitizer::GetAsanVersion(
const Module &M)
const {
2717 int LongSize =
M.getDataLayout().getPointerSizeInBits();
2722 Version += (LongSize == 32 && isAndroid);
2726bool ModuleAddressSanitizer::instrumentModule(
Module &M) {
2727 initializeCallbacks(M);
2732 if (CompileKernel) {
2737 std::string AsanVersion = std::to_string(GetAsanVersion(M));
2738 std::string VersionCheckName =
2740 std::tie(AsanCtorFunction, std::ignore) =
2743 {}, VersionCheckName);
2747 bool CtorComdat =
true;
2750 if (AsanCtorFunction) {
2751 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2752 instrumentGlobals(IRB, M, &CtorComdat);
2755 instrumentGlobals(IRB, M, &CtorComdat);
2764 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2765 if (AsanCtorFunction) {
2769 if (AsanDtorFunction) {
2774 if (AsanCtorFunction)
2776 if (AsanDtorFunction)
2787 for (
int Exp = 0;
Exp < 2;
Exp++) {
2788 for (
size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2789 const std::string TypeStr = AccessIsWrite ?
"store" :
"load";
2790 const std::string ExpStr =
Exp ?
"exp_" :
"";
2791 const std::string EndingStr = Recover ?
"_noabort" :
"";
2800 Args1.push_back(ExpType);
2801 if (
auto AK = TLI->getExtAttrForI32Param(
false)) {
2806 AsanErrorCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2810 AsanMemoryAccessCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2815 AccessSizeIndex++) {
2816 const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
2817 AsanErrorCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2818 M.getOrInsertFunction(
2822 AsanMemoryAccessCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2823 M.getOrInsertFunction(
2830 const std::string MemIntrinCallbackPrefix =
2834 AsanMemmove =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memmove",
2835 PtrTy, PtrTy, PtrTy, IntptrTy);
2836 AsanMemcpy =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memcpy", PtrTy,
2837 PtrTy, PtrTy, IntptrTy);
2838 AsanMemset =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memset",
2842 AsanHandleNoReturnFunc =
2845 AsanPtrCmpFunction =
2847 AsanPtrSubFunction =
2849 if (Mapping.InGlobal)
2850 AsanShadowGlobal =
M.getOrInsertGlobal(
"__asan_shadow",
2853 AMDGPUAddressShared =
2855 AMDGPUAddressPrivate =
2859bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(
Function &
F) {
2867 if (
F.getName().contains(
" load]")) {
2877bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(
Function &
F) {
2883 if (Mapping.InGlobal) {
2891 LocalDynamicShadow =
2892 IRB.
CreateCall(Asm, {AsanShadowGlobal},
".asan.shadow");
2894 LocalDynamicShadow =
2898 Value *GlobalDynamicAddress =
F.getParent()->getOrInsertGlobal(
2900 LocalDynamicShadow = IRB.
CreateLoad(IntptrTy, GlobalDynamicAddress);
2905void AddressSanitizer::markEscapedLocalAllocas(
Function &
F) {
2910 assert(ProcessedAllocas.empty() &&
"must process localescape before allocas");
2914 if (!
F.getParent()->getFunction(
"llvm.localescape"))
return;
2923 AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
2925 "non-static alloca arg to localescape");
2926 ProcessedAllocas[AI] =
false;
2933bool AddressSanitizer::suppressInstrumentationSiteForDebug(
int &Instrumented) {
2934 bool ShouldInstrument =
2938 return !ShouldInstrument;
2941bool AddressSanitizer::instrumentFunction(
Function &
F,
2947 if (
F.getName().starts_with(
"__asan_"))
return false;
2949 bool FunctionModified =
false;
2954 if (maybeInsertAsanInitAtFunctionEntry(
F))
2955 FunctionModified =
true;
2958 if (!
F.hasFnAttribute(Attribute::SanitizeAddress))
return FunctionModified;
2960 if (
F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
2961 return FunctionModified;
2965 initializeCallbacks(*
F.getParent(), TLI);
2967 FunctionStateRAII CleanupObj(
this);
2969 RuntimeCallInserter RTCI(
F);
2971 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(
F);
2975 markEscapedLocalAllocas(
F);
2987 for (
auto &BB :
F) {
2989 TempsToInstrument.
clear();
2990 int NumInsnsPerBB = 0;
2991 for (
auto &Inst : BB) {
2992 if (LooksLikeCodeInBug11395(&Inst))
return false;
2997 getInterestingMemoryOperands(&Inst, InterestingOperands);
2999 if (!InterestingOperands.
empty()) {
3000 for (
auto &Operand : InterestingOperands) {
3006 if (Operand.MaybeMask) {
3010 if (!TempsToInstrument.
insert(
Ptr).second)
3014 OperandsToInstrument.
push_back(Operand);
3021 PointerComparisonsOrSubtracts.
push_back(&Inst);
3027 if (
auto *CB = dyn_cast<CallBase>(&Inst)) {
3029 TempsToInstrument.
clear();
3033 if (
CallInst *CI = dyn_cast<CallInst>(&Inst))
3040 bool UseCalls = (InstrumentationWithCallsThreshold >= 0 &&
3041 OperandsToInstrument.
size() + IntrinToInstrument.
size() >
3042 (
unsigned)InstrumentationWithCallsThreshold);
3049 int NumInstrumented = 0;
3050 for (
auto &Operand : OperandsToInstrument) {
3051 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3052 instrumentMop(ObjSizeVis, Operand, UseCalls,
3053 F.getParent()->getDataLayout(), RTCI);
3054 FunctionModified =
true;
3056 for (
auto *Inst : IntrinToInstrument) {
3057 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3058 instrumentMemIntrinsic(Inst, RTCI);
3059 FunctionModified =
true;
3062 FunctionStackPoisoner FSP(
F, *
this, RTCI);
3063 bool ChangedStack = FSP.runOnFunction();
3067 for (
auto *CI : NoReturnCalls) {
3069 RTCI.createRuntimeCall(IRB, AsanHandleNoReturnFunc, {});
3072 for (
auto *Inst : PointerComparisonsOrSubtracts) {
3073 instrumentPointerComparisonOrSubtraction(Inst, RTCI);
3074 FunctionModified =
true;
3077 if (ChangedStack || !NoReturnCalls.empty())
3078 FunctionModified =
true;
3080 LLVM_DEBUG(
dbgs() <<
"ASAN done instrumenting: " << FunctionModified <<
" "
3083 return FunctionModified;
3089bool AddressSanitizer::LooksLikeCodeInBug11395(
Instruction *
I) {
3090 if (LongSize != 32)
return false;
3099void FunctionStackPoisoner::initializeCallbacks(
Module &M) {
3103 const char *MallocNameTemplate =
3108 std::string Suffix = itostr(
Index);
3109 AsanStackMallocFunc[
Index] =
M.getOrInsertFunction(
3110 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
3111 AsanStackFreeFunc[
Index] =
3116 if (ASan.UseAfterScope) {
3117 AsanPoisonStackMemoryFunc =
M.getOrInsertFunction(
3119 AsanUnpoisonStackMemoryFunc =
M.getOrInsertFunction(
3123 for (
size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
3124 0xf3, 0xf5, 0xf8}) {
3125 std::ostringstream
Name;
3127 Name << std::setw(2) << std::setfill(
'0') << std::hex << Val;
3128 AsanSetShadowFunc[Val] =
3129 M.getOrInsertFunction(
Name.str(), IRB.
getVoidTy(), IntptrTy, IntptrTy);
3132 AsanAllocaPoisonFunc =
M.getOrInsertFunction(
3134 AsanAllocasUnpoisonFunc =
M.getOrInsertFunction(
3140 size_t Begin,
size_t End,
3142 Value *ShadowBase) {
3146 const size_t LargestStoreSizeInBytes =
3147 std::min<size_t>(
sizeof(
uint64_t), ASan.LongSize / 8);
3149 const bool IsLittleEndian =
F.getParent()->getDataLayout().isLittleEndian();
3155 for (
size_t i = Begin; i <
End;) {
3156 if (!ShadowMask[i]) {
3162 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
3164 while (StoreSizeInBytes >
End - i)
3165 StoreSizeInBytes /= 2;
3168 for (
size_t j = StoreSizeInBytes - 1;
j && !ShadowMask[i +
j]; --
j) {
3169 while (j <= StoreSizeInBytes / 2)
3170 StoreSizeInBytes /= 2;
3174 for (
size_t j = 0;
j < StoreSizeInBytes;
j++) {
3176 Val |= (
uint64_t)ShadowBytes[i + j] << (8 * j);
3178 Val = (Val << 8) | ShadowBytes[i + j];
3182 Value *Poison = IRB.
getIntN(StoreSizeInBytes * 8, Val);
3187 i += StoreSizeInBytes;
3194 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.
size(), IRB, ShadowBase);
3199 size_t Begin,
size_t End,
3202 size_t Done = Begin;
3203 for (
size_t i = Begin, j = Begin + 1; i <
End; i =
j++) {
3204 if (!ShadowMask[i]) {
3208 uint8_t Val = ShadowBytes[i];
3209 if (!AsanSetShadowFunc[Val])
3213 for (;
j <
End && ShadowMask[
j] && Val == ShadowBytes[
j]; ++
j) {
3216 if (j - i >= ASan.MaxInlinePoisoningSize) {
3217 copyToShadowInline(ShadowMask, ShadowBytes,
Done, i, IRB, ShadowBase);
3218 RTCI.createRuntimeCall(
3219 IRB, AsanSetShadowFunc[Val],
3220 {IRB.
CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
3221 ConstantInt::get(IntptrTy, j - i)});
3226 copyToShadowInline(ShadowMask, ShadowBytes,
Done,
End, IRB, ShadowBase);
3234 for (
int i = 0;; i++, MaxSize *= 2)
3235 if (LocalStackSize <= MaxSize)
return i;
3239void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
3241 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
3249 if (Arg.hasByValAttr()) {
3250 Type *Ty = Arg.getParamByValType();
3251 const Align Alignment =
3252 DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
3256 (Arg.hasName() ? Arg.getName() :
"Arg" +
Twine(Arg.getArgNo())) +
3259 Arg.replaceAllUsesWith(AI);
3261 uint64_t AllocSize =
DL.getTypeAllocSize(Ty);
3262 IRB.
CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
3270 Value *ValueIfFalse) {
3273 PHI->addIncoming(ValueIfFalse, CondBlock);
3275 PHI->addIncoming(ValueIfTrue, ThenBlock);
3279Value *FunctionStackPoisoner::createAllocaForLayout(
3288 nullptr,
"MyAlloca");
3297void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3300 DynamicAllocaLayout = IRB.
CreateAlloca(IntptrTy,
nullptr);
3305void FunctionStackPoisoner::processDynamicAllocas() {
3312 for (
const auto &APC : DynamicAllocaPoisonCallVec) {
3315 assert(ASan.isInterestingAlloca(*APC.AI));
3316 assert(!APC.AI->isStaticAlloca());
3319 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3326 createDynamicAllocasInitStorage();
3327 for (
auto &AI : DynamicAllocaVec)
3328 handleDynamicAllocaCall(AI);
3329 unpoisonDynamicAllocas();
3341 for (
Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
3351 if (isa<AllocaInst>(It) || isa<CastInst>(It))
3353 if (
auto *Store = dyn_cast<StoreInst>(It)) {
3357 auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
3358 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3361 Value *Val = Store->getValueOperand();
3362 bool IsDirectArgInit = isa<Argument>(Val);
3363 bool IsArgInitViaCast =
3364 isa<CastInst>(Val) &&
3365 isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
3368 Val == It->getPrevNonDebugInstruction();
3369 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3373 if (IsArgInitViaCast)
3374 InitInsts.
push_back(cast<Instruction>(Val));
3385void FunctionStackPoisoner::processStaticAllocas() {
3386 if (AllocaVec.
empty()) {
3391 int StackMallocIdx = -1;
3393 if (
auto SP =
F.getSubprogram())
3394 EntryDebugLocation =
3403 auto InsBeforeB = InsBefore->
getParent();
3404 assert(InsBeforeB == &
F.getEntryBlock());
3405 for (
auto *AI : StaticAllocasToMoveUp)
3416 ArgInitInst->moveBefore(InsBefore);
3419 if (LocalEscapeCall) LocalEscapeCall->
moveBefore(InsBefore);
3425 ASan.getAllocaSizeInBytes(*AI),
3436 uint64_t Granularity = 1ULL << Mapping.Scale;
3437 uint64_t MinHeaderSize = std::max((
uint64_t)ASan.LongSize / 2, Granularity);
3443 for (
auto &
Desc : SVD)
3447 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3450 assert(ASan.isInterestingAlloca(*APC.AI));
3451 assert(APC.AI->isStaticAlloca());
3456 if (
const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3457 if (LifetimeLoc->getFile() == FnLoc->getFile())
3458 if (
unsigned Line = LifetimeLoc->getLine())
3459 Desc.Line = std::min(
Desc.Line ?
Desc.Line : Line, Line);
3465 LLVM_DEBUG(
dbgs() << DescriptionString <<
" --- " <<
L.FrameSize <<
"\n");
3467 bool DoStackMalloc =
3477 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3478 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3480 Value *StaticAlloca =
3481 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L,
false);
3484 Value *LocalStackBase;
3485 Value *LocalStackBaseAlloca;
3488 if (DoStackMalloc) {
3489 LocalStackBaseAlloca =
3490 IRB.
CreateAlloca(IntptrTy,
nullptr,
"asan_local_stack_base");
3497 Constant *OptionDetectUseAfterReturn =
F.getParent()->getOrInsertGlobal(
3507 Value *FakeStackValue =
3508 RTCI.createRuntimeCall(IRBIf, AsanStackMallocFunc[StackMallocIdx],
3509 ConstantInt::get(IntptrTy, LocalStackSize));
3511 FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
3512 ConstantInt::get(IntptrTy, 0));
3520 RTCI.createRuntimeCall(IRB, AsanStackMallocFunc[StackMallocIdx],
3521 ConstantInt::get(IntptrTy, LocalStackSize));
3523 Value *NoFakeStack =
3528 Value *AllocaValue =
3529 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L,
true) : StaticAlloca;
3532 LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
3533 IRB.
CreateStore(LocalStackBase, LocalStackBaseAlloca);
3538 FakeStack = ConstantInt::get(IntptrTy, 0);
3540 DoDynamicAlloca ? createAllocaForLayout(IRB, L,
true) : StaticAlloca;
3541 LocalStackBaseAlloca = LocalStackBase;
3547 Value *LocalStackBaseAllocaPtr =
3548 isa<PtrToIntInst>(LocalStackBaseAlloca)
3549 ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
3550 : LocalStackBaseAlloca;
3551 assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
3552 "Variable descriptions relative to ASan stack base will be dropped");
3555 for (
const auto &
Desc : SVD) {
3560 IRB.
CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy,
Desc.Offset)),
3573 ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
3583 ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
3590 Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
3593 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3595 if (!StaticAllocaPoisonCallVec.empty()) {
3599 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3602 size_t Begin =
Desc.Offset /
L.Granularity;
3603 size_t End = Begin + (APC.Size +
L.Granularity - 1) /
L.Granularity;
3606 copyToShadow(ShadowAfterScope,
3607 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin,
End,
3621 if (DoStackMalloc) {
3622 assert(StackMallocIdx >= 0);
3639 if (ASan.MaxInlinePoisoningSize != 0 && StackMallocIdx <= 4) {
3641 ShadowAfterReturn.
resize(ClassSize /
L.Granularity,
3643 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3645 Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
3647 ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
3648 Value *SavedFlagPtr = IRBPoison.CreateLoad(
3649 IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
3650 IRBPoison.CreateStore(
3652 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getPtrTy()));
3655 RTCI.createRuntimeCall(
3656 IRBPoison, AsanStackFreeFunc[StackMallocIdx],
3657 {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)});
3661 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3663 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3668 for (
auto *AI : AllocaVec)
3676 Value *SizeArg = ConstantInt::get(IntptrTy,
Size);
3677 RTCI.createRuntimeCall(
3678 IRB, DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3679 {AddrArg, SizeArg});
3690void FunctionStackPoisoner::handleDynamicAllocaCall(
AllocaInst *AI) {
3698 Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
3704 const unsigned ElementSize =
3708 ConstantInt::get(IntptrTy, ElementSize));
3736 ConstantInt::get(IntptrTy, Alignment.
value()));
3739 RTCI.createRuntimeCall(IRB, AsanAllocaPoisonFunc, {NewAddress, OldSize});
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp, RuntimeCallInserter &RTCI)
static const uint64_t kDefaultShadowScale
const char kAMDGPUUnreachableName[]
constexpr size_t kAccessSizeIndexMask
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
static const uint64_t kFreeBSD_ShadowOffset32
constexpr size_t kIsWriteShift
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
static bool isInterestingPointerSubtraction(Instruction *I)
const char kAMDGPUAddressSharedName[]
const char kAsanStackFreeNameTemplate[]
constexpr size_t kCompileKernelMask
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
const char kAsanStackMallocNameTemplate[]
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
static const uint64_t kEmscriptenShadowOffset
const char kAsanGenPrefix[]
constexpr size_t kIsWriteMask
static uint64_t getRedzoneSizeForScale(int MappingScale)
static const uint64_t kDefaultShadowOffset64
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
static const uint64_t kAsanCtorAndDtorPriority
const char kAsanUnpoisonGlobalsName[]
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
const char kAsanRegisterImageGlobalsName[]
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
const char kODRGenPrefix[]
static const uint64_t kSystemZ_ShadowOffset64
static const uint64_t kDefaultShadowOffset32
const char kAsanShadowMemoryDynamicAddress[]
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
const char kAsanStackMallocAlwaysNameTemplate[]
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kFreeBSD_ShadowOffset64
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
static const uint64_t kFreeBSDKasan_ShadowOffset64
const char kAsanModuleDtorName[]
static const uint64_t kDynamicShadowSentinel
static bool isInterestingPointerComparison(Instruction *I)
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
static const uint64_t kLinuxKasan_ShadowOffset64
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
const char kAsanUnpoisonStackMemoryName[]
static const uint64_t kLoongArch64_ShadowOffset64
const char kAsanRegisterGlobalsName[]
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
const char kAsanGlobalsRegisteredFlagName[]
static const size_t kMaxStackMallocSize
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
const char kAsanUnregisterImageGlobalsName[]
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
static const uint64_t kPPC64_ShadowOffset64
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kMIPS32_ShadowOffset32
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
static const uint64_t kFreeBSDAArch64_ShadowOffset64
static const uint64_t kSmallX86_64ShadowOffsetBase
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
static size_t TypeStoreSizeToSizeIndex(uint32_t TypeSize)
const char kAsanAllocaPoison[]
constexpr size_t kCompileKernelShift
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
const char kAMDGPUAddressPrivateName[]
static const uint64_t kNetBSDKasan_ShadowOffset64
const char kAMDGPUBallotName[]
const char kAsanRegisterElfGlobalsName[]
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(true), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanPoisonGlobalsName[]
const char kAsanHandleNoReturnName[]
static const size_t kMinStackMallocSize
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
static const uint64_t kAArch64_ShadowOffset64
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
This file contains the simple types necessary to represent the attributes associated with functions a...
static bool isPointerOperand(Value *I, User *U)
static const Function * getParent(const Value *V)
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
Module.h This file contains the declarations for the Module class.
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
const SmallVectorImpl< MachineOperand > & Cond
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
This defines the Use class.
uint64_t getZExtValue() const
Get zero extended value.
int64_t getSExtValue() const
Get sign extended value.
AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
static ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
AttributeList addParamAttribute(LLVMContext &C, unsigned ArgNo, Attribute::AttrKind Kind) const
Add an argument attribute to the list.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
const CallInst * getTerminatingMustTailCall() const
Returns the call instruction marked 'musttail' prior to the terminating return instruction of this ba...
const Module * getModule() const
Return the module owning the function this basic block belongs to, or nullptr if the function does not have a module.
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, BasicBlock::iterator InsertBefore)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
Value * getArgOperand(unsigned i) const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
static CallBase * addOperandBundle(CallBase *CB, uint32_t ID, OperandBundleDef OB, Instruction *InsertPt=nullptr)
Create a clone of CB with operand bundle OB added.
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr, BasicBlock::iterator InsertBefore)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getIntToPtr(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, bool InBounds=false, std::optional< ConstantRange > InRange=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
static bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static ConstantPointerNull * get(PointerType *T)
Static factory methods - Return objects of the specified value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single entity.
static FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags applied.
bool hasPersonalityFn() const
Check whether this function has a personality function.
Constant * getPersonalityFn() const
Get the personality function associated with this function.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
const Constant * getAliasee() const
static GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalObject.
void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
void setComdat(Comdat *C)
void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1',...
ThreadLocalMode getThreadLocalMode() const
Module * getParent()
Get the module that this global value is contained inside of...
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) fro...
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
Common base class shared among various IRBuilders.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", bool IsInBounds=false)
Value * CreateExtractElement(Value *Vec, Value *Idx, const Twine &Name="")
LoadInst * CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align, const char *Name)
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
BasicBlock::iterator GetInsertPoint() const
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateTypeSize(Type *DstType, TypeSize Size)
Create an expression which evaluates to the number of units in Size at runtime.
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool...
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args=std::nullopt, const Twine &Name="", MDNode *FPMathTag=nullptr)
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", bool IsInBounds=false)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, MDNode *TBAATag=nullptr, MDNode *TBAAStructTag=nullptr, MDNode *ScopeTag=nullptr, MDNode *NoAliasTag=nullptr)
Create and insert a memcpy between the specified pointers.
Value * CreateAddrSpaceCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either at the end of a BasicBlock, or at a specific iterator location in a block.
static InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
An analysis over an "outer" IR unit that provides access to an analysis manager over an "inner" IR un...
Base class for instruction visitors.
RetTy visitCallBase(CallBase &I)
RetTy visitCleanupReturnInst(CleanupReturnInst &I)
RetTy visitIntrinsicInst(IntrinsicInst &I)
void visit(Iterator Start, Iterator End)
RetTy visitReturnInst(ReturnInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitResumeInst(ResumeInst &I)
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
const Module * getModule() const
Return the module owning the function this instruction belongs to, or nullptr if the function does not belong to a module.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
bool isEHPad() const
Return true if the instruction is a variety of EH-block.
const BasicBlock * getParent() const
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
void moveBefore(Instruction *MovePos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos ...
static IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
Intrinsic::ID getIntrinsicID() const
Return the intrinsic ID of this intrinsic.
This is an important class for using LLVM in a threaded context.
void emitError(uint64_t LocCookie, const Twine &ErrorStr)
emitError - Emit an error message to the currently installed error handler with optional location inf...
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
MDNode * createBranchWeights(uint32_t TrueWeight, uint32_t FalseWeight)
Return metadata containing two branch weights.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Evaluate the size and offset of an object pointed to by a Value* statically.
SizeOffsetAPInt compute(Value *V)
A container for an operand bundle being viewed as a set of values rather than a set of uses.
Pass interface - Implemented by all 'passes'.
static PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static PointerType * getUnqual(Type *ElementType)
This constructs a pointer to an object of the specified type in the default address space (address sp...
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
void abandon()
Mark an analysis as abandoned.
Resume the propagation of an exception.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal, BasicBlock::iterator InsertBefore)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
constexpr bool empty() const
empty - Check if the string is empty.
constexpr const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Class to represent struct types.
static StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
TinyPtrVector - This class is specialized for cases where there are normally 0 or 1 element in a vect...
Triple - Helper class for working with autoconf configuration names.
bool isAndroidVersionLT(unsigned Major) const
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
bool isLoongArch64() const
Tests whether the target is 64-bit LoongArch.
EnvironmentType getEnvironment() const
Get the parsed environment type of this triple.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
static Type * getVoidTy(LLVMContext &C)
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt32Ty(LLVMContext &C)
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
LLVMContext & getContext() const
All values hold a context through their type.
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
static VectorType * get(Type *ElementType, ElementCount EC)
This static method is the primary way to construct a VectorType.
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
This file contains the declaration of the Comdat class, which represents a single COMDAT in LLVM.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
@ C
The default llvm calling convention, compatible with C.
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=std::nullopt)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
@ S_CSTRING_LITERALS
S_CSTRING_LITERALS - Section with literal C strings.
@ OB
OB - OneByte - Set if this instruction has a one byte opcode.
ValuesClass values(OptsTy... Options)
Helper to build a ValuesClass by forwarding a variable number of arguments as an initializer list to ...
initializer< Ty > init(const Ty &Val)
Linkage
Describes symbol linkage. This can be used to resolve definition clashes.
uint64_t getAllocaSizeInBytes(const AllocaInst &AI)
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
AllocaInst * findAllocaForValue(Value *V, bool OffsetZero=false)
Returns unique alloca where the value comes from, or nullptr.
Function * createSanitizerCtor(Module &M, StringRef CtorName)
Creates sanitizer constructor function.
AsanDetectStackUseAfterReturnMode
Mode of ASan detect stack use after return.
@ Always
Always detect stack use after return.
@ Never
Never detect stack use after return.
@ Runtime
Detect stack use after return unless it is disabled at runtime (ASAN_OPTIONS=detect_stack_use_after_return=0).
GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging, const char *NamePrefix="")
DenseMap< BasicBlock *, ColorVector > colorEHFunclets(Function &F)
If an EH funclet personality is in use (see isFuncletEHPersonality), this will recompute which blocks...
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments and pointer casts from the specified value,...
bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
bool isScopedEHPersonality(EHPersonality Pers)
Returns true if this personality uses scope-style EH IR instructions: catchswitch,...
SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
int countr_zero(T Val)
Count number of 0's from the least significant bit to the most stopping at the first 1.
FunctionCallee declareSanitizerInitFunction(Module &M, StringRef InitName, ArrayRef< Type * > InitArgTypes, bool Weak=false)
std::string getUniqueModuleId(Module *M)
Produce a unique identifier for this module by taking the MD5 sum of the names of the module's strong...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
std::pair< Function *, FunctionCallee > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef(), bool Weak=false)
Creates sanitizer constructor function, and calls sanitizer's init function from it.
decltype(auto) get(const PointerIntPair< PointerTy, IntBits, IntType, PtrTraits, Info > &Pair)
void SplitBlockAndInsertIfThenElse(Value *Cond, BasicBlock::iterator SplitBefore, Instruction **ThenTerm, Instruction **ElseTerm, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr)
SplitBlockAndInsertIfThenElse is similar to SplitBlockAndInsertIfThen, but also creates the ElseBlock...
void SplitBlockAndInsertForEachLane(ElementCount EC, Type *IndexTy, Instruction *InsertBefore, std::function< void(IRBuilderBase &, Value *)> Func)
Utility function for performing a given action on each lane of a vector with EC elements.
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
EHPersonality classifyEHPersonality(const Value *Pers)
See if the given exception handling personality function is one that we understand.
AsanDtorKind
Types of ASan module destructors supported.
@ None
Do not emit any destructors for ASan.
ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, uint64_t Granularity, uint64_t MinHeaderSize)
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
void appendToCompilerUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.compiler.used list.
static const int kAsanStackUseAfterReturnMagic
void setGlobalVariableLargeSection(const Triple &TargetTriple, GlobalVariable &GV)
@ Dynamic
Denotes mode unknown at compile time.
void appendToGlobalCtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Append F to the list of global ctors of module M with the given Priority.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
iterator_range< df_iterator< T > > depth_first(const T &G)
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
AsanCtorKind
Types of ASan module constructors supported.
void maybeMarkSanitizerLibraryCallNoBuiltin(CallInst *CI, const TargetLibraryInfo *TLI)
Given a CallInst, check if it calls a string function known to CodeGen, and mark it with NoBuiltin if...
void appendToUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.used list.
void appendToGlobalDtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Same as appendToGlobalCtors(), but for global dtors.
void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize, bool IsKasan, uint64_t *ShadowBase, int *MappingScale, bool *OrShadowOffset)
std::string demangle(std::string_view MangledName)
Attempt to demangle a string using different demangling schemes.
bool replaceDbgDeclare(Value *Address, Value *NewAddress, DIBuilder &Builder, uint8_t DIExprFlags, int Offset)
Replaces llvm.dbg.declare instruction when the address it describes is replaced with a new value.
ASanAccessInfo(int32_t Packed)
AsanDetectStackUseAfterReturnMode UseAfterReturn
int InstrumentationWithCallsThreshold
uint32_t MaxInlinePoisoningSize
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Description of the encoding of one expression Op.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
Align valueOrOne() const
For convenience, returns a valid alignment or 1 if undefined.
Various options to control the behavior of getObjectSize.
bool RoundToAlign
Whether to round the result up to the alignment of allocas, byval arguments, and global variables.
A CRTP mix-in to automatically provide informational APIs needed for passes.
SizeOffsetAPInt - Used by ObjectSizeOffsetVisitor, which works with APInts.