1 //===-- AddressSanitizer.cpp - memory error detector ------------*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file is a part of AddressSanitizer, an address sanity checker.
11 // Details of the algorithm:
12 // http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
14 //===----------------------------------------------------------------------===//
16 #define DEBUG_TYPE "asan"
18 #include "llvm/Transforms/Instrumentation.h"
19 #include "llvm/ADT/ArrayRef.h"
20 #include "llvm/ADT/DenseMap.h"
21 #include "llvm/ADT/DepthFirstIterator.h"
22 #include "llvm/ADT/OwningPtr.h"
23 #include "llvm/ADT/SmallSet.h"
24 #include "llvm/ADT/SmallString.h"
25 #include "llvm/ADT/SmallVector.h"
26 #include "llvm/ADT/Statistic.h"
27 #include "llvm/ADT/StringExtras.h"
28 #include "llvm/ADT/Triple.h"
29 #include "llvm/DIBuilder.h"
30 #include "llvm/IR/DataLayout.h"
31 #include "llvm/IR/Function.h"
32 #include "llvm/IR/IRBuilder.h"
33 #include "llvm/IR/InlineAsm.h"
34 #include "llvm/IR/IntrinsicInst.h"
35 #include "llvm/IR/LLVMContext.h"
36 #include "llvm/IR/Module.h"
37 #include "llvm/IR/Type.h"
38 #include "llvm/InstVisitor.h"
39 #include "llvm/Support/CallSite.h"
40 #include "llvm/Support/CommandLine.h"
41 #include "llvm/Support/DataTypes.h"
42 #include "llvm/Support/Debug.h"
43 #include "llvm/Support/Endian.h"
44 #include "llvm/Support/raw_ostream.h"
45 #include "llvm/Support/system_error.h"
46 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
47 #include "llvm/Transforms/Utils/Cloning.h"
48 #include "llvm/Transforms/Utils/Local.h"
49 #include "llvm/Transforms/Utils/ModuleUtils.h"
50 #include "llvm/Transforms/Utils/SpecialCaseList.h"
56 static const uint64_t kDefaultShadowScale = 3;
57 static const uint64_t kDefaultShadowOffset32 = 1ULL << 29;
58 static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
59 static const uint64_t kDefaultShort64bitShadowOffset = 0x7FFF8000; // < 2G.
60 static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 41;
61 static const uint64_t kMIPS32_ShadowOffset32 = 0x0aaa8000;
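// The run-time maps an application address Addr to the shadow byte at
// (Addr >> kDefaultShadowScale) + Offset.  For example, with the short 64-bit
// offset, 0x10000000 maps to (0x10000000 >> 3) + 0x7FFF8000 == 0x81FF8000.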
63 static const size_t kMinStackMallocSize = 1 << 6; // 64B
64 static const size_t kMaxStackMallocSize = 1 << 16; // 64K
65 static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
66 static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;
68 static const char *const kAsanModuleCtorName = "asan.module_ctor";
69 static const char *const kAsanModuleDtorName = "asan.module_dtor";
70 static const int kAsanCtorAndCtorPriority = 1;
71 static const char *const kAsanReportErrorTemplate = "__asan_report_";
72 static const char *const kAsanReportLoadN = "__asan_report_load_n";
73 static const char *const kAsanReportStoreN = "__asan_report_store_n";
74 static const char *const kAsanRegisterGlobalsName = "__asan_register_globals";
75 static const char *const kAsanUnregisterGlobalsName =
76 "__asan_unregister_globals";
77 static const char *const kAsanPoisonGlobalsName = "__asan_before_dynamic_init";
78 static const char *const kAsanUnpoisonGlobalsName = "__asan_after_dynamic_init";
79 static const char *const kAsanInitName = "__asan_init_v3";
80 static const char *const kAsanCovName = "__sanitizer_cov";
81 static const char *const kAsanHandleNoReturnName = "__asan_handle_no_return";
82 static const char *const kAsanMappingOffsetName = "__asan_mapping_offset";
83 static const char *const kAsanMappingScaleName = "__asan_mapping_scale";
84 static const int kMaxAsanStackMallocSizeClass = 10;
85 static const char *const kAsanStackMallocNameTemplate = "__asan_stack_malloc_";
86 static const char *const kAsanStackFreeNameTemplate = "__asan_stack_free_";
87 static const char *const kAsanGenPrefix = "__asan_gen_";
88 static const char *const kAsanPoisonStackMemoryName =
89 "__asan_poison_stack_memory";
90 static const char *const kAsanUnpoisonStackMemoryName =
91 "__asan_unpoison_stack_memory";
93 static const char *const kAsanOptionDetectUAR =
94 "__asan_option_detect_stack_use_after_return";
96 // These constants must match the definitions in the run-time library.
97 static const int kAsanStackLeftRedzoneMagic = 0xf1;
98 static const int kAsanStackMidRedzoneMagic = 0xf2;
99 static const int kAsanStackRightRedzoneMagic = 0xf3;
100 static const int kAsanStackPartialRedzoneMagic = 0xf4;
102 static const int kAsanStackAfterReturnMagic = 0xf5;
105 // Access sizes are powers of two: 1, 2, 4, 8, 16.
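// Accesses are mapped to the report callbacks by log2 of the access size in
// bytes: 1 -> 0, 2 -> 1, 4 -> 2, 8 -> 3, 16 -> 4 (see TypeSizeToSizeIndex and
// AsanErrorCallback below).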
106 static const size_t kNumberOfAccessSizes = 5;
108 // Command-line flags.
110 // This flag may need to be replaced with -f[no-]asan-reads.
111 static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
112 cl::desc("instrument read instructions"), cl::Hidden, cl::init(true));
113 static cl::opt<bool> ClInstrumentWrites("asan-instrument-writes",
114 cl::desc("instrument write instructions"), cl::Hidden, cl::init(true));
115 static cl::opt<bool> ClInstrumentAtomics("asan-instrument-atomics",
116 cl::desc("instrument atomic instructions (rmw, cmpxchg)"),
117 cl::Hidden, cl::init(true));
118 static cl::opt<bool> ClAlwaysSlowPath("asan-always-slow-path",
119 cl::desc("use instrumentation with slow path for all accesses"),
120 cl::Hidden, cl::init(false));
121 // This flag limits the number of instructions to be instrumented
122 // in any given BB. Normally, this should be set to unlimited (INT_MAX),
123 // but due to http://llvm.org/bugs/show_bug.cgi?id=12652 we temporarily limit it.
125 static cl::opt<int> ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb",
127 cl::desc("maximal number of instructions to instrument in any given BB"),
129 // This flag may need to be replaced with -f[no]asan-stack.
130 static cl::opt<bool> ClStack("asan-stack",
131 cl::desc("Handle stack memory"), cl::Hidden, cl::init(true));
132 // This flag may need to be replaced with -f[no]asan-use-after-return.
133 static cl::opt<bool> ClUseAfterReturn("asan-use-after-return",
134 cl::desc("Check return-after-free"), cl::Hidden, cl::init(false));
135 // This flag may need to be replaced with -f[no]asan-globals.
136 static cl::opt<bool> ClGlobals("asan-globals",
137 cl::desc("Handle global objects"), cl::Hidden, cl::init(true));
138 static cl::opt<bool> ClCoverage("asan-coverage",
139 cl::desc("ASan coverage"), cl::Hidden, cl::init(false));
140 static cl::opt<bool> ClInitializers("asan-initialization-order",
141 cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(false));
142 static cl::opt<bool> ClMemIntrin("asan-memintrin",
143 cl::desc("Handle memset/memcpy/memmove"), cl::Hidden, cl::init(true));
144 static cl::opt<bool> ClRealignStack("asan-realign-stack",
145 cl::desc("Realign stack to 32"), cl::Hidden, cl::init(true));
146 static cl::opt<std::string> ClBlacklistFile("asan-blacklist",
147 cl::desc("File containing the list of objects to ignore "
148 "during instrumentation"), cl::Hidden);
150 // This is an experimental feature that will allow choosing between
151 // instrumented and non-instrumented code at link-time.
152 // If this option is on, just before instrumenting a function we create its
153 // clone; if the function is not changed by asan the clone is deleted.
154 // If we end up with a clone, we put the instrumented function into a section
155 // called "ASAN" and the uninstrumented function into a section called "NOASAN".
157 // This is still a prototype; we need to figure out a way to keep two copies of
158 // a function so that the linker can easily choose one of them.
159 static cl::opt<bool> ClKeepUninstrumented("asan-keep-uninstrumented-functions",
160 cl::desc("Keep uninstrumented copies of functions"),
161 cl::Hidden, cl::init(false));
163 // These flags allow changing the shadow mapping.
164 // The shadow mapping looks like
165 // Shadow = (Mem >> scale) + (1 << offset_log)
166 static cl::opt<int> ClMappingScale("asan-mapping-scale",
167 cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0));
168 static cl::opt<int> ClMappingOffsetLog("asan-mapping-offset-log",
169 cl::desc("offset of asan shadow mapping"), cl::Hidden, cl::init(-1));
170 static cl::opt<bool> ClShort64BitOffset("asan-short-64bit-mapping-offset",
171 cl::desc("Use short immediate constant as the mapping offset for 64bit"),
172 cl::Hidden, cl::init(true));
174 // Optimization flags. Not user visible, used mostly for testing
175 // and benchmarking the tool.
176 static cl::opt<bool> ClOpt("asan-opt",
177 cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true));
178 static cl::opt<bool> ClOptSameTemp("asan-opt-same-temp",
179 cl::desc("Instrument the same temp just once"), cl::Hidden,
181 static cl::opt<bool> ClOptGlobals("asan-opt-globals",
182 cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true));
184 static cl::opt<bool> ClCheckLifetime("asan-check-lifetime",
185 cl::desc("Use llvm.lifetime intrinsics to insert extra checks"),
186 cl::Hidden, cl::init(false));
189 static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden,
191 static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"),
192 cl::Hidden, cl::init(0));
193 static cl::opt<std::string> ClDebugFunc("asan-debug-func",
194 cl::Hidden, cl::desc("Debug func"));
195 static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"),
196 cl::Hidden, cl::init(-1));
197 static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug max inst"),
198 cl::Hidden, cl::init(-1));
200 STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
201 STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
202 STATISTIC(NumOptimizedAccessesToGlobalArray,
203 "Number of optimized accesses to global arrays");
204 STATISTIC(NumOptimizedAccessesToGlobalVar,
205 "Number of optimized accesses to global vars");
208 /// A set of dynamically initialized globals extracted from metadata.
209 class SetOfDynamicallyInitializedGlobals {
211 void Init(Module& M) {
212 // Clang generates metadata identifying all dynamically initialized globals.
213 NamedMDNode *DynamicGlobals =
214 M.getNamedMetadata("llvm.asan.dynamically_initialized_globals");
217 for (int i = 0, n = DynamicGlobals->getNumOperands(); i < n; ++i) {
218 MDNode *MDN = DynamicGlobals->getOperand(i);
219 assert(MDN->getNumOperands() == 1);
220 Value *VG = MDN->getOperand(0);
221 // The optimizer may optimize away a global entirely, in which case we
222 // cannot instrument access to it.
225 DynInitGlobals.insert(cast<GlobalVariable>(VG));
228 bool Contains(GlobalVariable *G) { return DynInitGlobals.count(G) != 0; }
230 SmallSet<GlobalValue*, 32> DynInitGlobals;
233 /// This struct defines the shadow mapping using the rule:
234 /// shadow = (mem >> Scale) ADD-or-OR Offset.
235 struct ShadowMapping {
241 static ShadowMapping getShadowMapping(const Module &M, int LongSize,
242 bool ZeroBaseShadow) {
243 llvm::Triple TargetTriple(M.getTargetTriple());
244 bool IsAndroid = TargetTriple.getEnvironment() == llvm::Triple::Android;
245 bool IsMacOSX = TargetTriple.getOS() == llvm::Triple::MacOSX;
246 bool IsPPC64 = TargetTriple.getArch() == llvm::Triple::ppc64 ||
247 TargetTriple.getArch() == llvm::Triple::ppc64le;
248 bool IsX86_64 = TargetTriple.getArch() == llvm::Triple::x86_64;
249 bool IsMIPS32 = TargetTriple.getArch() == llvm::Triple::mips ||
250 TargetTriple.getArch() == llvm::Triple::mipsel;
252 ShadowMapping Mapping;
254 // OR-ing the shadow offset is more efficient (at least on x86),
255 // but on ppc64 we have to use add since the shadow offset is not necessarily
256 // 1/8-th of the address space.
257 Mapping.OrShadowOffset = !IsPPC64 && !ClShort64BitOffset;
259 Mapping.Offset = (IsAndroid || ZeroBaseShadow) ? 0 :
261 (IsMIPS32 ? kMIPS32_ShadowOffset32 : kDefaultShadowOffset32) :
262 IsPPC64 ? kPPC64_ShadowOffset64 : kDefaultShadowOffset64);
263 if (!ZeroBaseShadow && ClShort64BitOffset && IsX86_64 && !IsMacOSX) {
264 assert(LongSize == 64);
265 Mapping.Offset = kDefaultShort64bitShadowOffset;
267 if (!ZeroBaseShadow && ClMappingOffsetLog >= 0) {
268 // Zero offset log is the special case.
269 Mapping.Offset = (ClMappingOffsetLog == 0) ? 0 : 1ULL << ClMappingOffsetLog;
272 Mapping.Scale = kDefaultShadowScale;
273 if (ClMappingScale) {
274 Mapping.Scale = ClMappingScale;
280 static size_t RedzoneSizeForScale(int MappingScale) {
281 // Redzone used for stack and globals is at least 32 bytes.
282 // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
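// E.g. the default scale of 3 keeps the 32-byte minimum, while a scale of 7
// yields max(32, 1 << 7) == 128 bytes.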
283 return std::max(32U, 1U << MappingScale);
286 /// AddressSanitizer: instrument the code in module to find memory bugs.
287 struct AddressSanitizer : public FunctionPass {
288 AddressSanitizer(bool CheckInitOrder = true,
289 bool CheckUseAfterReturn = false,
290 bool CheckLifetime = false,
291 StringRef BlacklistFile = StringRef(),
292 bool ZeroBaseShadow = false)
294 CheckInitOrder(CheckInitOrder || ClInitializers),
295 CheckUseAfterReturn(CheckUseAfterReturn || ClUseAfterReturn),
296 CheckLifetime(CheckLifetime || ClCheckLifetime),
297 BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
299 ZeroBaseShadow(ZeroBaseShadow) {}
300 virtual const char *getPassName() const {
301 return "AddressSanitizerFunctionPass";
303 void instrumentMop(Instruction *I);
304 void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
305 Value *Addr, uint32_t TypeSize, bool IsWrite,
306 Value *SizeArgument);
307 Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
308 Value *ShadowValue, uint32_t TypeSize);
309 Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
310 bool IsWrite, size_t AccessSizeIndex,
311 Value *SizeArgument);
312 bool instrumentMemIntrinsic(MemIntrinsic *MI);
313 void instrumentMemIntrinsicParam(Instruction *OrigIns, Value *Addr,
315 Instruction *InsertBefore, bool IsWrite);
316 Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
317 bool runOnFunction(Function &F);
318 bool maybeInsertAsanInitAtFunctionEntry(Function &F);
319 void emitShadowMapping(Module &M, IRBuilder<> &IRB) const;
320 virtual bool doInitialization(Module &M);
321 static char ID; // Pass identification, replacement for typeid
324 void initializeCallbacks(Module &M);
326 bool ShouldInstrumentGlobal(GlobalVariable *G);
327 bool LooksLikeCodeInBug11395(Instruction *I);
328 void FindDynamicInitializers(Module &M);
329 bool GlobalIsLinkerInitialized(GlobalVariable *G);
330 bool InjectCoverage(Function &F);
333 bool CheckUseAfterReturn;
335 SmallString<64> BlacklistFile;
342 ShadowMapping Mapping;
343 Function *AsanCtorFunction;
344 Function *AsanInitFunction;
345 Function *AsanHandleNoReturnFunc;
346 Function *AsanCovFunction;
347 OwningPtr<SpecialCaseList> BL;
348 // This array is indexed by AccessIsWrite and log2(AccessSize).
349 Function *AsanErrorCallback[2][kNumberOfAccessSizes];
350 // This array is indexed by AccessIsWrite.
351 Function *AsanErrorCallbackSized[2];
353 SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;
355 friend struct FunctionStackPoisoner;
358 class AddressSanitizerModule : public ModulePass {
360 AddressSanitizerModule(bool CheckInitOrder = true,
361 StringRef BlacklistFile = StringRef(),
362 bool ZeroBaseShadow = false)
364 CheckInitOrder(CheckInitOrder || ClInitializers),
365 BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
367 ZeroBaseShadow(ZeroBaseShadow) {}
368 bool runOnModule(Module &M);
369 static char ID; // Pass identification, replacement for typeid
370 virtual const char *getPassName() const {
371 return "AddressSanitizerModule";
375 void initializeCallbacks(Module &M);
377 bool ShouldInstrumentGlobal(GlobalVariable *G);
378 void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName);
379 size_t RedzoneSize() const {
380 return RedzoneSizeForScale(Mapping.Scale);
384 SmallString<64> BlacklistFile;
387 OwningPtr<SpecialCaseList> BL;
388 SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;
392 ShadowMapping Mapping;
393 Function *AsanPoisonGlobals;
394 Function *AsanUnpoisonGlobals;
395 Function *AsanRegisterGlobals;
396 Function *AsanUnregisterGlobals;
399 // Stack poisoning does not play well with exception handling.
400 // When an exception is thrown, we essentially bypass the code
401 // that unpoisons the stack. This is why the run-time library has
402 // to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire
403 // stack in the interceptor. This, however, does not work inside the
404 // actual function that catches the exception, most likely because the
405 // compiler hoists the load of the shadow value somewhere too high.
406 // This causes asan to report a nonexistent bug on 453.povray.
407 // It sounds like an LLVM bug.
408 struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
410 AddressSanitizer &ASan;
415 ShadowMapping Mapping;
417 SmallVector<AllocaInst*, 16> AllocaVec;
418 SmallVector<Instruction*, 8> RetVec;
419 uint64_t TotalStackSize;
420 unsigned StackAlignment;
422 Function *AsanStackMallocFunc[kMaxAsanStackMallocSizeClass + 1],
423 *AsanStackFreeFunc[kMaxAsanStackMallocSizeClass + 1];
424 Function *AsanPoisonStackMemoryFunc, *AsanUnpoisonStackMemoryFunc;
426 // Stores the location and arguments of a poisoning/unpoisoning call for an alloca.
427 struct AllocaPoisonCall {
428 IntrinsicInst *InsBefore;
433 SmallVector<AllocaPoisonCall, 8> AllocaPoisonCallVec;
435 // Maps a Value to the AllocaInst from which the Value originates.
436 typedef DenseMap<Value*, AllocaInst*> AllocaForValueMapTy;
437 AllocaForValueMapTy AllocaForValue;
439 FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
440 : F(F), ASan(ASan), DIB(*F.getParent()), C(ASan.C),
441 IntptrTy(ASan.IntptrTy), IntptrPtrTy(PointerType::get(IntptrTy, 0)),
442 Mapping(ASan.Mapping),
443 TotalStackSize(0), StackAlignment(1 << Mapping.Scale) {}
445 bool runOnFunction() {
446 if (!ClStack) return false;
447 // Collect alloca, ret, lifetime instructions etc.
448 for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
449 DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
450 BasicBlock *BB = *DI;
453 if (AllocaVec.empty()) return false;
455 initializeCallbacks(*F.getParent());
465 // Finds all static Alloca instructions and puts
466 // poisoned red zones around all of them.
467 // Then unpoisons everything before the function returns.
470 // ----------------------- Visitors.
471 /// \brief Collect all Ret instructions.
472 void visitReturnInst(ReturnInst &RI) {
473 RetVec.push_back(&RI);
476 /// \brief Collect Alloca instructions we want (and can) handle.
477 void visitAllocaInst(AllocaInst &AI) {
478 if (!isInterestingAlloca(AI)) return;
480 StackAlignment = std::max(StackAlignment, AI.getAlignment());
481 AllocaVec.push_back(&AI);
482 uint64_t AlignedSize = getAlignedAllocaSize(&AI);
483 TotalStackSize += AlignedSize;
486 /// \brief Collect lifetime intrinsic calls to check for use-after-scope
487 /// errors.
488 void visitIntrinsicInst(IntrinsicInst &II) {
489 if (!ASan.CheckLifetime) return;
490 Intrinsic::ID ID = II.getIntrinsicID();
491 if (ID != Intrinsic::lifetime_start &&
492 ID != Intrinsic::lifetime_end)
494 // Found lifetime intrinsic, add ASan instrumentation if necessary.
495 ConstantInt *Size = dyn_cast<ConstantInt>(II.getArgOperand(0));
496 // If size argument is undefined, don't do anything.
497 if (Size->isMinusOne()) return;
498 // Check that size doesn't saturate uint64_t and can
499 // be stored in IntptrTy.
500 const uint64_t SizeValue = Size->getValue().getLimitedValue();
501 if (SizeValue == ~0ULL ||
502 !ConstantInt::isValueValidForType(IntptrTy, SizeValue))
504 // Find alloca instruction that corresponds to llvm.lifetime argument.
505 AllocaInst *AI = findAllocaForValue(II.getArgOperand(1));
507 bool DoPoison = (ID == Intrinsic::lifetime_end);
508 AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
509 AllocaPoisonCallVec.push_back(APC);
512 // ---------------------- Helpers.
513 void initializeCallbacks(Module &M);
515 // Check if we want (and can) handle this alloca.
516 bool isInterestingAlloca(AllocaInst &AI) const {
517 return (!AI.isArrayAllocation() &&
518 AI.isStaticAlloca() &&
519 AI.getAlignment() <= RedzoneSize() &&
520 AI.getAllocatedType()->isSized());
523 size_t RedzoneSize() const {
524 return RedzoneSizeForScale(Mapping.Scale);
526 uint64_t getAllocaSizeInBytes(AllocaInst *AI) const {
527 Type *Ty = AI->getAllocatedType();
528 uint64_t SizeInBytes = ASan.TD->getTypeAllocSize(Ty);
531 uint64_t getAlignedSize(uint64_t SizeInBytes) const {
532 size_t RZ = RedzoneSize();
533 return ((SizeInBytes + RZ - 1) / RZ) * RZ;
535 uint64_t getAlignedAllocaSize(AllocaInst *AI) const {
536 uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
537 return getAlignedSize(SizeInBytes);
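// E.g. with a 32-byte redzone, a 100-byte alloca has an aligned size of 128.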
539 /// Finds alloca where the value comes from.
540 AllocaInst *findAllocaForValue(Value *V);
541 void poisonRedZones(const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> &IRB,
542 Value *ShadowBase, bool DoPoison);
543 void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);
545 void SetShadowToStackAfterReturnInlined(IRBuilder<> &IRB, Value *ShadowBase,
551 char AddressSanitizer::ID = 0;
552 INITIALIZE_PASS(AddressSanitizer, "asan",
553 "AddressSanitizer: detects use-after-free and out-of-bounds bugs.",
555 FunctionPass *llvm::createAddressSanitizerFunctionPass(
556 bool CheckInitOrder, bool CheckUseAfterReturn, bool CheckLifetime,
557 StringRef BlacklistFile, bool ZeroBaseShadow) {
558 return new AddressSanitizer(CheckInitOrder, CheckUseAfterReturn,
559 CheckLifetime, BlacklistFile, ZeroBaseShadow);
562 char AddressSanitizerModule::ID = 0;
563 INITIALIZE_PASS(AddressSanitizerModule, "asan-module",
564 "AddressSanitizer: detects use-after-free and out-of-bounds bugs."
565 "ModulePass", false, false)
566 ModulePass *llvm::createAddressSanitizerModulePass(
567 bool CheckInitOrder, StringRef BlacklistFile, bool ZeroBaseShadow) {
568 return new AddressSanitizerModule(CheckInitOrder, BlacklistFile,
572 static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
573 size_t Res = countTrailingZeros(TypeSize / 8);
574 assert(Res < kNumberOfAccessSizes);
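// E.g. a 32-bit (4-byte) access yields index 2; a 128-bit access yields 4.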
578 // \brief Create a constant for Str so that we can pass it to the run-time lib.
579 static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str) {
580 Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str);
581 GlobalVariable *GV = new GlobalVariable(M, StrConst->getType(), true,
582 GlobalValue::InternalLinkage, StrConst,
584 GV->setUnnamedAddr(true); // Ok to merge these.
585 GV->setAlignment(1); // Strings may not be merged w/o setting align 1.
589 static bool GlobalWasGeneratedByAsan(GlobalVariable *G) {
590 return G->getName().find(kAsanGenPrefix) == 0;
593 Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
595 Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
596 if (Mapping.Offset == 0)
598 // (Shadow >> scale) | offset
599 if (Mapping.OrShadowOffset)
600 return IRB.CreateOr(Shadow, ConstantInt::get(IntptrTy, Mapping.Offset));
602 return IRB.CreateAdd(Shadow, ConstantInt::get(IntptrTy, Mapping.Offset));
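// E.g. on x86-64 with the default mapping this emits roughly:
//   %shadow = lshr i64 %addr, 3
//   %ptr    = add  i64 %shadow, 2147450880  ; 0x7FFF8000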
605 void AddressSanitizer::instrumentMemIntrinsicParam(
606 Instruction *OrigIns,
607 Value *Addr, Value *Size, Instruction *InsertBefore, bool IsWrite) {
608 IRBuilder<> IRB(InsertBefore);
609 if (Size->getType() != IntptrTy)
610 Size = IRB.CreateIntCast(Size, IntptrTy, false);
611 // Check the first byte.
612 instrumentAddress(OrigIns, InsertBefore, Addr, 8, IsWrite, Size);
613 // Check the last byte.
614 IRB.SetInsertPoint(InsertBefore);
615 Value *SizeMinusOne = IRB.CreateSub(Size, ConstantInt::get(IntptrTy, 1));
616 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
617 Value *AddrLast = IRB.CreateAdd(AddrLong, SizeMinusOne);
618 instrumentAddress(OrigIns, InsertBefore, AddrLast, 8, IsWrite, Size);
621 // Instrument memset/memmove/memcpy
622 bool AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
623 Value *Dst = MI->getDest();
624 MemTransferInst *MemTran = dyn_cast<MemTransferInst>(MI);
625 Value *Src = MemTran ? MemTran->getSource() : 0;
626 Value *Length = MI->getLength();
628 Constant *ConstLength = dyn_cast<Constant>(Length);
629 Instruction *InsertBefore = MI;
631 if (ConstLength->isNullValue()) return false;
633 // The size is not a constant so it could be zero -- check at run-time.
634 IRBuilder<> IRB(InsertBefore);
636 Value *Cmp = IRB.CreateICmpNE(Length,
637 Constant::getNullValue(Length->getType()));
638 InsertBefore = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
641 instrumentMemIntrinsicParam(MI, Dst, Length, InsertBefore, true);
643 instrumentMemIntrinsicParam(MI, Src, Length, InsertBefore, false);
647 // If I is an interesting memory access, return the PointerOperand
648 // and set IsWrite. Otherwise return NULL.
649 static Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite) {
650 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
651 if (!ClInstrumentReads) return NULL;
653 return LI->getPointerOperand();
655 if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
656 if (!ClInstrumentWrites) return NULL;
658 return SI->getPointerOperand();
660 if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
661 if (!ClInstrumentAtomics) return NULL;
663 return RMW->getPointerOperand();
665 if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
666 if (!ClInstrumentAtomics) return NULL;
668 return XCHG->getPointerOperand();
673 bool AddressSanitizer::GlobalIsLinkerInitialized(GlobalVariable *G) {
674 // If a global variable does not have dynamic initialization we don't
675 // have to instrument it. However, if a global does not have an initializer
676 // at all, we assume it has a dynamic initializer (in another TU).
677 return G->hasInitializer() && !DynamicallyInitializedGlobals.Contains(G);
680 void AddressSanitizer::instrumentMop(Instruction *I) {
681 bool IsWrite = false;
682 Value *Addr = isInterestingMemoryAccess(I, &IsWrite);
684 if (ClOpt && ClOptGlobals) {
685 if (GlobalVariable *G = dyn_cast<GlobalVariable>(Addr)) {
686 // If initialization order checking is disabled, a simple access to a
687 // dynamically initialized global is always valid.
688 if (!CheckInitOrder || GlobalIsLinkerInitialized(G)) {
689 NumOptimizedAccessesToGlobalVar++;
693 ConstantExpr *CE = dyn_cast<ConstantExpr>(Addr);
694 if (CE && CE->isGEPWithNoNotionalOverIndexing()) {
695 if (GlobalVariable *G = dyn_cast<GlobalVariable>(CE->getOperand(0))) {
696 if (CE->getOperand(1)->isNullValue() && GlobalIsLinkerInitialized(G)) {
697 NumOptimizedAccessesToGlobalArray++;
704 Type *OrigPtrTy = Addr->getType();
705 Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();
707 assert(OrigTy->isSized());
708 uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy);
710 assert((TypeSize % 8) == 0);
713 NumInstrumentedWrites++;
715 NumInstrumentedReads++;
717 // Instrument a 1-, 2-, 4-, 8-, or 16-byte access with one check.
718 if (TypeSize == 8 || TypeSize == 16 ||
719 TypeSize == 32 || TypeSize == 64 || TypeSize == 128)
720 return instrumentAddress(I, I, Addr, TypeSize, IsWrite, 0);
721 // Instrument unusual size (but still multiple of 8).
722 // We cannot do it with a single check, so we do a 1-byte check for the first
723 // and the last bytes. We call __asan_report_*_n(addr, real_size) to be able
724 // to report the actual access size.
726 Value *LastByte = IRB.CreateIntToPtr(
727 IRB.CreateAdd(IRB.CreatePointerCast(Addr, IntptrTy),
728 ConstantInt::get(IntptrTy, TypeSize / 8 - 1)),
730 Value *Size = ConstantInt::get(IntptrTy, TypeSize / 8);
731 instrumentAddress(I, I, Addr, 8, IsWrite, Size);
732 instrumentAddress(I, I, LastByte, 8, IsWrite, Size);
735 // Validate the result of Module::getOrInsertFunction called for an interface
736 // function of AddressSanitizer. If the instrumented module defines a function
737 // with the same name, their prototypes must match, otherwise
738 // getOrInsertFunction returns a bitcast.
739 static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
740 if (isa<Function>(FuncOrBitcast)) return cast<Function>(FuncOrBitcast);
741 FuncOrBitcast->dump();
742 report_fatal_error("trying to redefine an AddressSanitizer "
743 "interface function");
746 Instruction *AddressSanitizer::generateCrashCode(
747 Instruction *InsertBefore, Value *Addr,
748 bool IsWrite, size_t AccessSizeIndex, Value *SizeArgument) {
749 IRBuilder<> IRB(InsertBefore);
750 CallInst *Call = SizeArgument
751 ? IRB.CreateCall2(AsanErrorCallbackSized[IsWrite], Addr, SizeArgument)
752 : IRB.CreateCall(AsanErrorCallback[IsWrite][AccessSizeIndex], Addr);
754 // We don't do Call->setDoesNotReturn() because the BB already has
755 // UnreachableInst at the end.
756 // This EmptyAsm is required to avoid callback merge.
757 IRB.CreateCall(EmptyAsm);
761 Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
764 size_t Granularity = 1 << Mapping.Scale;
765 // Addr & (Granularity - 1)
766 Value *LastAccessedByte = IRB.CreateAnd(
767 AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
768 // (Addr & (Granularity - 1)) + size - 1
769 if (TypeSize / 8 > 1)
770 LastAccessedByte = IRB.CreateAdd(
771 LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1));
772 // (uint8_t) ((Addr & (Granularity-1)) + size - 1)
773 LastAccessedByte = IRB.CreateIntCast(
774 LastAccessedByte, ShadowValue->getType(), false);
775 // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue
776 return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
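// E.g. for a 4-byte access at an address with (Addr & 7) == 5 and a shadow
// value of 6 (only the first 6 bytes of the granule are addressable), this
// computes 5 + 4 - 1 == 8 >= 6, so the access is reported.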
779 void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
780 Instruction *InsertBefore,
781 Value *Addr, uint32_t TypeSize,
782 bool IsWrite, Value *SizeArgument) {
783 IRBuilder<> IRB(InsertBefore);
784 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
786 Type *ShadowTy = IntegerType::get(
787 *C, std::max(8U, TypeSize >> Mapping.Scale));
788 Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);
789 Value *ShadowPtr = memToShadow(AddrLong, IRB);
790 Value *CmpVal = Constant::getNullValue(ShadowTy);
791 Value *ShadowValue = IRB.CreateLoad(
792 IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy));
794 Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal);
795 size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
796 size_t Granularity = 1 << Mapping.Scale;
797 TerminatorInst *CrashTerm = 0;
799 if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
800 TerminatorInst *CheckTerm =
801 SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
802 assert(dyn_cast<BranchInst>(CheckTerm)->isUnconditional());
803 BasicBlock *NextBB = CheckTerm->getSuccessor(0);
804 IRB.SetInsertPoint(CheckTerm);
805 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize);
806 BasicBlock *CrashBlock =
807 BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
808 CrashTerm = new UnreachableInst(*C, CrashBlock);
809 BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
810 ReplaceInstWithInst(CheckTerm, NewTerm);
812 CrashTerm = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), true);
815 Instruction *Crash = generateCrashCode(
816 CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument);
817 Crash->setDebugLoc(OrigIns->getDebugLoc());
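// The emitted check has roughly this shape:
//   if (*Shadow(Addr) != 0)
//     if (the slow-path byte-granularity check fires)  // skipped for full-granule accesses
//       __asan_report_*(Addr);                         // never returns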
820 void AddressSanitizerModule::createInitializerPoisonCalls(
821 Module &M, GlobalValue *ModuleName) {
822 // We do all of our poisoning and unpoisoning within _GLOBAL__I_a.
823 Function *GlobalInit = M.getFunction("_GLOBAL__I_a");
824 // If that function is not present, this TU contains no globals, or they have
825 // all been optimized away.
829 // Set up the arguments to our poison/unpoison functions.
830 IRBuilder<> IRB(GlobalInit->begin()->getFirstInsertionPt());
832 // Add a call to poison all external globals before the given function starts.
833 Value *ModuleNameAddr = ConstantExpr::getPointerCast(ModuleName, IntptrTy);
834 IRB.CreateCall(AsanPoisonGlobals, ModuleNameAddr);
836 // Add calls to unpoison all globals before each return instruction.
837 for (Function::iterator I = GlobalInit->begin(), E = GlobalInit->end();
839 if (ReturnInst *RI = dyn_cast<ReturnInst>(I->getTerminator())) {
840 CallInst::Create(AsanUnpoisonGlobals, "", RI);
845 bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) {
846 Type *Ty = cast<PointerType>(G->getType())->getElementType();
847 DEBUG(dbgs() << "GLOBAL: " << *G << "\n");
849 if (BL->isIn(*G)) return false;
850 if (!Ty->isSized()) return false;
851 if (!G->hasInitializer()) return false;
852 if (GlobalWasGeneratedByAsan(G)) return false; // Our own global.
853 // Touch only those globals that will not be defined in other modules.
854 // Don't handle ODR linkage types since other modules may be built w/o asan.
855 if (G->getLinkage() != GlobalVariable::ExternalLinkage &&
856 G->getLinkage() != GlobalVariable::PrivateLinkage &&
857 G->getLinkage() != GlobalVariable::InternalLinkage)
859 // Two problems with thread-locals:
860 // - The address of the main thread's copy can't be computed at link-time.
861 // - Need to poison all copies, not just the main thread's one.
862 if (G->isThreadLocal())
864 // For now, just ignore this global if the alignment is large.
865 if (G->getAlignment() > RedzoneSize()) return false;
867 // Ignore all the globals with the names starting with "\01L_OBJC_".
868 // Many of those are put into the .cstring section. The linker compresses
869 // that section by removing the spare \0s after the string terminator, so
870 // our redzones get broken.
871 if ((G->getName().find("\01L_OBJC_") == 0) ||
872 (G->getName().find("\01l_OBJC_") == 0)) {
873 DEBUG(dbgs() << "Ignoring \\01L_OBJC_* global: " << *G);
877 if (G->hasSection()) {
878 StringRef Section(G->getSection());
879 // Ignore the globals from the __OBJC section. The ObjC runtime assumes
880 // those conform to /usr/lib/objc/runtime.h, so we can't add redzones to
881 // them.
882 if ((Section.find("__OBJC,") == 0) ||
883 (Section.find("__DATA, __objc_") == 0)) {
884 DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G);
887 // See http://code.google.com/p/address-sanitizer/issues/detail?id=32
888 // Constant CFString instances are compiled in the following way:
889 // -- the string buffer is emitted into
890 // __TEXT,__cstring,cstring_literals
891 // -- the constant NSConstantString structure referencing that buffer
892 // is placed into __DATA,__cfstring
893 // Therefore there's no point in placing redzones into __DATA,__cfstring.
894 // Moreover, it causes the linker to crash on OS X 10.7
895 if (Section.find("__DATA,__cfstring") == 0) {
896 DEBUG(dbgs() << "Ignoring CFString: " << *G);
904 void AddressSanitizerModule::initializeCallbacks(Module &M) {
906 // Declare our poisoning and unpoisoning functions.
907 AsanPoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
908 kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy, NULL));
909 AsanPoisonGlobals->setLinkage(Function::ExternalLinkage);
910 AsanUnpoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
911 kAsanUnpoisonGlobalsName, IRB.getVoidTy(), NULL));
912 AsanUnpoisonGlobals->setLinkage(Function::ExternalLinkage);
913 // Declare functions that register/unregister globals.
914 AsanRegisterGlobals = checkInterfaceFunction(M.getOrInsertFunction(
915 kAsanRegisterGlobalsName, IRB.getVoidTy(),
916 IntptrTy, IntptrTy, NULL));
917 AsanRegisterGlobals->setLinkage(Function::ExternalLinkage);
918 AsanUnregisterGlobals = checkInterfaceFunction(M.getOrInsertFunction(
919 kAsanUnregisterGlobalsName,
920 IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
921 AsanUnregisterGlobals->setLinkage(Function::ExternalLinkage);
924 // This function replaces all global variables with new variables that have
925 // trailing redzones. It also creates a function that poisons
926 // redzones and inserts this function into llvm.global_ctors.
927 bool AddressSanitizerModule::runOnModule(Module &M) {
928 if (!ClGlobals) return false;
929 TD = getAnalysisIfAvailable<DataLayout>();
932 BL.reset(SpecialCaseList::createOrDie(BlacklistFile));
933 if (BL->isIn(M)) return false;
934 C = &(M.getContext());
935 int LongSize = TD->getPointerSizeInBits();
936 IntptrTy = Type::getIntNTy(*C, LongSize);
937 Mapping = getShadowMapping(M, LongSize, ZeroBaseShadow);
938 initializeCallbacks(M);
939 DynamicallyInitializedGlobals.Init(M);
941 SmallVector<GlobalVariable *, 16> GlobalsToChange;
943 for (Module::GlobalListType::iterator G = M.global_begin(),
944 E = M.global_end(); G != E; ++G) {
945 if (ShouldInstrumentGlobal(G))
946 GlobalsToChange.push_back(G);
949 size_t n = GlobalsToChange.size();
950 if (n == 0) return false;
952 // A global is described by a structure:
953 //   size_t beg;
954 //   size_t size;
955 //   size_t size_with_redzone;
956 //   const char *name;
957 //   const char *module_name;
958 //   size_t has_dynamic_init;
959 // We initialize an array of such structures and pass it to a run-time call.
960 StructType *GlobalStructTy = StructType::get(IntptrTy, IntptrTy,
962 IntptrTy, IntptrTy, NULL);
963 SmallVector<Constant *, 16> Initializers(n);
965 Function *CtorFunc = M.getFunction(kAsanModuleCtorName);
967 IRBuilder<> IRB(CtorFunc->getEntryBlock().getTerminator());
969 bool HasDynamicallyInitializedGlobals = false;
971 GlobalVariable *ModuleName = createPrivateGlobalForString(
972 M, M.getModuleIdentifier());
973 // We shouldn't merge identical module names, as this string serves as a unique
974 // module ID at runtime.
975 ModuleName->setUnnamedAddr(false);
977 for (size_t i = 0; i < n; i++) {
978 static const uint64_t kMaxGlobalRedzone = 1 << 18;
979 GlobalVariable *G = GlobalsToChange[i];
980 PointerType *PtrTy = cast<PointerType>(G->getType());
981 Type *Ty = PtrTy->getElementType();
982 uint64_t SizeInBytes = TD->getTypeAllocSize(Ty);
983 uint64_t MinRZ = RedzoneSize();
984 // MinRZ <= RZ <= kMaxGlobalRedzone
985 // while trying to make RZ ~ 1/4 of SizeInBytes.
986 uint64_t RZ = std::max(MinRZ,
987 std::min(kMaxGlobalRedzone,
988 (SizeInBytes / MinRZ / 4) * MinRZ));
989 uint64_t RightRedzoneSize = RZ;
991 if (SizeInBytes % MinRZ)
992 RightRedzoneSize += MinRZ - (SizeInBytes % MinRZ);
993 assert(((RightRedzoneSize + SizeInBytes) % MinRZ) == 0);
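// E.g. a 1000-byte global with MinRZ == 32 gets RZ == (1000 / 32 / 4) * 32 ==
// 224 bytes plus 24 bytes of padding, i.e. a 248-byte right redzone.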
994 Type *RightRedZoneTy = ArrayType::get(IRB.getInt8Ty(), RightRedzoneSize);
995 // Determine whether this global should be poisoned in initialization.
996 bool GlobalHasDynamicInitializer =
997 DynamicallyInitializedGlobals.Contains(G);
998 // Don't check initialization order if this global is blacklisted.
999 GlobalHasDynamicInitializer &= !BL->isIn(*G, "init");
1001 StructType *NewTy = StructType::get(Ty, RightRedZoneTy, NULL);
1002 Constant *NewInitializer = ConstantStruct::get(
1003 NewTy, G->getInitializer(),
1004 Constant::getNullValue(RightRedZoneTy), NULL);
1006 GlobalVariable *Name = createPrivateGlobalForString(M, G->getName());
1008 // Create a new global variable with enough space for a redzone.
1009 GlobalValue::LinkageTypes Linkage = G->getLinkage();
1010 if (G->isConstant() && Linkage == GlobalValue::PrivateLinkage)
1011 Linkage = GlobalValue::InternalLinkage;
1012 GlobalVariable *NewGlobal = new GlobalVariable(
1013 M, NewTy, G->isConstant(), Linkage,
1014 NewInitializer, "", G, G->getThreadLocalMode());
1015 NewGlobal->copyAttributesFrom(G);
1016 NewGlobal->setAlignment(MinRZ);
1019 Indices2[0] = IRB.getInt32(0);
1020 Indices2[1] = IRB.getInt32(0);
1022 G->replaceAllUsesWith(
1023 ConstantExpr::getGetElementPtr(NewGlobal, Indices2, true));
1024 NewGlobal->takeName(G);
1025 G->eraseFromParent();
1027 Initializers[i] = ConstantStruct::get(
1029 ConstantExpr::getPointerCast(NewGlobal, IntptrTy),
1030 ConstantInt::get(IntptrTy, SizeInBytes),
1031 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
1032 ConstantExpr::getPointerCast(Name, IntptrTy),
1033 ConstantExpr::getPointerCast(ModuleName, IntptrTy),
1034 ConstantInt::get(IntptrTy, GlobalHasDynamicInitializer),
1037 // Remember whether this TU has at least one dynamically initialized global.
1038 if (CheckInitOrder && GlobalHasDynamicInitializer)
1039 HasDynamicallyInitializedGlobals = true;
1041 DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
1044 ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n);
1045 GlobalVariable *AllGlobals = new GlobalVariable(
1046 M, ArrayOfGlobalStructTy, false, GlobalVariable::InternalLinkage,
1047 ConstantArray::get(ArrayOfGlobalStructTy, Initializers), "");
1049 // Create calls for poisoning before initializers run and unpoisoning after.
1050 if (CheckInitOrder && HasDynamicallyInitializedGlobals)
1051 createInitializerPoisonCalls(M, ModuleName);
1052 IRB.CreateCall2(AsanRegisterGlobals,
1053 IRB.CreatePointerCast(AllGlobals, IntptrTy),
1054 ConstantInt::get(IntptrTy, n));
1056 // We also need to unregister globals at the end, e.g., when a shared library
1057 // gets closed.
1058 Function *AsanDtorFunction = Function::Create(
1059 FunctionType::get(Type::getVoidTy(*C), false),
1060 GlobalValue::InternalLinkage, kAsanModuleDtorName, &M);
1061 BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction);
1062 IRBuilder<> IRB_Dtor(ReturnInst::Create(*C, AsanDtorBB));
1063 IRB_Dtor.CreateCall2(AsanUnregisterGlobals,
1064 IRB.CreatePointerCast(AllGlobals, IntptrTy),
1065 ConstantInt::get(IntptrTy, n));
1066 appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndCtorPriority);
1072 void AddressSanitizer::initializeCallbacks(Module &M) {
1073 IRBuilder<> IRB(*C);
1074 // Create __asan_report* callbacks.
1075 for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
1076 for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
1077 AccessSizeIndex++) {
1078 // IsWrite and TypeSize are encoded in the function name.
1079 std::string FunctionName = std::string(kAsanReportErrorTemplate) +
1080 (AccessIsWrite ? "store" : "load") + itostr(1 << AccessSizeIndex);
1081 // Each of these callbacks takes the address of the bad access as its only argument.
1082 AsanErrorCallback[AccessIsWrite][AccessSizeIndex] =
1083 checkInterfaceFunction(M.getOrInsertFunction(
1084 FunctionName, IRB.getVoidTy(), IntptrTy, NULL));
1087 AsanErrorCallbackSized[0] = checkInterfaceFunction(M.getOrInsertFunction(
1088 kAsanReportLoadN, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
1089 AsanErrorCallbackSized[1] = checkInterfaceFunction(M.getOrInsertFunction(
1090 kAsanReportStoreN, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
1092 AsanHandleNoReturnFunc = checkInterfaceFunction(M.getOrInsertFunction(
1093 kAsanHandleNoReturnName, IRB.getVoidTy(), NULL));
1094 AsanCovFunction = checkInterfaceFunction(M.getOrInsertFunction(
1095 kAsanCovName, IRB.getVoidTy(), IntptrTy, NULL));
1096 // We insert an empty inline asm after __asan_report* to avoid callback merge.
1097 EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
1098 StringRef(""), StringRef(""),
1099 /*hasSideEffects=*/true);
1102 void AddressSanitizer::emitShadowMapping(Module &M, IRBuilder<> &IRB) const {
1103 // Tell the run-time the values of the mapping offset and scale.
1104 GlobalValue *asan_mapping_offset =
1105 new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
1106 ConstantInt::get(IntptrTy, Mapping.Offset),
1107 kAsanMappingOffsetName);
1108 // Read the global, otherwise it may be optimized away.
1109 IRB.CreateLoad(asan_mapping_offset, true);
1111 GlobalValue *asan_mapping_scale =
1112 new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
1113 ConstantInt::get(IntptrTy, Mapping.Scale),
1114 kAsanMappingScaleName);
1115 // Read the global, otherwise it may be optimized away.
1116 IRB.CreateLoad(asan_mapping_scale, true);
1120 bool AddressSanitizer::doInitialization(Module &M) {
1121 // Initialize the private fields. No one has accessed them before.
1122 TD = getAnalysisIfAvailable<DataLayout>();
1126 BL.reset(SpecialCaseList::createOrDie(BlacklistFile));
1127 DynamicallyInitializedGlobals.Init(M);
1129 C = &(M.getContext());
1130 LongSize = TD->getPointerSizeInBits();
1131 IntptrTy = Type::getIntNTy(*C, LongSize);
1133 AsanCtorFunction = Function::Create(
1134 FunctionType::get(Type::getVoidTy(*C), false),
1135 GlobalValue::InternalLinkage, kAsanModuleCtorName, &M);
1136 BasicBlock *AsanCtorBB = BasicBlock::Create(*C, "", AsanCtorFunction);
1137 // call __asan_init in the module ctor.
1138 IRBuilder<> IRB(ReturnInst::Create(*C, AsanCtorBB));
1139 AsanInitFunction = checkInterfaceFunction(
1140 M.getOrInsertFunction(kAsanInitName, IRB.getVoidTy(), NULL));
1141 AsanInitFunction->setLinkage(Function::ExternalLinkage);
1142 IRB.CreateCall(AsanInitFunction);
1144 Mapping = getShadowMapping(M, LongSize, ZeroBaseShadow);
1145 emitShadowMapping(M, IRB);
1147 appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndCtorPriority);
1151 bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
1152 // For each NSObject descendant having a +load method, this method is invoked
1153 // by the ObjC runtime before any of the static constructors is called.
1154 // Therefore we need to instrument such methods with a call to __asan_init
1155 // at the beginning in order to initialize our runtime before any access to
1156 // the shadow memory.
1157 // We cannot just ignore these methods, because they may call other
1158 // instrumented functions.
1159 if (F.getName().find(" load]") != std::string::npos) {
1160 IRBuilder<> IRB(F.begin()->begin());
1161 IRB.CreateCall(AsanInitFunction);
1167 // Poor man's coverage that works with ASan.
1168 // We create a Guard boolean variable with the same linkage
1169 // as the function and inject this code into the entry block:
1170 //   if (!*Guard) {
1171 //     __sanitizer_cov(&F);
1172 //     *Guard = 1;
1173 //   }
1174 // The accesses to Guard are atomic. The rest of the logic is
1175 // in __sanitizer_cov (it's fine to call it more than once).
1177 // This coverage implementation provides very limited data:
1178 // it only tells if a given function was ever executed.
1179 // No counters, no per-basic-block or per-edge data.
1180 // But for many use cases this is what we need and the added slowdown
1181 // is negligible. This simple implementation will probably be obsoleted
1182 // by the upcoming Clang-based coverage implementation.
1183 // By having it here and now we hope to
1184 // a) get the functionality to users earlier and
1185 // b) collect usage statistics to help improve Clang coverage design.
1186 bool AddressSanitizer::InjectCoverage(Function &F) {
1187 if (!ClCoverage) return false;
1188 IRBuilder<> IRB(F.getEntryBlock().getFirstInsertionPt());
1189 Type *Int8Ty = IRB.getInt8Ty();
1190 GlobalVariable *Guard = new GlobalVariable(
1191 *F.getParent(), Int8Ty, false, GlobalValue::PrivateLinkage,
1192 Constant::getNullValue(Int8Ty), "__asan_gen_cov_" + F.getName());
1193 LoadInst *Load = IRB.CreateLoad(Guard);
1194 Load->setAtomic(Monotonic);
1195 Load->setAlignment(1);
1196 Value *Cmp = IRB.CreateICmpEQ(Constant::getNullValue(Int8Ty), Load);
1197 Instruction *Ins = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
1198 IRB.SetInsertPoint(Ins);
1199 // We pass &F to __sanitizer_cov. We could avoid this and rely on
1200 // GET_CALLER_PC, but having the PC of the first instruction is just nice.
1201 IRB.CreateCall(AsanCovFunction, IRB.CreatePointerCast(&F, IntptrTy));
1202 StoreInst *Store = IRB.CreateStore(ConstantInt::get(Int8Ty, 1), Guard);
1203 Store->setAtomic(Monotonic);
1204 Store->setAlignment(1);
1208 bool AddressSanitizer::runOnFunction(Function &F) {
1209 if (BL->isIn(F)) return false;
1210 if (&F == AsanCtorFunction) return false;
1211 if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
1212 DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
1213 initializeCallbacks(*F.getParent());
1215 // If needed, insert __asan_init before checking for SanitizeAddress attr.
1216 maybeInsertAsanInitAtFunctionEntry(F);
1218 if (!F.hasFnAttribute(Attribute::SanitizeAddress))
1221 if (!ClDebugFunc.empty() && ClDebugFunc != F.getName())
1224 // We want to instrument every address only once per basic block (unless there
1225 // are calls between uses).
1226 SmallSet<Value*, 16> TempsToInstrument;
1227 SmallVector<Instruction*, 16> ToInstrument;
1228 SmallVector<Instruction*, 8> NoReturnCalls;
1232 // Fill the set of memory operations to instrument.
1233 for (Function::iterator FI = F.begin(), FE = F.end();
1235 TempsToInstrument.clear();
1236 int NumInsnsPerBB = 0;
1237 for (BasicBlock::iterator BI = FI->begin(), BE = FI->end();
1239 if (LooksLikeCodeInBug11395(BI)) return false;
1240 if (Value *Addr = isInterestingMemoryAccess(BI, &IsWrite)) {
1241 if (ClOpt && ClOptSameTemp) {
1242 if (!TempsToInstrument.insert(Addr))
1243 continue; // We've seen this temp in the current BB.
1245 } else if (isa<MemIntrinsic>(BI) && ClMemIntrin) {
1248 if (isa<AllocaInst>(BI))
1252 // A call inside BB.
1253 TempsToInstrument.clear();
1254 if (CS.doesNotReturn())
1255 NoReturnCalls.push_back(CS.getInstruction());
1259 ToInstrument.push_back(BI);
1261 if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB)
1266 Function *UninstrumentedDuplicate = 0;
1267 bool LikelyToInstrument =
1268 !NoReturnCalls.empty() || !ToInstrument.empty() || (NumAllocas > 0);
1269 if (ClKeepUninstrumented && LikelyToInstrument) {
1270 ValueToValueMapTy VMap;
1271 UninstrumentedDuplicate = CloneFunction(&F, VMap, false);
1272 UninstrumentedDuplicate->removeFnAttr(Attribute::SanitizeAddress);
1273 UninstrumentedDuplicate->setName("NOASAN_" + F.getName());
1274 F.getParent()->getFunctionList().push_back(UninstrumentedDuplicate);
1278 int NumInstrumented = 0;
1279 for (size_t i = 0, n = ToInstrument.size(); i != n; i++) {
1280 Instruction *Inst = ToInstrument[i];
1281 if (ClDebugMin < 0 || ClDebugMax < 0 ||
1282 (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) {
1283 if (isInterestingMemoryAccess(Inst, &IsWrite))
1284 instrumentMop(Inst);
1286 instrumentMemIntrinsic(cast<MemIntrinsic>(Inst));
1291 FunctionStackPoisoner FSP(F, *this);
1292 bool ChangedStack = FSP.runOnFunction();
1294 // We must unpoison the stack before every NoReturn call (throw, _exit, etc).
1295 // See e.g. http://code.google.com/p/address-sanitizer/issues/detail?id=37
1296 for (size_t i = 0, n = NoReturnCalls.size(); i != n; i++) {
1297 Instruction *CI = NoReturnCalls[i];
1298 IRBuilder<> IRB(CI);
1299 IRB.CreateCall(AsanHandleNoReturnFunc);
1302 bool res = NumInstrumented > 0 || ChangedStack || !NoReturnCalls.empty();
1304 if (InjectCoverage(F))
1307 DEBUG(dbgs() << "ASAN done instrumenting: " << res << " " << F << "\n");
1309 if (ClKeepUninstrumented) {
1311 // No instrumentation is done, no need for the duplicate.
1312 if (UninstrumentedDuplicate)
1313 UninstrumentedDuplicate->eraseFromParent();
1315 // The function was instrumented. We must have the duplicate.
1316 assert(UninstrumentedDuplicate);
1317 UninstrumentedDuplicate->setSection("NOASAN");
1318 assert(!F.hasSection());
1319 F.setSection("ASAN");
1326 static uint64_t ValueForPoison(uint64_t PoisonByte, size_t ShadowRedzoneSize) {
1327 if (ShadowRedzoneSize == 1) return PoisonByte;
1328 if (ShadowRedzoneSize == 2) return (PoisonByte << 8) + PoisonByte;
1329 if (ShadowRedzoneSize == 4)
1330 return (PoisonByte << 24) + (PoisonByte << 16) +
1331 (PoisonByte << 8) + (PoisonByte);
1332 llvm_unreachable("ShadowRedzoneSize is either 1, 2 or 4");
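// E.g. ValueForPoison(0xf3, 4) == 0xf3f3f3f3.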
1335 static void PoisonShadowPartialRightRedzone(uint8_t *Shadow,
1338 size_t ShadowGranularity,
1340 for (size_t i = 0; i < RZSize;
1341 i+= ShadowGranularity, Shadow++) {
1342 if (i + ShadowGranularity <= Size) {
1343 *Shadow = 0; // fully addressable
1344 } else if (i >= Size) {
1345 *Shadow = Magic; // unaddressable
1347 *Shadow = Size - i; // first Size-i bytes are addressable
1352 // Workaround for bug 11395: we don't want to instrument stack in functions
1353 // with large assembly blobs (32-bit only), otherwise reg alloc may crash.
1354 // FIXME: remove once the bug 11395 is fixed.
1355 bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
1356 if (LongSize != 32) return false;
1357 CallInst *CI = dyn_cast<CallInst>(I);
1358 if (!CI || !CI->isInlineAsm()) return false;
1359 if (CI->getNumArgOperands() <= 5) return false;
1360 // We have inline assembly with quite a few arguments.
1364 void FunctionStackPoisoner::initializeCallbacks(Module &M) {
1365 IRBuilder<> IRB(*C);
1366 for (int i = 0; i <= kMaxAsanStackMallocSizeClass; i++) {
1367 std::string Suffix = itostr(i);
1368 AsanStackMallocFunc[i] = checkInterfaceFunction(
1369 M.getOrInsertFunction(kAsanStackMallocNameTemplate + Suffix, IntptrTy,
1370 IntptrTy, IntptrTy, NULL));
1371 AsanStackFreeFunc[i] = checkInterfaceFunction(M.getOrInsertFunction(
1372 kAsanStackFreeNameTemplate + Suffix, IRB.getVoidTy(), IntptrTy,
1373 IntptrTy, IntptrTy, NULL));
1375 AsanPoisonStackMemoryFunc = checkInterfaceFunction(M.getOrInsertFunction(
1376 kAsanPoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
1377 AsanUnpoisonStackMemoryFunc = checkInterfaceFunction(M.getOrInsertFunction(
1378 kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
1381 void FunctionStackPoisoner::poisonRedZones(
1382 const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> &IRB, Value *ShadowBase,
1384 size_t ShadowRZSize = RedzoneSize() >> Mapping.Scale;
1385 assert(ShadowRZSize >= 1 && ShadowRZSize <= 4);
1386 Type *RZTy = Type::getIntNTy(*C, ShadowRZSize * 8);
1387 Type *RZPtrTy = PointerType::get(RZTy, 0);
1389 Value *PoisonLeft = ConstantInt::get(RZTy,
1390 ValueForPoison(DoPoison ? kAsanStackLeftRedzoneMagic : 0LL, ShadowRZSize));
1391 Value *PoisonMid = ConstantInt::get(RZTy,
1392 ValueForPoison(DoPoison ? kAsanStackMidRedzoneMagic : 0LL, ShadowRZSize));
1393 Value *PoisonRight = ConstantInt::get(RZTy,
1394 ValueForPoison(DoPoison ? kAsanStackRightRedzoneMagic : 0LL, ShadowRZSize));
1396 // poison the first red zone.
1397 IRB.CreateStore(PoisonLeft, IRB.CreateIntToPtr(ShadowBase, RZPtrTy));
1399 // poison all other red zones.
1400 uint64_t Pos = RedzoneSize();
1401 for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
1402 AllocaInst *AI = AllocaVec[i];
1403 uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
1404 uint64_t AlignedSize = getAlignedAllocaSize(AI);
1405 assert(AlignedSize - SizeInBytes < RedzoneSize());
1410 assert(ShadowBase->getType() == IntptrTy);
1411 if (SizeInBytes < AlignedSize) {
1412 // Poison the partial redzone at right
1413 Ptr = IRB.CreateAdd(
1414 ShadowBase, ConstantInt::get(IntptrTy,
1415 (Pos >> Mapping.Scale) - ShadowRZSize));
1416 size_t AddressableBytes = RedzoneSize() - (AlignedSize - SizeInBytes);
1417 uint32_t Poison = 0;
1419 PoisonShadowPartialRightRedzone((uint8_t*)&Poison, AddressableBytes,
1421 1ULL << Mapping.Scale,
1422 kAsanStackPartialRedzoneMagic);
1424 ASan.TD->isLittleEndian()
1425 ? support::endian::byte_swap<uint32_t, support::little>(Poison)
1426 : support::endian::byte_swap<uint32_t, support::big>(Poison);
1428 Value *PartialPoison = ConstantInt::get(RZTy, Poison);
1429 IRB.CreateStore(PartialPoison, IRB.CreateIntToPtr(Ptr, RZPtrTy));
1432 // Poison the full redzone at right.
1433 Ptr = IRB.CreateAdd(ShadowBase,
1434 ConstantInt::get(IntptrTy, Pos >> Mapping.Scale));
1435 bool LastAlloca = (i == AllocaVec.size() - 1);
1436 Value *Poison = LastAlloca ? PoisonRight : PoisonMid;
1437 IRB.CreateStore(Poison, IRB.CreateIntToPtr(Ptr, RZPtrTy));
1439 Pos += RedzoneSize();
1443 // Fake stack allocator (asan_fake_stack.h) has 11 size classes,
1444 // one for every power of 2 from kMinStackMallocSize to kMaxStackMallocSize.
1445 static int StackMallocSizeClass(uint64_t LocalStackSize) {
1446 assert(LocalStackSize <= kMaxStackMallocSize);
1447 uint64_t MaxSize = kMinStackMallocSize;
1448 for (int i = 0; ; i++, MaxSize *= 2)
1449 if (LocalStackSize <= MaxSize)
1451 llvm_unreachable("impossible LocalStackSize");
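// E.g. a 100-byte frame falls into class 1 (<= 128 bytes) and a 4096-byte
// frame into class 6.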
1454 // Set Size bytes starting from ShadowBase to kAsanStackAfterReturnMagic.
1455 // We cannot use the MemSet intrinsic because it may end up calling the actual
1456 // memset. Size is a multiple of 8.
1457 // Currently this generates 8-byte stores on x86_64; it may be better to
1458 // generate wider stores.
1459 void FunctionStackPoisoner::SetShadowToStackAfterReturnInlined(
1460 IRBuilder<> &IRB, Value *ShadowBase, int Size) {
1461 assert(!(Size % 8));
1462 assert(kAsanStackAfterReturnMagic == 0xf5);
1463 for (int i = 0; i < Size; i += 8) {
1464 Value *p = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
1465 IRB.CreateStore(ConstantInt::get(IRB.getInt64Ty(), 0xf5f5f5f5f5f5f5f5ULL),
1466 IRB.CreateIntToPtr(p, IRB.getInt64Ty()->getPointerTo()));
1470 void FunctionStackPoisoner::poisonStack() {
1471 uint64_t LocalStackSize = TotalStackSize +
1472 (AllocaVec.size() + 1) * RedzoneSize();
  bool DoStackMalloc = ASan.CheckUseAfterReturn
      && LocalStackSize <= kMaxStackMallocSize;
  int StackMallocIdx = -1;

  assert(AllocaVec.size() > 0);
  Instruction *InsBefore = AllocaVec[0];
  IRBuilder<> IRB(InsBefore);

  Type *ByteArrayTy = ArrayType::get(IRB.getInt8Ty(), LocalStackSize);
  AllocaInst *MyAlloca =
      new AllocaInst(ByteArrayTy, "MyAlloca", InsBefore);
  if (ClRealignStack && StackAlignment < RedzoneSize())
    StackAlignment = RedzoneSize();
  MyAlloca->setAlignment(StackAlignment);
  assert(MyAlloca->isStaticAlloca());
  Value *OrigStackBase = IRB.CreatePointerCast(MyAlloca, IntptrTy);
  Value *LocalStackBase = OrigStackBase;
  if (DoStackMalloc) {
    // LocalStackBase = OrigStackBase
    // if (__asan_option_detect_stack_use_after_return)
    //   LocalStackBase = __asan_stack_malloc_N(LocalStackSize, OrigStackBase);
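    // A rough sketch of the IR emitted below (hypothetical value names,
    // N == StackMallocIdx):
    //   %flag = load i32* @__asan_option_detect_stack_use_after_return
    //   %cmp  = icmp ne i32 %flag, 0
    //   br i1 %cmp, label %uar, label %cont
    // uar:
    //   %fake = call i64 @__asan_stack_malloc_N(i64 LocalStackSize, i64 %orig)
    //   br label %cont
    // cont:
    //   %base = phi i64 [ %orig, ... ], [ %fake, %uar ]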
    StackMallocIdx = StackMallocSizeClass(LocalStackSize);
    assert(StackMallocIdx <= kMaxAsanStackMallocSizeClass);
    Constant *OptionDetectUAR = F.getParent()->getOrInsertGlobal(
        kAsanOptionDetectUAR, IRB.getInt32Ty());
    Value *Cmp = IRB.CreateICmpNE(IRB.CreateLoad(OptionDetectUAR),
                                  Constant::getNullValue(IRB.getInt32Ty()));
    TerminatorInst *Term =
        SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
    BasicBlock *CmpBlock = cast<Instruction>(Cmp)->getParent();
    IRBuilder<> IRBIf(Term);
    LocalStackBase = IRBIf.CreateCall2(
        AsanStackMallocFunc[StackMallocIdx],
        ConstantInt::get(IntptrTy, LocalStackSize), OrigStackBase);
    BasicBlock *SetBlock = cast<Instruction>(LocalStackBase)->getParent();
    IRB.SetInsertPoint(InsBefore);
    PHINode *Phi = IRB.CreatePHI(IntptrTy, 2);
    Phi->addIncoming(OrigStackBase, CmpBlock);
    Phi->addIncoming(LocalStackBase, SetBlock);
    LocalStackBase = Phi;
  }
  // This string will be parsed by the run-time (DescribeAddressIfStack).
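  // The format is "<NumAllocas> (<Offset> <Size> <NameLen> <Name> )*"; e.g.
  // "1 32 10 3 buf " would describe a single 10-byte alloca %buf at offset 32
  // (hypothetical values).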
  SmallString<2048> StackDescriptionStorage;
  raw_svector_ostream StackDescription(StackDescriptionStorage);
  StackDescription << AllocaVec.size() << " ";

  // Insert poison calls for lifetime intrinsics for alloca.
  bool HavePoisonedAllocas = false;
  for (size_t i = 0, n = AllocaPoisonCallVec.size(); i < n; i++) {
    const AllocaPoisonCall &APC = AllocaPoisonCallVec[i];
    assert(APC.InsBefore);
    IRBuilder<> IRB(APC.InsBefore);
    poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
    HavePoisonedAllocas |= APC.DoPoison;
  }
  uint64_t Pos = RedzoneSize();
  // Replace Alloca instructions with base+offset.
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
    AllocaInst *AI = AllocaVec[i];
    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
    StringRef Name = AI->getName();
    StackDescription << Pos << " " << SizeInBytes << " "
                     << Name.size() << " " << Name << " ";
    uint64_t AlignedSize = getAlignedAllocaSize(AI);
    assert((AlignedSize % RedzoneSize()) == 0);
    Value *NewAllocaPtr = IRB.CreateIntToPtr(
        IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, Pos)),
        AI->getType());
    replaceDbgDeclareForAlloca(AI, NewAllocaPtr, DIB);
    AI->replaceAllUsesWith(NewAllocaPtr);
    Pos += AlignedSize + RedzoneSize();
  }
  assert(Pos == LocalStackSize);
  // The left-most redzone has enough space for at least 4 pointers.
  // Write the Magic value to redzone[0].
  Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
  IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
                  BasePlus0);
  // Write the frame description constant to redzone[1].
  Value *BasePlus1 = IRB.CreateIntToPtr(
      IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, ASan.LongSize/8)),
      IntptrPtrTy);
  GlobalVariable *StackDescriptionGlobal =
      createPrivateGlobalForString(*F.getParent(), StackDescription.str());
  Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal,
                                             IntptrTy);
  IRB.CreateStore(Description, BasePlus1);
  // Write the PC to redzone[2].
  Value *BasePlus2 = IRB.CreateIntToPtr(
      IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy,
                                                     2 * ASan.LongSize/8)),
      IntptrPtrTy);
  IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
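  // The left redzone header now looks like this (one word == ASan.LongSize/8
  // bytes):
  //   word 0: kCurrentStackFrameMagic
  //   word 1: address of the frame description string
  //   word 2: PC of the current function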
  // Poison the stack redzones at the entry.
  Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
  poisonRedZones(AllocaVec, IRB, ShadowBase, true);

  // Unpoison the stack before all ret instructions.
  for (size_t i = 0, n = RetVec.size(); i < n; i++) {
    Instruction *Ret = RetVec[i];
    IRBuilder<> IRBRet(Ret);
    // Mark the current frame as retired.
    IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
                       BasePlus0);
    // Unpoison the stack.
    poisonRedZones(AllocaVec, IRBRet, ShadowBase, false);
    if (DoStackMalloc) {
      assert(StackMallocIdx >= 0);
      // In use-after-return mode, mark the whole stack frame unaddressable.
      if (StackMallocIdx <= 4) {
        // For small sizes inline the whole thing:
        // if LocalStackBase != OrigStackBase:
        //     memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
        //     **SavedFlagPtr(LocalStackBase) = 0
        // FIXME: if LocalStackBase != OrigStackBase don't call poisonRedZones.
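        // (As a reading aid: the run time stores, in the last word of the fake
        // frame, a pointer to the frame's "in use" flag; writing 0 through it
        // below releases the fake frame. This is a sketch of the contract
        // described in asan_fake_stack.h, not restated here in full.)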
        Value *Cmp = IRBRet.CreateICmpNE(LocalStackBase, OrigStackBase);
        TerminatorInst *PoisonTerm =
            SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
        IRBuilder<> IRBPoison(PoisonTerm);
        int ClassSize = kMinStackMallocSize << StackMallocIdx;
        SetShadowToStackAfterReturnInlined(IRBPoison, ShadowBase,
                                           ClassSize >> Mapping.Scale);
        Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
            LocalStackBase,
            ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
        Value *SavedFlagPtr = IRBPoison.CreateLoad(
            IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
        IRBPoison.CreateStore(
            Constant::getNullValue(IRBPoison.getInt8Ty()),
            IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
      } else {
        // For larger frames call __asan_stack_free_*.
        IRBRet.CreateCall3(AsanStackFreeFunc[StackMallocIdx], LocalStackBase,
                           ConstantInt::get(IntptrTy, LocalStackSize),
                           OrigStackBase);
      }
    } else if (HavePoisonedAllocas) {
      // If we poisoned some allocas in llvm.lifetime analysis,
      // unpoison the whole stack frame now.
      assert(LocalStackBase == OrigStackBase);
      poisonAlloca(LocalStackBase, LocalStackSize, IRBRet, false);
    }
  }

  // We are done. Remove the old unused alloca instructions.
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++)
    AllocaVec[i]->eraseFromParent();
}
void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
                                         IRBuilder<> &IRB, bool DoPoison) {
  // For now just insert the call to the ASan runtime.
  Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
  Value *SizeArg = ConstantInt::get(IntptrTy, Size);
  IRB.CreateCall2(DoPoison ? AsanPoisonStackMemoryFunc
                           : AsanUnpoisonStackMemoryFunc,
                  AddrArg, SizeArg);
}
// Handling llvm.lifetime intrinsics for a given %alloca:
// (1) collect all llvm.lifetime.xxx(%size, %value) describing the alloca.
// (2) if %size is constant, poison memory for llvm.lifetime.end (to detect
//     invalid accesses) and unpoison it for llvm.lifetime.start (the memory
//     could be poisoned by a previous llvm.lifetime.end instruction, as the
//     variable may go in and out of scope several times, e.g. in loops).
// (3) if we poisoned at least one %alloca in a function,
//     unpoison the whole stack frame at function exit.
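// For example, the intrinsics handled here look like (hypothetical IR):
//   %buf = alloca [32 x i8]
//   %p = bitcast [32 x i8]* %buf to i8*
//   call void @llvm.lifetime.start(i64 32, i8* %p)   ; unpoison %buf here
//   ...
//   call void @llvm.lifetime.end(i64 32, i8* %p)     ; poison %buf here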
AllocaInst *FunctionStackPoisoner::findAllocaForValue(Value *V) {
  if (AllocaInst *AI = dyn_cast<AllocaInst>(V))
    // We're interested only in allocas we can handle.
    return isInterestingAlloca(*AI) ? AI : 0;
  // See if we've already calculated (or started to calculate) alloca for a
  // given value.
  AllocaForValueMapTy::iterator I = AllocaForValue.find(V);
  if (I != AllocaForValue.end())
    return I->second;
  // Store 0 while we're calculating alloca for value V to avoid
  // infinite recursion if the value references itself.
  AllocaForValue[V] = 0;
  AllocaInst *Res = 0;
  if (CastInst *CI = dyn_cast<CastInst>(V))
    Res = findAllocaForValue(CI->getOperand(0));
  else if (PHINode *PN = dyn_cast<PHINode>(V)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *IncValue = PN->getIncomingValue(i);
      // Allow self-referencing phi-nodes.
      if (IncValue == PN) continue;
      AllocaInst *IncValueAI = findAllocaForValue(IncValue);
      // AI for incoming values should exist and should all be equal.
      if (IncValueAI == 0 || (Res != 0 && IncValueAI != Res))
        return 0;
      Res = IncValueAI;
    }
  }
  if (Res != 0)
    AllocaForValue[V] = Res;