X-Git-Url: http://demsky.eecs.uci.edu/git/?a=blobdiff_plain;f=lib%2FTransforms%2FInstrumentation%2FAddressSanitizer.cpp;h=c03075c9ea06baf4101a53e9c0b53f25d767df28;hb=6d49eafb005080faf074fafb149177da8c501f83;hp=77e9e0ea13d7cf922fa441a99ca0708e17fc7766;hpb=8cc5f7cd59c69250ab3b6a68e38405dcdb6a4b25;p=oota-llvm.git

diff --git a/lib/Transforms/Instrumentation/AddressSanitizer.cpp b/lib/Transforms/Instrumentation/AddressSanitizer.cpp
index 77e9e0ea13d..c03075c9ea0 100644
--- a/lib/Transforms/Instrumentation/AddressSanitizer.cpp
+++ b/lib/Transforms/Instrumentation/AddressSanitizer.cpp
@@ -41,8 +41,8 @@
 #include "llvm/Support/DataTypes.h"
 #include "llvm/Support/Debug.h"
 #include "llvm/Support/Endian.h"
-#include "llvm/Support/raw_ostream.h"
 #include "llvm/Support/system_error.h"
+#include "llvm/Transforms/Utils/ASanStackFrameLayout.h"
 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
 #include "llvm/Transforms/Utils/Cloning.h"
 #include "llvm/Transforms/Utils/Local.h"
@@ -93,11 +93,6 @@ static const char *const kAsanUnpoisonStackMemoryName =
 static const char *const kAsanOptionDetectUAR =
     "__asan_option_detect_stack_use_after_return";
 
-// These constants must match the definitions in the run-time library.
-static const int kAsanStackLeftRedzoneMagic = 0xf1;
-static const int kAsanStackMidRedzoneMagic = 0xf2;
-static const int kAsanStackRightRedzoneMagic = 0xf3;
-static const int kAsanStackPartialRedzoneMagic = 0xf4;
 #ifndef NDEBUG
 static const int kAsanStackAfterReturnMagic = 0xf5;
 #endif
@@ -141,8 +136,9 @@ static cl::opt<bool> ClInitializers("asan-initialization-order",
        cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(false));
 static cl::opt<bool> ClMemIntrin("asan-memintrin",
        cl::desc("Handle memset/memcpy/memmove"), cl::Hidden, cl::init(true));
-static cl::opt<bool> ClRealignStack("asan-realign-stack",
-       cl::desc("Realign stack to 32"), cl::Hidden, cl::init(true));
+static cl::opt<unsigned> ClRealignStack("asan-realign-stack",
+       cl::desc("Realign stack to the value of this flag (power of two)"),
+       cl::Hidden, cl::init(32));
 static cl::opt<std::string> ClBlacklistFile("asan-blacklist",
        cl::desc("File containing the list of objects to ignore "
                 "during instrumentation"), cl::Hidden);
@@ -238,8 +234,7 @@ struct ShadowMapping {
   bool OrShadowOffset;
 };
 
-static ShadowMapping getShadowMapping(const Module &M, int LongSize,
-                                      bool ZeroBaseShadow) {
+static ShadowMapping getShadowMapping(const Module &M, int LongSize) {
   llvm::Triple TargetTriple(M.getTargetTriple());
   bool IsAndroid = TargetTriple.getEnvironment() == llvm::Triple::Android;
   bool IsMacOSX = TargetTriple.getOS() == llvm::Triple::MacOSX;
@@ -256,15 +251,15 @@ static ShadowMapping getShadowMapping(const Module &M, int LongSize,
   // 1/8-th of the address space.
   Mapping.OrShadowOffset = !IsPPC64 && !ClShort64BitOffset;
-  Mapping.Offset = (IsAndroid || ZeroBaseShadow) ? 0 :
+  Mapping.Offset = IsAndroid ? 0 :
       (LongSize == 32 ?
        (IsMIPS32 ? kMIPS32_ShadowOffset32 : kDefaultShadowOffset32) :
        IsPPC64 ? kPPC64_ShadowOffset64 : kDefaultShadowOffset64);
-  if (!ZeroBaseShadow && ClShort64BitOffset && IsX86_64 && !IsMacOSX) {
+  if (!IsAndroid && ClShort64BitOffset && IsX86_64 && !IsMacOSX) {
     assert(LongSize == 64);
     Mapping.Offset = kDefaultShort64bitShadowOffset;
   }
-  if (!ZeroBaseShadow && ClMappingOffsetLog >= 0) {
+  if (!IsAndroid && ClMappingOffsetLog >= 0) {
     // Zero offset log is the special case.
     Mapping.Offset = (ClMappingOffsetLog == 0) ?
         0 : 1ULL << ClMappingOffsetLog;
   }
@@ -288,15 +283,13 @@ struct AddressSanitizer : public FunctionPass {
   AddressSanitizer(bool CheckInitOrder = true,
                    bool CheckUseAfterReturn = false,
                    bool CheckLifetime = false,
-                   StringRef BlacklistFile = StringRef(),
-                   bool ZeroBaseShadow = false)
+                   StringRef BlacklistFile = StringRef())
       : FunctionPass(ID),
         CheckInitOrder(CheckInitOrder || ClInitializers),
         CheckUseAfterReturn(CheckUseAfterReturn || ClUseAfterReturn),
         CheckLifetime(CheckLifetime || ClCheckLifetime),
         BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
-                                            : BlacklistFile),
-        ZeroBaseShadow(ZeroBaseShadow) {}
+                                            : BlacklistFile) {}
   virtual const char *getPassName() const {
     return "AddressSanitizerFunctionPass";
   }
@@ -333,7 +326,6 @@ struct AddressSanitizer : public FunctionPass {
   bool CheckUseAfterReturn;
   bool CheckLifetime;
   SmallString<64> BlacklistFile;
-  bool ZeroBaseShadow;
 
   LLVMContext *C;
   DataLayout *TD;
@@ -358,13 +350,11 @@ struct AddressSanitizer : public FunctionPass {
 class AddressSanitizerModule : public ModulePass {
  public:
   AddressSanitizerModule(bool CheckInitOrder = true,
-                         StringRef BlacklistFile = StringRef(),
-                         bool ZeroBaseShadow = false)
+                         StringRef BlacklistFile = StringRef())
       : ModulePass(ID),
         CheckInitOrder(CheckInitOrder || ClInitializers),
         BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
-                                            : BlacklistFile),
-        ZeroBaseShadow(ZeroBaseShadow) {}
+                                            : BlacklistFile) {}
   bool runOnModule(Module &M);
   static char ID;  // Pass identification, replacement for typeid
   virtual const char *getPassName() const {
@@ -376,13 +366,12 @@ class AddressSanitizerModule : public ModulePass {
   bool ShouldInstrumentGlobal(GlobalVariable *G);
   void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName);
-  size_t RedzoneSize() const {
+  size_t MinRedzoneSizeForGlobal() const {
     return RedzoneSizeForScale(Mapping.Scale);
   }
 
   bool CheckInitOrder;
   SmallString<64> BlacklistFile;
-  bool ZeroBaseShadow;
   OwningPtr<SpecialCaseList> BL;
   SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;
@@ -416,7 +405,6 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
   SmallVector<AllocaInst*, 16> AllocaVec;
   SmallVector<Instruction*, 8> RetVec;
-  uint64_t TotalStackSize;
   unsigned StackAlignment;
 
   Function *AsanStackMallocFunc[kMaxAsanStackMallocSizeClass + 1],
@@ -426,6 +414,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
   // Stores a place and arguments of poisoning/unpoisoning call for alloca.
   struct AllocaPoisonCall {
     IntrinsicInst *InsBefore;
+    AllocaInst *AI;
     uint64_t Size;
     bool DoPoison;
   };
@@ -439,7 +428,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
       : F(F), ASan(ASan), DIB(*F.getParent()), C(ASan.C),
         IntptrTy(ASan.IntptrTy), IntptrPtrTy(PointerType::get(IntptrTy, 0)),
         Mapping(ASan.Mapping),
-        TotalStackSize(0), StackAlignment(1 << Mapping.Scale) {}
+        StackAlignment(1 << Mapping.Scale) {}
 
   bool runOnFunction() {
     if (!ClStack) return false;
@@ -478,8 +467,6 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
     StackAlignment = std::max(StackAlignment, AI.getAlignment());
     AllocaVec.push_back(&AI);
-    uint64_t AlignedSize = getAlignedAllocaSize(&AI);
-    TotalStackSize += AlignedSize;
   }
 
   /// \brief Collect lifetime intrinsic calls to check for use-after-scope
@@ -504,7 +491,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
     AllocaInst *AI = findAllocaForValue(II.getArgOperand(1));
     if (!AI) return;
     bool DoPoison = (ID == Intrinsic::lifetime_end);
-    AllocaPoisonCall APC = {&II, SizeValue, DoPoison};
+    AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
     AllocaPoisonCallVec.push_back(APC);
   }
 
@@ -513,31 +500,20 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
   // Check if we want (and can) handle this alloca.
   bool isInterestingAlloca(AllocaInst &AI) const {
-    return (!AI.isArrayAllocation() &&
-            AI.isStaticAlloca() &&
-            AI.getAlignment() <= RedzoneSize() &&
-            AI.getAllocatedType()->isSized());
+    return (!AI.isArrayAllocation() && AI.isStaticAlloca() &&
+            AI.getAllocatedType()->isSized() &&
+            // alloca() may be called with 0 size, ignore it.
+            getAllocaSizeInBytes(&AI) > 0);
   }
 
-  size_t RedzoneSize() const {
-    return RedzoneSizeForScale(Mapping.Scale);
-  }
   uint64_t getAllocaSizeInBytes(AllocaInst *AI) const {
     Type *Ty = AI->getAllocatedType();
     uint64_t SizeInBytes = ASan.TD->getTypeAllocSize(Ty);
     return SizeInBytes;
   }
-  uint64_t getAlignedSize(uint64_t SizeInBytes) const {
-    size_t RZ = RedzoneSize();
-    return ((SizeInBytes + RZ - 1) / RZ) * RZ;
-  }
-  uint64_t getAlignedAllocaSize(AllocaInst *AI) const {
-    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
-    return getAlignedSize(SizeInBytes);
-  }
   /// Finds alloca where the value comes from.
   AllocaInst *findAllocaForValue(Value *V);
-  void poisonRedZones(const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> &IRB,
+  void poisonRedZones(const ArrayRef<uint8_t> ShadowBytes, IRBuilder<> &IRB,
                       Value *ShadowBase, bool DoPoison);
   void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);
 
@@ -553,9 +529,9 @@ INITIALIZE_PASS(AddressSanitizer, "asan",
     false, false)
 FunctionPass *llvm::createAddressSanitizerFunctionPass(
     bool CheckInitOrder, bool CheckUseAfterReturn, bool CheckLifetime,
-    StringRef BlacklistFile, bool ZeroBaseShadow) {
+    StringRef BlacklistFile) {
   return new AddressSanitizer(CheckInitOrder, CheckUseAfterReturn,
-                              CheckLifetime, BlacklistFile, ZeroBaseShadow);
+                              CheckLifetime, BlacklistFile);
 }
 
 char AddressSanitizerModule::ID = 0;
@@ -563,9 +539,8 @@ INITIALIZE_PASS(AddressSanitizerModule, "asan-module",
     "AddressSanitizer: detects use-after-free and out-of-bounds bugs."
     "ModulePass", false, false)
 ModulePass *llvm::createAddressSanitizerModulePass(
-    bool CheckInitOrder, StringRef BlacklistFile, bool ZeroBaseShadow) {
-  return new AddressSanitizerModule(CheckInitOrder, BlacklistFile,
-                                    ZeroBaseShadow);
+    bool CheckInitOrder, StringRef BlacklistFile) {
+  return new AddressSanitizerModule(CheckInitOrder, BlacklistFile);
 }
 
 static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
@@ -575,12 +550,22 @@ static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
 }
 
 // \brief Create a constant for Str so that we can pass it to the run-time lib.
-static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str) {
+static GlobalVariable *createPrivateGlobalForString(
+    Module &M, StringRef Str, bool AllowMerging) {
   Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str);
-  GlobalVariable *GV = new GlobalVariable(M, StrConst->getType(), true,
-                                          GlobalValue::InternalLinkage, StrConst,
-                                          kAsanGenPrefix);
-  GV->setUnnamedAddr(true);  // Ok to merge these.
+  // For module-local strings that can be merged with another one we set the
+  // private linkage and the unnamed_addr attribute.
+  // Non-mergeable strings are made linker_private to remove them from the
+  // symbol table. "private" linkage doesn't work for Darwin, where the
+  // "L"-prefixed globals end up in __TEXT,__const section
+  // (see http://llvm.org/bugs/show_bug.cgi?id=17976 for more info).
+  GlobalValue::LinkageTypes linkage =
+      AllowMerging ? GlobalValue::PrivateLinkage
+                   : GlobalValue::LinkerPrivateLinkage;
+  GlobalVariable *GV =
+      new GlobalVariable(M, StrConst->getType(), true,
+                         linkage, StrConst, kAsanGenPrefix);
+  if (AllowMerging) GV->setUnnamedAddr(true);
   GV->setAlignment(1);  // Strings may not be merged w/o setting align 1.
   return GV;
 }
@@ -634,7 +619,7 @@ bool AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
 
     Value *Cmp = IRB.CreateICmpNE(Length,
                                   Constant::getNullValue(Length->getType()));
-    InsertBefore = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
+    InsertBefore = SplitBlockAndInsertIfThen(Cmp, InsertBefore, false);
   }
 
   instrumentMemIntrinsicParam(MI, Dst, Length, InsertBefore, true);
@@ -797,7 +782,7 @@ void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
 
   if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
     TerminatorInst *CheckTerm =
-        SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
+        SplitBlockAndInsertIfThen(Cmp, InsertBefore, false);
     assert(dyn_cast<BranchInst>(CheckTerm)->isUnconditional());
     BasicBlock *NextBB = CheckTerm->getSuccessor(0);
     IRB.SetInsertPoint(CheckTerm);
@@ -808,7 +793,7 @@ void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
     BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
     ReplaceInstWithInst(CheckTerm, NewTerm);
   } else {
-    CrashTerm = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), true);
+    CrashTerm = SplitBlockAndInsertIfThen(Cmp, InsertBefore, true);
   }
 
   Instruction *Crash = generateCrashCode(
@@ -860,8 +845,8 @@ bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) {
   //   - Need to poison all copies, not just the main thread's one.
   if (G->isThreadLocal())
     return false;
-  // For now, just ignore this Alloca if the alignment is large.
-  if (G->getAlignment() > RedzoneSize()) return false;
+  // For now, just ignore this Global if the alignment is large.
+  if (G->getAlignment() > MinRedzoneSizeForGlobal()) return false;
 
   // Ignore all the globals with the names starting with "\01L_OBJC_".
   // Many of those are put into the .cstring section. The linker compresses
@@ -933,7 +918,7 @@ bool AddressSanitizerModule::runOnModule(Module &M) {
   C = &(M.getContext());
   int LongSize = TD->getPointerSizeInBits();
   IntptrTy = Type::getIntNTy(*C, LongSize);
-  Mapping = getShadowMapping(M, LongSize, ZeroBaseShadow);
+  Mapping = getShadowMapping(M, LongSize);
   initializeCallbacks(M);
   DynamicallyInitializedGlobals.Init(M);
 
@@ -967,11 +952,10 @@ bool AddressSanitizerModule::runOnModule(Module &M) {
 
   bool HasDynamicallyInitializedGlobals = false;
 
-  GlobalVariable *ModuleName = createPrivateGlobalForString(
-      M, M.getModuleIdentifier());
   // We shouldn't merge same module names, as this string serves as unique
   // module ID in runtime.
-  ModuleName->setUnnamedAddr(false);
+  GlobalVariable *ModuleName = createPrivateGlobalForString(
+      M, M.getModuleIdentifier(), /*AllowMerging*/false);
 
   for (size_t i = 0; i < n; i++) {
     static const uint64_t kMaxGlobalRedzone = 1 << 18;
@@ -979,7 +963,7 @@ bool AddressSanitizerModule::runOnModule(Module &M) {
     PointerType *PtrTy = cast<PointerType>(G->getType());
     Type *Ty = PtrTy->getElementType();
     uint64_t SizeInBytes = TD->getTypeAllocSize(Ty);
-    uint64_t MinRZ = RedzoneSize();
+    uint64_t MinRZ = MinRedzoneSizeForGlobal();
     // MinRZ <= RZ <= kMaxGlobalRedzone
     // and trying to make RZ to be ~ 1/4 of SizeInBytes.
     uint64_t RZ = std::max(MinRZ,
@@ -1002,7 +986,8 @@ bool AddressSanitizerModule::runOnModule(Module &M) {
         NewTy, G->getInitializer(),
         Constant::getNullValue(RightRedZoneTy), NULL);
 
-    GlobalVariable *Name = createPrivateGlobalForString(M, G->getName());
+    GlobalVariable *Name =
+        createPrivateGlobalForString(M, G->getName(), /*AllowMerging*/true);
 
     // Create a new global variable with enough space for a redzone.
     GlobalValue::LinkageTypes Linkage = G->getLinkage();
@@ -1140,7 +1125,7 @@ bool AddressSanitizer::doInitialization(Module &M) {
   AsanInitFunction->setLinkage(Function::ExternalLinkage);
   IRB.CreateCall(AsanInitFunction);
 
-  Mapping = getShadowMapping(M, LongSize, ZeroBaseShadow);
+  Mapping = getShadowMapping(M, LongSize);
   emitShadowMapping(M, IRB);
 
   appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndCtorPriority);
@@ -1184,16 +1169,28 @@ bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
 //  b) collect usage statistics to help improve Clang coverage design.
 bool AddressSanitizer::InjectCoverage(Function &F) {
   if (!ClCoverage) return false;
-  IRBuilder<> IRB(F.getEntryBlock().getFirstInsertionPt());
+
+  // Skip static allocas at the top of the entry block so they don't become
+  // dynamic when we split the block. If we used our optimized stack layout,
+  // then there will only be one alloca and it will come first.
+  BasicBlock &Entry = F.getEntryBlock();
+  BasicBlock::iterator IP = Entry.getFirstInsertionPt(), BE = Entry.end();
+  for (; IP != BE; ++IP) {
+    AllocaInst *AI = dyn_cast<AllocaInst>(IP);
+    if (!AI || !AI->isStaticAlloca())
+      break;
+  }
+
+  IRBuilder<> IRB(IP);
   Type *Int8Ty = IRB.getInt8Ty();
   GlobalVariable *Guard = new GlobalVariable(
       *F.getParent(), Int8Ty, false, GlobalValue::PrivateLinkage,
       Constant::getNullValue(Int8Ty), "__asan_gen_cov_" + F.getName());
   LoadInst *Load = IRB.CreateLoad(Guard);
   Load->setAtomic(Monotonic);
   Load->setAlignment(1);
   Value *Cmp = IRB.CreateICmpEQ(Constant::getNullValue(Int8Ty), Load);
-  Instruction *Ins = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
+  Instruction *Ins = SplitBlockAndInsertIfThen(Cmp, IP, false);
   IRB.SetInsertPoint(Ins);
   // We pass &F to __sanitizer_cov. We could avoid this and rely on
   // GET_CALLER_PC, but having the PC of the first instruction is just nice.
@@ -1322,32 +1319,6 @@ bool AddressSanitizer::runOnFunction(Function &F) {
   return res;
 }
 
-static uint64_t ValueForPoison(uint64_t PoisonByte, size_t ShadowRedzoneSize) {
-  if (ShadowRedzoneSize == 1) return PoisonByte;
-  if (ShadowRedzoneSize == 2) return (PoisonByte << 8) + PoisonByte;
-  if (ShadowRedzoneSize == 4)
-    return (PoisonByte << 24) + (PoisonByte << 16) +
-        (PoisonByte << 8) + (PoisonByte);
-  llvm_unreachable("ShadowRedzoneSize is either 1, 2 or 4");
-}
-
-static void PoisonShadowPartialRightRedzone(uint8_t *Shadow,
-                                            size_t Size,
-                                            size_t RZSize,
-                                            size_t ShadowGranularity,
-                                            uint8_t Magic) {
-  for (size_t i = 0; i < RZSize;
-       i+= ShadowGranularity, Shadow++) {
-    if (i + ShadowGranularity <= Size) {
-      *Shadow = 0;  // fully addressable
-    } else if (i >= Size) {
-      *Shadow = Magic;  // unaddressable
-    } else {
-      *Shadow = Size - i;  // first Size-i bytes are addressable
-    }
-  }
-}
-
 // Workaround for bug 11395: we don't want to instrument stack in functions
 // with large assembly blobs (32-bit only), otherwise reg alloc may crash.
 // FIXME: remove once the bug 11395 is fixed.
@@ -1377,65 +1348,31 @@ void FunctionStackPoisoner::initializeCallbacks(Module &M) {
       kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
 }
 
-void FunctionStackPoisoner::poisonRedZones(
-    const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> &IRB, Value *ShadowBase,
-    bool DoPoison) {
-  size_t ShadowRZSize = RedzoneSize() >> Mapping.Scale;
-  assert(ShadowRZSize >= 1 && ShadowRZSize <= 4);
-  Type *RZTy = Type::getIntNTy(*C, ShadowRZSize * 8);
-  Type *RZPtrTy = PointerType::get(RZTy, 0);
-
-  Value *PoisonLeft = ConstantInt::get(RZTy,
-    ValueForPoison(DoPoison ? kAsanStackLeftRedzoneMagic : 0LL, ShadowRZSize));
-  Value *PoisonMid = ConstantInt::get(RZTy,
-    ValueForPoison(DoPoison ? kAsanStackMidRedzoneMagic : 0LL, ShadowRZSize));
-  Value *PoisonRight = ConstantInt::get(RZTy,
-    ValueForPoison(DoPoison ? kAsanStackRightRedzoneMagic : 0LL, ShadowRZSize));
-
-  // poison the first red zone.
-  IRB.CreateStore(PoisonLeft, IRB.CreateIntToPtr(ShadowBase, RZPtrTy));
-
-  // poison all other red zones.
-  uint64_t Pos = RedzoneSize();
-  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
-    AllocaInst *AI = AllocaVec[i];
-    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
-    uint64_t AlignedSize = getAlignedAllocaSize(AI);
-    assert(AlignedSize - SizeInBytes < RedzoneSize());
-    Value *Ptr = NULL;
-
-    Pos += AlignedSize;
-
-    assert(ShadowBase->getType() == IntptrTy);
-    if (SizeInBytes < AlignedSize) {
-      // Poison the partial redzone at right
-      Ptr = IRB.CreateAdd(
-          ShadowBase, ConstantInt::get(IntptrTy,
-                                       (Pos >> Mapping.Scale) - ShadowRZSize));
-      size_t AddressableBytes = RedzoneSize() - (AlignedSize - SizeInBytes);
-      uint32_t Poison = 0;
-      if (DoPoison) {
-        PoisonShadowPartialRightRedzone((uint8_t*)&Poison, AddressableBytes,
-                                        RedzoneSize(),
-                                        1ULL << Mapping.Scale,
-                                        kAsanStackPartialRedzoneMagic);
-        Poison =
-            ASan.TD->isLittleEndian()
-                ? support::endian::byte_swap<uint32_t, support::little>(Poison)
-                : support::endian::byte_swap<uint32_t, support::big>(Poison);
+void
+FunctionStackPoisoner::poisonRedZones(const ArrayRef<uint8_t> ShadowBytes,
+                                      IRBuilder<> &IRB, Value *ShadowBase,
+                                      bool DoPoison) {
+  size_t n = ShadowBytes.size();
+  size_t i = 0;
+  // We need to (un)poison n bytes of stack shadow. Poison as many as we can
+  // using 64-bit stores (if we are on 64-bit arch), then poison the rest
+  // with 32-bit stores, then with 16-bit stores, then with 8-bit stores.
+  for (size_t LargeStoreSizeInBytes = ASan.LongSize / 8;
+       LargeStoreSizeInBytes != 0; LargeStoreSizeInBytes /= 2) {
+    for (; i + LargeStoreSizeInBytes - 1 < n; i += LargeStoreSizeInBytes) {
+      uint64_t Val = 0;
+      for (size_t j = 0; j < LargeStoreSizeInBytes; j++) {
+        if (ASan.TD->isLittleEndian())
+          Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
+        else
+          Val = (Val << 8) | ShadowBytes[i + j];
       }
-        Value *PartialPoison = ConstantInt::get(RZTy, Poison);
-        IRB.CreateStore(PartialPoison, IRB.CreateIntToPtr(Ptr, RZPtrTy));
+      if (!Val) continue;
+      Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
+      Type *StoreTy = Type::getIntNTy(*C, LargeStoreSizeInBytes * 8);
+      Value *Poison = ConstantInt::get(StoreTy, DoPoison ? Val : 0);
+      IRB.CreateStore(Poison, IRB.CreateIntToPtr(Ptr, StoreTy->getPointerTo()));
     }
-
-    // Poison the full redzone at right.
-    Ptr = IRB.CreateAdd(ShadowBase,
-                        ConstantInt::get(IntptrTy, Pos >> Mapping.Scale));
-    bool LastAlloca = (i == AllocaVec.size() - 1);
-    Value *Poison = LastAlloca ? PoisonRight : PoisonMid;
-    IRB.CreateStore(Poison, IRB.CreateIntToPtr(Ptr, RZPtrTy));
-
-    Pos += RedzoneSize();
   }
 }
 
@@ -1467,24 +1404,37 @@ void FunctionStackPoisoner::SetShadowToStackAfterReturnInlined(
 }
 
 void FunctionStackPoisoner::poisonStack() {
-  uint64_t LocalStackSize = TotalStackSize +
-      (AllocaVec.size() + 1) * RedzoneSize();
-
-  bool DoStackMalloc = ASan.CheckUseAfterReturn
-      && LocalStackSize <= kMaxStackMallocSize;
   int StackMallocIdx = -1;
 
   assert(AllocaVec.size() > 0);
   Instruction *InsBefore = AllocaVec[0];
   IRBuilder<> IRB(InsBefore);
 
+  SmallVector<ASanStackVariableDescription, 16> SVD;
+  SVD.reserve(AllocaVec.size());
+  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
+    AllocaInst *AI = AllocaVec[i];
+    ASanStackVariableDescription D = { AI->getName().data(),
+                                       getAllocaSizeInBytes(AI),
+                                       AI->getAlignment(), AI, 0};
+    SVD.push_back(D);
+  }
+  // Minimal header size (left redzone) is 4 pointers,
+  // i.e. 32 bytes on 64-bit platforms and 16 bytes on 32-bit platforms.
+  size_t MinHeaderSize = ASan.LongSize / 2;
+  ASanStackFrameLayout L;
+  ComputeASanStackFrameLayout(SVD, 1UL << Mapping.Scale, MinHeaderSize, &L);
+  DEBUG(dbgs() << L.DescriptionString << " --- " << L.FrameSize << "\n");
+  uint64_t LocalStackSize = L.FrameSize;
+  bool DoStackMalloc =
+      ASan.CheckUseAfterReturn && LocalStackSize <= kMaxStackMallocSize;
 
   Type *ByteArrayTy = ArrayType::get(IRB.getInt8Ty(), LocalStackSize);
   AllocaInst *MyAlloca =
       new AllocaInst(ByteArrayTy, "MyAlloca", InsBefore);
-  if (ClRealignStack && StackAlignment < RedzoneSize())
-    StackAlignment = RedzoneSize();
-  MyAlloca->setAlignment(StackAlignment);
+  assert((ClRealignStack & (ClRealignStack - 1)) == 0);
+  size_t FrameAlignment = std::max(L.FrameAlignment, (size_t)ClRealignStack);
+  MyAlloca->setAlignment(FrameAlignment);
   assert(MyAlloca->isStaticAlloca());
   Value *OrigStackBase = IRB.CreatePointerCast(MyAlloca, IntptrTy);
   Value *LocalStackBase = OrigStackBase;
@@ -1499,8 +1449,7 @@ void FunctionStackPoisoner::poisonStack() {
         kAsanOptionDetectUAR, IRB.getInt32Ty());
     Value *Cmp = IRB.CreateICmpNE(IRB.CreateLoad(OptionDetectUAR),
                                   Constant::getNullValue(IRB.getInt32Ty()));
-    Instruction *Term =
-        SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
+    Instruction *Term = SplitBlockAndInsertIfThen(Cmp, InsBefore, false);
     BasicBlock *CmpBlock = cast<Instruction>(Cmp)->getParent();
     IRBuilder<> IRBIf(Term);
     LocalStackBase = IRBIf.CreateCall2(
@@ -1514,41 +1463,27 @@ void FunctionStackPoisoner::poisonStack() {
     LocalStackBase = Phi;
   }
 
-  // This string will be parsed by the run-time (DescribeAddressIfStack).
-  SmallString<2048> StackDescriptionStorage;
-  raw_svector_ostream StackDescription(StackDescriptionStorage);
-  StackDescription << AllocaVec.size() << " ";
-
   // Insert poison calls for lifetime intrinsics for alloca.
   bool HavePoisonedAllocas = false;
   for (size_t i = 0, n = AllocaPoisonCallVec.size(); i < n; i++) {
     const AllocaPoisonCall &APC = AllocaPoisonCallVec[i];
-    IntrinsicInst *II = APC.InsBefore;
-    AllocaInst *AI = findAllocaForValue(II->getArgOperand(1));
-    assert(AI);
-    IRBuilder<> IRB(II);
-    poisonAlloca(AI, APC.Size, IRB, APC.DoPoison);
+    assert(APC.InsBefore);
+    assert(APC.AI);
+    IRBuilder<> IRB(APC.InsBefore);
+    poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
     HavePoisonedAllocas |= APC.DoPoison;
   }
 
-  uint64_t Pos = RedzoneSize();
   // Replace Alloca instructions with base+offset.
-  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
-    AllocaInst *AI = AllocaVec[i];
-    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
-    StringRef Name = AI->getName();
-    StackDescription << Pos << " " << SizeInBytes << " "
-                     << Name.size() << " " << Name << " ";
-    uint64_t AlignedSize = getAlignedAllocaSize(AI);
-    assert((AlignedSize % RedzoneSize()) == 0);
+  for (size_t i = 0, n = SVD.size(); i < n; i++) {
+    AllocaInst *AI = SVD[i].AI;
     Value *NewAllocaPtr = IRB.CreateIntToPtr(
-        IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, Pos)),
-        AI->getType());
+        IRB.CreateAdd(LocalStackBase,
+                      ConstantInt::get(IntptrTy, SVD[i].Offset)),
+        AI->getType());
     replaceDbgDeclareForAlloca(AI, NewAllocaPtr, DIB);
     AI->replaceAllUsesWith(NewAllocaPtr);
-    Pos += AlignedSize + RedzoneSize();
   }
-  assert(Pos == LocalStackSize);
 
   // The left-most redzone has enough space for at least 4 pointers.
   // Write the Magic value to redzone[0].
@@ -1560,7 +1495,8 @@ void FunctionStackPoisoner::poisonStack() {
       IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, ASan.LongSize/8)),
       IntptrPtrTy);
   GlobalVariable *StackDescriptionGlobal =
-      createPrivateGlobalForString(*F.getParent(), StackDescription.str());
+      createPrivateGlobalForString(*F.getParent(), L.DescriptionString,
+                                   /*AllowMerging*/true);
   Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal, IntptrTy);
   IRB.CreateStore(Description, BasePlus1);
 
@@ -1573,30 +1509,33 @@ void FunctionStackPoisoner::poisonStack() {
 
   // Poison the stack redzones at the entry.
   Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
-  poisonRedZones(AllocaVec, IRB, ShadowBase, true);
+  poisonRedZones(L.ShadowBytes, IRB, ShadowBase, true);
 
-  // Unpoison the stack before all ret instructions.
+  // (Un)poison the stack before all ret instructions.
   for (size_t i = 0, n = RetVec.size(); i < n; i++) {
     Instruction *Ret = RetVec[i];
     IRBuilder<> IRBRet(Ret);
     // Mark the current frame as retired.
     IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
                        BasePlus0);
-    // Unpoison the stack.
-    poisonRedZones(AllocaVec, IRBRet, ShadowBase, false);
     if (DoStackMalloc) {
       assert(StackMallocIdx >= 0);
-      // In use-after-return mode, mark the whole stack frame unaddressable.
+      // if LocalStackBase != OrigStackBase:
+      //     // In use-after-return mode, poison the whole stack frame.
+      //     if StackMallocIdx <= 4
+      //         // For small sizes inline the whole thing:
+      //         memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
+      //         **SavedFlagPtr(LocalStackBase) = 0
+      //     else
+      //         __asan_stack_free_N(LocalStackBase, OrigStackBase)
+      // else
+      //     <This is not a fake stack; unpoison the redzones>
+      Value *Cmp = IRBRet.CreateICmpNE(LocalStackBase, OrigStackBase);
+      TerminatorInst *ThenTerm, *ElseTerm;
+      SplitBlockAndInsertIfThenElse(Cmp, Ret, &ThenTerm, &ElseTerm);
+
+      IRBuilder<> IRBPoison(ThenTerm);
       if (StackMallocIdx <= 4) {
-        // For small sizes inline the whole thing:
-        // if LocalStackBase != OrigStackBase:
-        //     memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
-        //     **SavedFlagPtr(LocalStackBase) = 0
-        // FIXME: if LocalStackBase != OrigStackBase don't call poisonRedZones.
-        Value *Cmp = IRBRet.CreateICmpNE(LocalStackBase, OrigStackBase);
-        TerminatorInst *PoisonTerm =
-            SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
-        IRBuilder<> IRBPoison(PoisonTerm);
         int ClassSize = kMinStackMallocSize << StackMallocIdx;
         SetShadowToStackAfterReturnInlined(IRBPoison, ShadowBase,
                                            ClassSize >> Mapping.Scale);
@@ -1610,15 +1549,20 @@ void FunctionStackPoisoner::poisonStack() {
             IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
       } else {
         // For larger frames call __asan_stack_free_*.
-        IRBRet.CreateCall3(AsanStackFreeFunc[StackMallocIdx], LocalStackBase,
-                           ConstantInt::get(IntptrTy, LocalStackSize),
-                           OrigStackBase);
+        IRBPoison.CreateCall3(AsanStackFreeFunc[StackMallocIdx], LocalStackBase,
+                              ConstantInt::get(IntptrTy, LocalStackSize),
+                              OrigStackBase);
       }
+
+      IRBuilder<> IRBElse(ElseTerm);
+      poisonRedZones(L.ShadowBytes, IRBElse, ShadowBase, false);
     } else if (HavePoisonedAllocas) {
       // If we poisoned some allocas in llvm.lifetime analysis,
      // unpoison whole stack frame now.
       assert(LocalStackBase == OrigStackBase);
       poisonAlloca(LocalStackBase, LocalStackSize, IRBRet, false);
+    } else {
+      poisonRedZones(L.ShadowBytes, IRBRet, ShadowBase, false);
     }
   }
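
The core of the patch is the store-splitting loop in the new FunctionStackPoisoner::poisonRedZones(): instead of writing one redzone value per alloca, it takes the precomputed ShadowBytes array from ASanStackFrameLayout and emits the widest integer stores that still fit (8-byte, then 4-, 2- and 1-byte), skipping words whose shadow is entirely zero. The sketch below mirrors that loop in standalone C++ so the planning logic can be compiled and inspected outside of LLVM; it is an illustration only, not code from the patch, and the names ShadowStore and planShadowStores plus the 64-bit little-endian assumption are mine.

#include <cstdint>
#include <cstdio>
#include <vector>

// One planned store into the shadow region (illustrative helper, not LLVM).
struct ShadowStore {
  size_t Offset;        // byte offset from the shadow base
  unsigned SizeInBytes; // 8, 4, 2 or 1
  uint64_t Value;       // packed shadow bytes to store
};

// Mirrors the loop in the new poisonRedZones(): cover the shadow with 8-byte
// stores first, then 4-, 2- and 1-byte stores for the tail, skipping words
// that are all zero.  Assumes a 64-bit little-endian target.
static std::vector<ShadowStore>
planShadowStores(const std::vector<uint8_t> &ShadowBytes) {
  std::vector<ShadowStore> Plan;
  size_t n = ShadowBytes.size();
  size_t i = 0;
  for (unsigned StoreSize = 8; StoreSize != 0; StoreSize /= 2) {
    for (; i + StoreSize - 1 < n; i += StoreSize) {
      uint64_t Val = 0;
      for (unsigned j = 0; j < StoreSize; j++)
        Val |= (uint64_t)ShadowBytes[i + j] << (8 * j); // little-endian pack
      if (!Val)
        continue; // shadow word already zero: no store needed
      Plan.push_back({i, StoreSize, Val});
    }
  }
  return Plan;
}

int main() {
  // Toy frame shadow: left redzone bytes (0xf1), two addressable granules
  // (0x00), then right redzone bytes (0xf3); one shadow byte covers 8 bytes.
  std::vector<uint8_t> Shadow = {0xf1, 0xf1, 0xf1, 0xf1, 0x00, 0x00,
                                 0xf3, 0xf3, 0xf3, 0xf3};
  for (const ShadowStore &S : planShadowStores(Shadow))
    std::printf("i%u store 0x%llx at shadow+%zu\n", S.SizeInBytes * 8,
                (unsigned long long)S.Value, S.Offset);
  return 0;
}

On a 32-bit target the pass starts with 4-byte stores instead (ASan.LongSize / 8), and when unpoisoning it walks the same plan but stores zero over every word that had a non-zero shadow value, which is why the poison and unpoison paths share this one loop.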