//===- DeadStoreElimination.cpp - Fast Dead Store Elimination -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal.  Doing so would be pretty trivial.
//
//===----------------------------------------------------------------------===//
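// For illustration (example IR, not from this file): within a single basic
// block, the first of these two stores is overwritten before it can be read,
// so this pass deletes it:
//
//   store i32 0, i32* %P
//   store i32 1, i32* %P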
#define DEBUG_TYPE "dse"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther , "Number of other instrs removed");
namespace {
  struct DSE : public FunctionPass {
    TargetData *TD;

    static char ID; // Pass identification, replacement for typeid
    DSE() : FunctionPass(&ID) {}

    virtual bool runOnFunction(Function &F) {
      bool Changed = false;

      DominatorTree &DT = getAnalysis<DominatorTree>();

      for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I)
        // Only check non-dead blocks.  Dead blocks may have strange pointer
        // cycles that will confuse alias analysis.
        if (DT.isReachableFromEntry(I))
          Changed |= runOnBasicBlock(*I);

      return Changed;
    }
    bool runOnBasicBlock(BasicBlock &BB);
    bool handleFreeWithNonTrivialDependency(Instruction *F, MemDepResult Dep);
    bool handleEndBlock(BasicBlock &BB);
    bool RemoveUndeadPointers(Value *Ptr, uint64_t killPointerSize,
                              BasicBlock::iterator &BBI,
                              SmallPtrSet<Value*, 64> &deadPointers);
    void DeleteDeadInstruction(Instruction *I,
                               SmallPtrSet<Value*, 64> *deadPointers = 0);
    // getAnalysisUsage - This pass requires the dominator tree, alias
    // analysis, and memory dependence analysis, and preserves all three.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequired<DominatorTree>();
      AU.addRequired<AliasAnalysis>();
      AU.addRequired<MemoryDependenceAnalysis>();
      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
      AU.addPreserved<MemoryDependenceAnalysis>();
    }

    unsigned getPointerSize(Value *V) const;
  };
}

char DSE::ID = 0;
static RegisterPass<DSE> X("dse", "Dead Store Elimination");

FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }
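// For illustration: the pass can be run in isolation on a bitcode file with
//   opt -dse input.bc -o output.bc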
/// doesClobberMemory - Does this instruction clobber (write without reading)
/// some memory?
static bool doesClobberMemory(Instruction *I) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  return false;
}
/// isElidable - If the value of this instruction and the memory it writes to
/// are unused, may we delete this instruction?
static bool isElidable(Instruction *I) {
  assert(doesClobberMemory(I));
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I))
    return II->getIntrinsicID() != Intrinsic::lifetime_end;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return !SI->isVolatile();
  return true;
}
/// getPointerOperand - Return the pointer that is being clobbered.
static Value *getPointerOperand(Instruction *I) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getOperand(1);

  switch (cast<IntrinsicInst>(I)->getIntrinsicID()) {
  default: assert(false && "Unexpected intrinsic!");
  case Intrinsic::init_trampoline:
    return I->getOperand(1);
  case Intrinsic::lifetime_end:
    return I->getOperand(2);
  }
}
/// getStoreSize - Return the length in bytes of the write by the clobbering
/// instruction. If variable or unknown, returns -1.
static unsigned getStoreSize(Instruction *I, const TargetData *TD) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!TD) return -1u;
    return TD->getTypeStoreSize(SI->getOperand(0)->getType());
  }

  Value *Len;
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
    Len = MI->getLength();
  } else {
    switch (cast<IntrinsicInst>(I)->getIntrinsicID()) {
    default: assert(false && "Unexpected intrinsic!");
    case Intrinsic::init_trampoline:
      return -1u;
    case Intrinsic::lifetime_end:
      Len = I->getOperand(1);
      break;
    }
  }
  if (ConstantInt *LenCI = dyn_cast<ConstantInt>(Len))
    if (!LenCI->isAllOnesValue())
      return LenCI->getZExtValue();
  return -1u;
}
/// isStoreAtLeastAsWideAs - Return true if the size of the store in I1 is
/// greater than or equal to the store in I2.  This returns false if we don't
/// know.
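///
/// For example (illustrative, assuming TargetData is available): an i64 store
/// (8 bytes) is at least as wide as an earlier i32 store (4 bytes) through the
/// same pointer, so the i32 store could be proven dead.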
static bool isStoreAtLeastAsWideAs(Instruction *I1, Instruction *I2,
                                   const TargetData *TD) {
  const Type *I1Ty = getPointerOperand(I1)->getType();
  const Type *I2Ty = getPointerOperand(I2)->getType();

  // Exactly the same type, must have exactly the same size.
  if (I1Ty == I2Ty) return true;

  int I1Size = getStoreSize(I1, TD);
  int I2Size = getStoreSize(I2, TD);

  return I1Size != -1 && I2Size != -1 && I1Size >= I2Size;
}
bool DSE::runOnBasicBlock(BasicBlock &BB) {
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
  TD = getAnalysisIfAvailable<TargetData>();

  bool MadeChange = false;

  // Do a top-down walk on the BB.
  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
    Instruction *Inst = BBI++;

    // If we find a store or a free, get its memory dependence.
    if (!doesClobberMemory(Inst) && !isFreeCall(Inst))
      continue;

    MemDepResult InstDep = MD.getDependency(Inst);

    // Ignore non-local stores.
    // FIXME: cross-block DSE would be fun. :)
    if (InstDep.isNonLocal()) continue;

    // Handle frees whose dependencies are non-trivial.
    if (isFreeCall(Inst)) {
      MadeChange |= handleFreeWithNonTrivialDependency(Inst, InstDep);
      continue;
    }

    // If not a definite must-alias dependency, ignore it.
    if (!InstDep.isDef())
      continue;

    // If this is a store-store dependence, then the previous store is dead so
    // long as this store is at least as big as it.
    if (doesClobberMemory(InstDep.getInst())) {
      Instruction *DepStore = InstDep.getInst();
      if (isStoreAtLeastAsWideAs(Inst, DepStore, TD) &&
          isElidable(DepStore)) {
        // Delete the store and now-dead instructions that feed it.
        DeleteDeadInstruction(DepStore);
        NumFastStores++;
        MadeChange = true;

        // DeleteDeadInstruction can delete the current instruction in loop
        // cases, reset BBI.
        BBI = Inst;
        if (BBI != BB.begin())
          --BBI;
        continue;
      }
    }

    if (!isElidable(Inst))
      continue;
    // If we're storing the same value back to a pointer that we just
    // loaded from, then the store can be removed.
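    // For example (illustrative IR):
    //   %x = load i32* %P
    //   store i32 %x, i32* %P    ; no-op store, safe to delete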
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      if (LoadInst *DepLoad = dyn_cast<LoadInst>(InstDep.getInst())) {
        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
            SI->getOperand(0) == DepLoad) {
          // DeleteDeadInstruction can delete the current instruction.  Save BBI
          // in case we need it.
          WeakVH NextInst(BBI);

          DeleteDeadInstruction(SI);

          if (NextInst == 0)  // Next instruction deleted.
            BBI = BB.begin();
          else if (BBI != BB.begin())  // Revisit this instruction if possible.
            --BBI;
          NumFastStores++;
          MadeChange = true;
          continue;
        }
      }
    }

    // If this is a lifetime end marker, we can throw away the store.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(InstDep.getInst())) {
      if (II->getIntrinsicID() == Intrinsic::lifetime_end) {
        // Delete the store and now-dead instructions that feed it.
        // DeleteDeadInstruction can delete the current instruction.  Save BBI
        // in case we need it.
        WeakVH NextInst(BBI);

        DeleteDeadInstruction(Inst);

        if (NextInst == 0)  // Next instruction deleted.
          BBI = BB.begin();
        else if (BBI != BB.begin())  // Revisit this instruction if possible.
          --BBI;
        NumFastStores++;
        MadeChange = true;
        continue;
      }
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB);

  return MadeChange;
}
/// handleFreeWithNonTrivialDependency - Handle frees of entire structures whose
/// dependency is a store to a field of that structure.
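/// For example (illustrative):
///   store i32 0, i32* %field    ; dead: the whole object is freed just below
///   call void @free(i8* %obj)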
bool DSE::handleFreeWithNonTrivialDependency(Instruction *F, MemDepResult Dep) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  Instruction *Dependency = Dep.getInst();
  if (!Dependency || !doesClobberMemory(Dependency) || !isElidable(Dependency))
    return false;

  Value *DepPointer = getPointerOperand(Dependency)->getUnderlyingObject();

  // Check for aliasing.
  if (AA.alias(F->getOperand(1), 1, DepPointer, 1) !=
        AliasAnalysis::MustAlias)
    return false;

  // DCE instructions only used to calculate that store
  DeleteDeadInstruction(Dependency);
  NumFastStores++;
  return true;
}
/// handleEndBlock - Remove dead stores to stack-allocated locations in the
/// function end block.  Ex:
/// %A = alloca i32
/// ...
/// store i32 1, i32* %A
/// ret void
bool DSE::handleEndBlock(BasicBlock &BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  bool MadeChange = false;

  // Pointers alloca'd in this function are dead in the end block
  SmallPtrSet<Value*, 64> deadPointers;

  // Find all of the alloca'd pointers in the entry block.
  BasicBlock *Entry = BB.getParent()->begin();
  for (BasicBlock::iterator I = Entry->begin(), E = Entry->end(); I != E; ++I)
    if (AllocaInst *AI = dyn_cast<AllocaInst>(I))
      deadPointers.insert(AI);

  // Treat byval arguments the same; stores to them are dead at the end of the
  // function.
  for (Function::arg_iterator AI = BB.getParent()->arg_begin(),
       AE = BB.getParent()->arg_end(); AI != AE; ++AI)
    if (AI->hasByValAttr())
      deadPointers.insert(AI);
  // Scan the basic block backwards
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
    --BBI;

    // If we find a store whose pointer is dead.
    if (doesClobberMemory(BBI)) {
      if (isElidable(BBI)) {
        // See through pointer-to-pointer bitcasts
        Value *pointerOperand = getPointerOperand(BBI)->getUnderlyingObject();

        // Alloca'd pointers or byval arguments (which are functionally like
        // alloca's) are valid candidates for removal.
        if (deadPointers.count(pointerOperand)) {
          // DCE instructions only used to calculate that store.
          Instruction *Dead = BBI;
          ++BBI;
          DeleteDeadInstruction(Dead, &deadPointers);
          NumFastStores++;
          MadeChange = true;
          continue;
        }
      }

      // Because a memcpy or memmove is also a load, we can't skip it if we
      // didn't remove it.
      if (!isa<MemTransferInst>(BBI))
        continue;
    }
    Value *killPointer = 0;
    uint64_t killPointerSize = ~0UL;

    // If we encounter a use of the pointer, it is no longer considered dead
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      // However, if this load is unused and not volatile, we can go ahead and
      // remove it, and not have to worry about it making our pointer undead!
      if (L->use_empty() && !L->isVolatile()) {
        ++BBI;
        DeleteDeadInstruction(L, &deadPointers);
        NumFastOther++;
        MadeChange = true;
        continue;
      }

      killPointer = L->getPointerOperand();
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      killPointer = V->getOperand(0);
    } else if (isa<MemTransferInst>(BBI) &&
               isa<ConstantInt>(cast<MemTransferInst>(BBI)->getLength())) {
      killPointer = cast<MemTransferInst>(BBI)->getSource();
      killPointerSize = cast<ConstantInt>(
                          cast<MemTransferInst>(BBI)->getLength())->getZExtValue();
    } else if (AllocaInst *A = dyn_cast<AllocaInst>(BBI)) {
      deadPointers.erase(A);

      // Dead alloca's can be DCE'd when we reach them
      if (A->use_empty()) {
        ++BBI;
        DeleteDeadInstruction(A, &deadPointers);
        NumFastOther++;
        MadeChange = true;
        continue;
      }

    } else if (CallSite::get(BBI).getInstruction() != 0) {
      // If this call does not access memory, it can't
      // be undeadifying any of our pointers.
      CallSite CS = CallSite::get(BBI);
      if (AA.doesNotAccessMemory(CS))
        continue;

      unsigned modRef = 0;
      unsigned other = 0;

      // Remove any pointers made undead by the call from the dead set
      std::vector<Value*> dead;
      for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
           E = deadPointers.end(); I != E; ++I) {
        // HACK: if we detect that our AA is imprecise, it's not
        // worth it to scan the rest of the deadPointers set.  Just
        // assume that the AA will return ModRef for everything, and
        // go ahead and bail.
        if (modRef >= 16 && other == 0) {
          deadPointers.clear();
          return MadeChange;
        }

        // See if the call site touches it
        AliasAnalysis::ModRefResult A = AA.getModRefInfo(CS, *I,
                                                         getPointerSize(*I));

        if (A == AliasAnalysis::ModRef)
          modRef++;
        else
          other++;

        if (A == AliasAnalysis::ModRef || A == AliasAnalysis::Ref)
          dead.push_back(*I);
      }

      for (std::vector<Value*>::iterator I = dead.begin(), E = dead.end();
           I != E; ++I)
        deadPointers.erase(*I);

      continue;
    } else if (isInstructionTriviallyDead(BBI)) {
      // For any non-memory-affecting non-terminators, DCE them as we reach them
      Instruction *Inst = BBI;
      ++BBI;
      DeleteDeadInstruction(Inst, &deadPointers);
      NumFastOther++;
      MadeChange = true;
      continue;
    }

    if (!killPointer)
      continue;

    killPointer = killPointer->getUnderlyingObject();

    // Deal with undead pointers
    MadeChange |= RemoveUndeadPointers(killPointer, killPointerSize, BBI,
                                       deadPointers);
  }

  return MadeChange;
}
/// RemoveUndeadPointers - check for uses of a pointer that make it
/// undead when scanning for dead stores to alloca's.
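/// For example (illustrative): encountering "load i32* %A" while scanning
/// backwards makes %A undead, so stores to %A earlier in program order must
/// be preserved.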
bool DSE::RemoveUndeadPointers(Value *killPointer, uint64_t killPointerSize,
                               BasicBlock::iterator &BBI,
                               SmallPtrSet<Value*, 64> &deadPointers) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  // If the kill pointer can be easily reduced to an alloca,
  // don't bother doing extraneous AA queries.
  if (deadPointers.count(killPointer)) {
    deadPointers.erase(killPointer);
    return false;
  }

  // A global can't be in the dead pointer set.
  if (isa<GlobalValue>(killPointer))
    return false;

  bool MadeChange = false;

  SmallVector<Value*, 16> undead;

  for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
       E = deadPointers.end(); I != E; ++I) {
    // See if this pointer could alias it
    AliasAnalysis::AliasResult A = AA.alias(*I, getPointerSize(*I),
                                            killPointer, killPointerSize);

    // If it must-alias and a store, we can delete it
    if (isa<StoreInst>(BBI) && A == AliasAnalysis::MustAlias) {
      StoreInst *S = cast<StoreInst>(BBI);

      // Remove it!
      ++BBI;
      DeleteDeadInstruction(S, &deadPointers);
      NumFastStores++;
      MadeChange = true;

      continue;

      // Otherwise, it is undead
    } else if (A != AliasAnalysis::NoAlias)
      undead.push_back(*I);
  }

  for (SmallVector<Value*, 16>::iterator I = undead.begin(), E = undead.end();
       I != E; ++I)
    deadPointers.erase(*I);

  return MadeChange;
}
/// DeleteDeadInstruction - Delete this instruction.  Before we do, go through
/// and zero out all the operands of this instruction.  If any of them become
/// dead, delete them and the computation tree that feeds them.
///
/// If ValueSet is non-null, remove any deleted instructions from it as well.
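///
/// For example (illustrative): deleting "store i32 0, i32* %gep" may leave
/// "%gep = getelementptr ..." with no uses, in which case the getelementptr
/// is deleted as well, and so on up the operand tree.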
void DSE::DeleteDeadInstruction(Instruction *I,
                                SmallPtrSet<Value*, 64> *ValueSet) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
  --NumFastOther;

  // Before we touch this instruction, remove it from memdep!
  MemoryDependenceAnalysis &MDA = getAnalysis<MemoryDependenceAnalysis>();
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();

    ++NumFastOther;

    // This instruction is dead, zap it, in stages.  Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MDA.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, 0);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->erase(DeadInst);
  } while (!NowDeadInsts.empty());
}
unsigned DSE::getPointerSize(Value *V) const {
  if (TD) {
    if (AllocaInst *A = dyn_cast<AllocaInst>(V)) {
      // Get size information for the alloca
      if (ConstantInt *C = dyn_cast<ConstantInt>(A->getArraySize()))
        return C->getZExtValue() * TD->getTypeAllocSize(A->getAllocatedType());
    } else {
      assert(isa<Argument>(V) && "Expected AllocaInst or Argument!");
      const PointerType *PT = cast<PointerType>(V->getType());
      return TD->getTypeAllocSize(PT->getElementType());
    }
  }
  return ~0U;
}