X-Git-Url: http://demsky.eecs.uci.edu/git/?a=blobdiff_plain;f=lib%2FTransforms%2FScalar%2FDeadStoreElimination.cpp;h=09c01d314124e65ff3c7767db50d7f99eeb93ee9;hb=040056fd11693ffc41ce9b777281c71705d0dc1f;hp=f9d1205ada558e2a6feef90bcdf9ce703a409069;hpb=c69ace3a64aee6f97bd82f0d811b89f49a3b38ce;p=oota-llvm.git

diff --git a/lib/Transforms/Scalar/DeadStoreElimination.cpp b/lib/Transforms/Scalar/DeadStoreElimination.cpp
index f9d1205ada5..09c01d31412 100644
--- a/lib/Transforms/Scalar/DeadStoreElimination.cpp
+++ b/lib/Transforms/Scalar/DeadStoreElimination.cpp
@@ -22,217 +22,289 @@
 #include "llvm/Instructions.h"
 #include "llvm/IntrinsicInst.h"
 #include "llvm/Pass.h"
-#include "llvm/ADT/SetVector.h"
 #include "llvm/ADT/SmallPtrSet.h"
 #include "llvm/ADT/Statistic.h"
 #include "llvm/Analysis/AliasAnalysis.h"
+#include "llvm/Analysis/Dominators.h"
+#include "llvm/Analysis/MemoryBuiltins.h"
 #include "llvm/Analysis/MemoryDependenceAnalysis.h"
 #include "llvm/Target/TargetData.h"
 #include "llvm/Transforms/Utils/Local.h"
-#include "llvm/Support/Compiler.h"
 using namespace llvm;

 STATISTIC(NumFastStores, "Number of stores deleted");
 STATISTIC(NumFastOther , "Number of other instrs removed");

 namespace {
-  struct VISIBILITY_HIDDEN DSE : public FunctionPass {
+  struct DSE : public FunctionPass {
+    TargetData *TD;
+
     static char ID; // Pass identification, replacement for typeid
-    DSE() : FunctionPass((intptr_t)&ID) {}
+    DSE() : FunctionPass(&ID) {}

     virtual bool runOnFunction(Function &F) {
       bool Changed = false;
+
+      DominatorTree &DT = getAnalysis<DominatorTree>();
+
       for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I)
-        Changed |= runOnBasicBlock(*I);
+        // Only check non-dead blocks.  Dead blocks may have strange pointer
+        // cycles that will confuse alias analysis.
+        if (DT.isReachableFromEntry(I))
+          Changed |= runOnBasicBlock(*I);
       return Changed;
     }
-    
+
     bool runOnBasicBlock(BasicBlock &BB);
-    bool handleFreeWithNonTrivialDependency(FreeInst* F,
-                                            Instruction* dependency,
-                                            SetVector<Instruction*>& possiblyDead);
-    bool handleEndBlock(BasicBlock& BB, SetVector<Instruction*>& possiblyDead);
-    bool RemoveUndeadPointers(Value* pointer, uint64_t killPointerSize,
-                              BasicBlock::iterator& BBI,
-                              SmallPtrSet<Value*, 64>& deadPointers,
-                              SetVector<Instruction*>& possiblyDead);
-    void DeleteDeadInstructionChains(Instruction *I,
-                                     SetVector<Instruction*> &DeadInsts);
+    bool handleFreeWithNonTrivialDependency(Instruction *F, MemDepResult Dep);
+    bool handleEndBlock(BasicBlock &BB);
+    bool RemoveUndeadPointers(Value *Ptr, uint64_t killPointerSize,
+                              BasicBlock::iterator &BBI,
+                              SmallPtrSet<Value*, 64> &deadPointers);
+    void DeleteDeadInstruction(Instruction *I,
+                               SmallPtrSet<Value*, 64> *deadPointers = 0);

-    /// Find the base pointer that a pointer came from
-    /// Because this is used to find pointers that originate
-    /// from allocas, it is safe to ignore GEP indices, since
-    /// either the store will be in the alloca, and thus dead,
-    /// or beyond the end of the alloca, and thus undefined.
-    void TranslatePointerBitCasts(Value*& v, bool zeroGepsOnly = false) {
-      assert(isa<PointerType>(v->getType()) &&
-             "Translating a non-pointer type?");
-      while (true) {
-        if (BitCastInst* C = dyn_cast<BitCastInst>(v))
-          v = C->getOperand(0);
-        else if (GetElementPtrInst* G = dyn_cast<GetElementPtrInst>(v))
-          if (!zeroGepsOnly || G->hasAllZeroIndices()) {
-            v = G->getOperand(0);
-          } else {
-            break;
-          }
-        else
-          break;
-      }
-    }

     // getAnalysisUsage - We require post dominance frontiers (aka Control
     // Dependence Graph)
     virtual void getAnalysisUsage(AnalysisUsage &AU) const {
       AU.setPreservesCFG();
-      AU.addRequired<TargetData>();
+      AU.addRequired<DominatorTree>();
       AU.addRequired<AliasAnalysis>();
       AU.addRequired<MemoryDependenceAnalysis>();
+      AU.addPreserved<DominatorTree>();
       AU.addPreserved<AliasAnalysis>();
       AU.addPreserved<MemoryDependenceAnalysis>();
     }
+
+    unsigned getPointerSize(Value *V) const;
   };
-  char DSE::ID = 0;
-  RegisterPass<DSE> X("dse", "Dead Store Elimination");
 }

+char DSE::ID = 0;
+static RegisterPass<DSE> X("dse", "Dead Store Elimination");
+
 FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }

-bool DSE::runOnBasicBlock(BasicBlock &BB) {
-  MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
-  TargetData &TD = getAnalysis<TargetData>();
+/// doesClobberMemory - Does this instruction clobber (write without reading)
+/// some memory?
+static bool doesClobberMemory(Instruction *I) {
+  if (isa<StoreInst>(I))
+    return true;
+  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
+    switch (II->getIntrinsicID()) {
+    default:
+      return false;
+    case Intrinsic::memset:
+    case Intrinsic::memmove:
+    case Intrinsic::memcpy:
+    case Intrinsic::init_trampoline:
+    case Intrinsic::lifetime_end:
+      return true;
+    }
+  }
+  return false;
+}
+
+/// isElidable - If the value of this instruction and the memory it writes to
+/// is unused, may we delete this instruction?
+static bool isElidable(Instruction *I) {
+  assert(doesClobberMemory(I));
+  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I))
+    return II->getIntrinsicID() != Intrinsic::lifetime_end;
+  if (StoreInst *SI = dyn_cast<StoreInst>(I))
+    return !SI->isVolatile();
+  return true;
+}

-  // Record the last-seen store to this pointer
-  DenseMap<Value*, StoreInst*> lastStore;
-  // Record instructions possibly made dead by deleting a store
-  SetVector<Instruction*> possiblyDead;
+/// getPointerOperand - Return the pointer that is being clobbered.
+static Value *getPointerOperand(Instruction *I) {
+  assert(doesClobberMemory(I));
+  if (StoreInst *SI = dyn_cast<StoreInst>(I))
+    return SI->getPointerOperand();
+  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
+    return MI->getOperand(1);
+  switch (cast<IntrinsicInst>(I)->getIntrinsicID()) {
+  default: assert(false && "Unexpected intrinsic!");
+  case Intrinsic::init_trampoline:
+    return I->getOperand(1);
+  case Intrinsic::lifetime_end:
+    return I->getOperand(2);
+  }
+}
+
+/// getStoreSize - Return the length in bytes of the write by the clobbering
+/// instruction.  If variable or unknown, returns -1.
+static unsigned getStoreSize(Instruction *I, const TargetData *TD) {
+  assert(doesClobberMemory(I));
+  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
+    if (!TD) return -1u;
+    return TD->getTypeStoreSize(SI->getOperand(0)->getType());
+  }
+
+  Value *Len;
+  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
+    Len = MI->getLength();
+  } else {
+    switch (cast<IntrinsicInst>(I)->getIntrinsicID()) {
+    default: assert(false && "Unexpected intrinsic!");
+    case Intrinsic::init_trampoline:
+      return -1u;
+    case Intrinsic::lifetime_end:
+      Len = I->getOperand(1);
+      break;
+    }
+  }
+  if (ConstantInt *LenCI = dyn_cast<ConstantInt>(Len))
+    if (!LenCI->isAllOnesValue())
+      return LenCI->getZExtValue();
+  return -1u;
+}
+
+/// isStoreAtLeastAsWideAs - Return true if the size of the store in I1 is
+/// greater than or equal to the store in I2.  This returns false if we don't
+/// know.
+///
+static bool isStoreAtLeastAsWideAs(Instruction *I1, Instruction *I2,
+                                   const TargetData *TD) {
+  const Type *I1Ty = getPointerOperand(I1)->getType();
+  const Type *I2Ty = getPointerOperand(I2)->getType();
+
+  // Exactly the same type, must have exactly the same size.
+  if (I1Ty == I2Ty) return true;
+
+  int I1Size = getStoreSize(I1, TD);
+  int I2Size = getStoreSize(I2, TD);
+
+  return I1Size != -1 && I2Size != -1 && I1Size >= I2Size;
+}
+
+bool DSE::runOnBasicBlock(BasicBlock &BB) {
+  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
+  TD = getAnalysisIfAvailable<TargetData>();
+
   bool MadeChange = false;
-  
-  // Do a top-down walk on the BB
-  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end();
-       BBI != BBE; ++BBI) {
-    // If we find a store or a free...
-    if (!isa<StoreInst>(BBI) && !isa<FreeInst>(BBI))
+
+  // Do a top-down walk on the BB.
+  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
+    Instruction *Inst = BBI++;
+
+    // If we find a store or a free, get its memory dependence.
+    if (!doesClobberMemory(Inst) && !isFreeCall(Inst))
       continue;
-      
-    Value* pointer = 0;
-    if (StoreInst* S = dyn_cast<StoreInst>(BBI)) {
-      if (!S->isVolatile())
-        pointer = S->getPointerOperand();
-      else
+
+    MemDepResult InstDep = MD.getDependency(Inst);
+
+    // Ignore non-local stores.
+    // FIXME: cross-block DSE would be fun. :)
+    if (InstDep.isNonLocal()) continue;
+
+    // Handle frees whose dependencies are non-trivial.
+    if (isFreeCall(Inst)) {
+      MadeChange |= handleFreeWithNonTrivialDependency(Inst, InstDep);
+      continue;
+    }
+
+    // If not a definite must-alias dependency, ignore it.
+    if (!InstDep.isDef())
+      continue;
+
+    // If this is a store-store dependence, then the previous store is dead so
+    // long as this store is at least as big as it.
+    if (doesClobberMemory(InstDep.getInst())) {
+      Instruction *DepStore = InstDep.getInst();
+      if (isStoreAtLeastAsWideAs(Inst, DepStore, TD) &&
+          isElidable(DepStore)) {
+        // Delete the store and now-dead instructions that feed it.
+        DeleteDeadInstruction(DepStore);
+        NumFastStores++;
+        MadeChange = true;
+
+        // DeleteDeadInstruction can delete the current instruction in loop
+        // cases, reset BBI.
+        BBI = Inst;
+        if (BBI != BB.begin())
+          --BBI;
         continue;
-    } else
-      pointer = cast<FreeInst>(BBI)->getPointerOperand();
-      
-    TranslatePointerBitCasts(pointer, true);
-    StoreInst*& last = lastStore[pointer];
-    bool deletedStore = false;
-    
-    // ... to a pointer that has been stored to before...
-    if (last) {
-      Instruction* dep = MD.getDependency(BBI);
-      
-      // ... and no other memory dependencies are between them....
-      while (dep != MemoryDependenceAnalysis::None &&
-             dep != MemoryDependenceAnalysis::NonLocal &&
-             isa<StoreInst>(dep)) {
-        if (dep != last ||
-            TD.getTypeStoreSize(last->getOperand(0)->getType()) >
-            TD.getTypeStoreSize(BBI->getOperand(0)->getType())) {
-          dep = MD.getDependency(BBI, dep);
+      }
+    }
+
+    if (!isElidable(Inst))
+      continue;
+
+    // If we're storing the same value back to a pointer that we just
+    // loaded from, then the store can be removed.
+    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
+      if (LoadInst *DepLoad = dyn_cast<LoadInst>(InstDep.getInst())) {
+        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
+            SI->getOperand(0) == DepLoad) {
+          // DeleteDeadInstruction can delete the current instruction.  Save BBI
+          // in case we need it.
+          WeakVH NextInst(BBI);
+
+          DeleteDeadInstruction(SI);
+
+          if (NextInst == 0)  // Next instruction deleted.
+            BBI = BB.begin();
+          else if (BBI != BB.begin())  // Revisit this instruction if possible.
+            --BBI;
+          NumFastStores++;
+          MadeChange = true;
           continue;
         }
+      }
+    }
+
+    // If this is a lifetime end marker, we can throw away the store.
+    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(InstDep.getInst())) {
+      if (II->getIntrinsicID() == Intrinsic::lifetime_end) {
+        // Delete the store and now-dead instructions that feed it.
+        // DeleteDeadInstruction can delete the current instruction.  Save BBI
+        // in case we need it.
+        WeakVH NextInst(BBI);
-        // Remove it!
-        MD.removeInstruction(last);
-        
-        // DCE instructions only used to calculate that store
-        if (Instruction* D = dyn_cast<Instruction>(last->getOperand(0)))
-          possiblyDead.insert(D);
-        if (Instruction* D = dyn_cast<Instruction>(last->getOperand(1)))
-          possiblyDead.insert(D);
+        DeleteDeadInstruction(Inst);
-        last->eraseFromParent();
+        if (NextInst == 0)  // Next instruction deleted.
+          BBI = BB.begin();
+        else if (BBI != BB.begin())  // Revisit this instruction if possible.
+          --BBI;
         NumFastStores++;
-        deletedStore = true;
         MadeChange = true;
-        
-        break;
+        continue;
       }
     }
-    
-    // Handle frees whose dependencies are non-trivial.
-    if (FreeInst* F = dyn_cast<FreeInst>(BBI)) {
-      if (!deletedStore)
-        MadeChange |= handleFreeWithNonTrivialDependency(F,
-                                                         MD.getDependency(F),
-                                                         possiblyDead);
-      // No known stores after the free
-      last = 0;
-    } else {
-      // Update our most-recent-store map.
-      last = cast<StoreInst>(BBI);
-    }
   }

-  // If this block ends in a return, unwind, unreachable, and eventually
-  // tailcall, then all allocas are dead at its end.
+  // If this block ends in a return, unwind, or unreachable, all allocas are
+  // dead at its end, which means stores to them are also dead.
   if (BB.getTerminator()->getNumSuccessors() == 0)
-    MadeChange |= handleEndBlock(BB, possiblyDead);
-  
-  // Do a trivial DCE
-  while (!possiblyDead.empty()) {
-    Instruction *I = possiblyDead.back();
-    possiblyDead.pop_back();
-    DeleteDeadInstructionChains(I, possiblyDead);
-  }
+    MadeChange |= handleEndBlock(BB);

   return MadeChange;
 }

 /// handleFreeWithNonTrivialDependency - Handle frees of entire structures whose
-/// dependency is a store to a field of that structure
-bool DSE::handleFreeWithNonTrivialDependency(FreeInst* F, Instruction* dep,
-                                             SetVector<Instruction*>& possiblyDead) {
-  TargetData &TD = getAnalysis<TargetData>();
+/// dependency is a store to a field of that structure.
+bool DSE::handleFreeWithNonTrivialDependency(Instruction *F, MemDepResult Dep) {
   AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

-  if (dep == MemoryDependenceAnalysis::None ||
-      dep == MemoryDependenceAnalysis::NonLocal)
+  Instruction *Dependency = Dep.getInst();
+  if (!Dependency || !doesClobberMemory(Dependency) || !isElidable(Dependency))
     return false;

-  StoreInst* dependency = dyn_cast<StoreInst>(dep);
-  if (!dependency)
-    return false;
-  else if (dependency->isVolatile())
+  Value *DepPointer = getPointerOperand(Dependency)->getUnderlyingObject();
+
+  // Check for aliasing.
+  if (AA.alias(F->getOperand(1), 1, DepPointer, 1) !=
+         AliasAnalysis::MustAlias)
     return false;

-  Value* depPointer = dependency->getPointerOperand();
-  const Type* depType = dependency->getOperand(0)->getType();
-  unsigned depPointerSize = TD.getTypeStoreSize(depType);
-  
-  // Check for aliasing
-  AliasAnalysis::AliasResult A = AA.alias(F->getPointerOperand(), ~0U,
-                                          depPointer, depPointerSize);
-  
-  if (A == AliasAnalysis::MustAlias) {
-    // Remove it!
-    MD.removeInstruction(dependency);
-    
-    // DCE instructions only used to calculate that store
-    if (Instruction* D = dyn_cast<Instruction>(dependency->getOperand(0)))
-      possiblyDead.insert(D);
-    if (Instruction* D = dyn_cast<Instruction>(dependency->getOperand(1)))
-      possiblyDead.insert(D);
-    
-    dependency->eraseFromParent();
-    NumFastStores++;
-    return true;
-  }
-  
-  return false;
+  // DCE instructions only used to calculate that store
+  DeleteDeadInstruction(Dependency);
+  NumFastStores++;
+  return true;
 }

 /// handleEndBlock - Remove dead stores to stack-allocated locations in the
@@ -241,22 +313,22 @@ bool DSE::handleFreeWithNonTrivialDependency(FreeInst* F, Instruction* dep,
 /// ...
 /// store i32 1, i32* %A
 /// ret void
-bool DSE::handleEndBlock(BasicBlock& BB,
-                         SetVector<Instruction*>& possiblyDead) {
-  TargetData &TD = getAnalysis<TargetData>();
+bool DSE::handleEndBlock(BasicBlock &BB) {
   AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
-  MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();

   bool MadeChange = false;

   // Pointers alloca'd in this function are dead in the end block
   SmallPtrSet<Value*, 64> deadPointers;

-  // Find all of the alloca'd pointers in the entry block
+  // Find all of the alloca'd pointers in the entry block.
   BasicBlock *Entry = BB.getParent()->begin();
   for (BasicBlock::iterator I = Entry->begin(), E = Entry->end(); I != E; ++I)
     if (AllocaInst *AI = dyn_cast<AllocaInst>(I))
       deadPointers.insert(AI);
+
+  // Treat byval arguments the same, stores to them are dead at the end of the
+  // function.
   for (Function::arg_iterator AI = BB.getParent()->arg_begin(),
        AE = BB.getParent()->arg_end(); AI != AE; ++AI)
     if (AI->hasByValAttr())
@@ -266,108 +338,63 @@ bool DSE::handleEndBlock(BasicBlock& BB,
   for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
     --BBI;

-    // If we find a store whose pointer is dead...
-    if (StoreInst* S = dyn_cast<StoreInst>(BBI)) {
-      if (!S->isVolatile()) {
-        Value* pointerOperand = S->getPointerOperand();
+    // If we find a store whose pointer is dead.
+    if (doesClobberMemory(BBI)) {
+      if (isElidable(BBI)) {
         // See through pointer-to-pointer bitcasts
-        TranslatePointerBitCasts(pointerOperand);
-        
+        Value *pointerOperand = getPointerOperand(BBI)->getUnderlyingObject();
+
         // Alloca'd pointers or byval arguments (which are functionally like
         // alloca's) are valid candidates for removal.
         if (deadPointers.count(pointerOperand)) {
-          // Remove it!
-          MD.removeInstruction(S);
-          
-          // DCE instructions only used to calculate that store
-          if (Instruction* D = dyn_cast<Instruction>(S->getOperand(0)))
-            possiblyDead.insert(D);
-          if (Instruction* D = dyn_cast<Instruction>(S->getOperand(1)))
-            possiblyDead.insert(D);
-          
+          // DCE instructions only used to calculate that store.
+          Instruction *Dead = BBI;
           BBI++;
-          S->eraseFromParent();
+          DeleteDeadInstruction(Dead, &deadPointers);
           NumFastStores++;
           MadeChange = true;
+          continue;
         }
       }
-      
       continue;
-      
-    // We can also remove memcpy's to local variables at the end of a function
-    } else if (MemCpyInst* M = dyn_cast<MemCpyInst>(BBI)) {
-      Value* dest = M->getDest();
-      TranslatePointerBitCasts(dest);
-      
-      if (deadPointers.count(dest)) {
-        MD.removeInstruction(M);
-        
-        // DCE instructions only used to calculate that memcpy
-        if (Instruction* D = dyn_cast<Instruction>(M->getRawSource()))
-          possiblyDead.insert(D);
-        if (Instruction* D = dyn_cast<Instruction>(M->getLength()))
-          possiblyDead.insert(D);
-        if (Instruction* D = dyn_cast<Instruction>(M->getRawDest()))
-          possiblyDead.insert(D);
-        
-        BBI++;
-        M->eraseFromParent();
-        NumFastOther++;
-        MadeChange = true;
-        
+      // Because a memcpy or memmove is also a load, we can't skip it if we
+      // didn't remove it.
+      if (!isa<MemTransferInst>(BBI))
         continue;
-      }
-      
-      // Because a memcpy is also a load, we can't skip it if we didn't remove it
     }

-    Value* killPointer = 0;
+    Value *killPointer = 0;
     uint64_t killPointerSize = ~0UL;

     // If we encounter a use of the pointer, it is no longer considered dead
-    if (LoadInst* L = dyn_cast<LoadInst>(BBI)) {
-      // However, if this load is unused, we can go ahead and remove it, and
-      // not have to worry about it making our pointer undead!
-      if (L->use_empty()) {
-        MD.removeInstruction(L);
-        
-        // DCE instructions only used to calculate that load
-        if (Instruction* D = dyn_cast<Instruction>(L->getPointerOperand()))
-          possiblyDead.insert(D);
-        
+    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
+      // However, if this load is unused and not volatile, we can go ahead and
+      // remove it, and not have to worry about it making our pointer undead!
+      if (L->use_empty() && !L->isVolatile()) {
         BBI++;
-        L->eraseFromParent();
+        DeleteDeadInstruction(L, &deadPointers);
         NumFastOther++;
         MadeChange = true;
-        possiblyDead.remove(L);
-        
         continue;
       }

       killPointer = L->getPointerOperand();
-    } else if (VAArgInst* V = dyn_cast<VAArgInst>(BBI)) {
+    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
       killPointer = V->getOperand(0);
-    } else if (isa<MemCpyInst>(BBI) &&
-               isa<ConstantInt>(cast<MemCpyInst>(BBI)->getLength())) {
-      killPointer = cast<MemCpyInst>(BBI)->getSource();
+    } else if (isa<MemTransferInst>(BBI) &&
+               isa<ConstantInt>(cast<MemTransferInst>(BBI)->getLength())) {
+      killPointer = cast<MemTransferInst>(BBI)->getSource();
       killPointerSize = cast<ConstantInt>(
-                            cast<MemCpyInst>(BBI)->getLength())->getZExtValue();
-    } else if (AllocaInst* A = dyn_cast<AllocaInst>(BBI)) {
+                       cast<MemTransferInst>(BBI)->getLength())->getZExtValue();
+    } else if (AllocaInst *A = dyn_cast<AllocaInst>(BBI)) {
       deadPointers.erase(A);

       // Dead alloca's can be DCE'd when we reach them
       if (A->use_empty()) {
-        MD.removeInstruction(A);
-        
-        // DCE instructions only used to calculate that load
-        if (Instruction* D = dyn_cast<Instruction>(A->getArraySize()))
-          possiblyDead.insert(D);
-        
         BBI++;
-        A->eraseFromParent();
+        DeleteDeadInstruction(A, &deadPointers);
         NumFastOther++;
         MadeChange = true;
-        possiblyDead.remove(A);
       }

       continue;
@@ -393,21 +420,10 @@ bool DSE::handleEndBlock(BasicBlock& BB,
           deadPointers.clear();
           return MadeChange;
         }
-        
-        // Get size information for the alloca
-        unsigned pointerSize = ~0U;
-        if (AllocaInst* A = dyn_cast<AllocaInst>(*I)) {
-          if (ConstantInt* C = dyn_cast<ConstantInt>(A->getArraySize()))
-            pointerSize = C->getZExtValue() * \
-                          TD.getABITypeSize(A->getAllocatedType());
-        } else {
-          const PointerType* PT = cast<PointerType>(
-                                                    cast<Argument>(*I)->getType());
-          pointerSize = TD.getABITypeSize(PT->getElementType());
-        }
-        
+
         // See if the call site touches it
-        AliasAnalysis::ModRefResult A = AA.getModRefInfo(CS, *I, pointerSize);
+        AliasAnalysis::ModRefResult A = AA.getModRefInfo(CS, *I,
+                                                         getPointerSize(*I));

         if (A == AliasAnalysis::ModRef)
           modRef++;
@@ -423,35 +439,24 @@ bool DSE::handleEndBlock(BasicBlock& BB,
        deadPointers.erase(*I);

      continue;
-    } else {
+    } else if (isInstructionTriviallyDead(BBI)) {
       // For any non-memory-affecting non-terminators, DCE them as we reach them
-      Instruction *CI = BBI;
-      if (!CI->isTerminator() && CI->use_empty() && !isa<FreeInst>(CI)) {
-        
-        // DCE instructions only used to calculate that load
-        for (Instruction::op_iterator OI = CI->op_begin(), OE = CI->op_end();
-             OI != OE; ++OI)
-          if (Instruction* D = dyn_cast<Instruction>(OI))
-            possiblyDead.insert(D);
-        
-        BBI++;
-        CI->eraseFromParent();
-        NumFastOther++;
-        MadeChange = true;
-        possiblyDead.remove(CI);
-        
-        continue;
-      }
+      Instruction *Inst = BBI;
+      BBI++;
+      DeleteDeadInstruction(Inst, &deadPointers);
+      NumFastOther++;
+      MadeChange = true;
+      continue;
     }

     if (!killPointer)
       continue;
-    
-    TranslatePointerBitCasts(killPointer);
-    
+
+    killPointer = killPointer->getUnderlyingObject();
+
     // Deal with undead pointers
     MadeChange |= RemoveUndeadPointers(killPointer, killPointerSize, BBI,
-                                       deadPointers, possiblyDead);
+                                       deadPointers);
   }

   return MadeChange;
@@ -459,101 +464,110 @@ bool DSE::handleEndBlock(BasicBlock& BB,

 /// RemoveUndeadPointers - check for uses of a pointer that make it
 /// undead when scanning for dead stores to alloca's.
-bool DSE::RemoveUndeadPointers(Value* killPointer, uint64_t killPointerSize,
-                               BasicBlock::iterator& BBI,
-                               SmallPtrSet<Value*, 64>& deadPointers,
-                               SetVector<Instruction*>& possiblyDead) {
-  TargetData &TD = getAnalysis<TargetData>();
+bool DSE::RemoveUndeadPointers(Value *killPointer, uint64_t killPointerSize,
+                               BasicBlock::iterator &BBI,
+                               SmallPtrSet<Value*, 64> &deadPointers) {
   AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
-  MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
-  
+
   // If the kill pointer can be easily reduced to an alloca,
-  // don't bother doing extraneous AA queries
+  // don't bother doing extraneous AA queries.
   if (deadPointers.count(killPointer)) {
     deadPointers.erase(killPointer);
     return false;
-  } else if (isa<GlobalValue>(killPointer)) {
-    // A global can't be in the dead pointer set
-    return false;
   }

+  // A global can't be in the dead pointer set.
+  if (isa<GlobalValue>(killPointer))
+    return false;
+
   bool MadeChange = false;

-  std::vector<Value*> undead;
-  
+  SmallVector<Value*, 16> undead;
+
   for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
-       E = deadPointers.end(); I != E; ++I) {
-    // Get size information for the alloca
-    unsigned pointerSize = ~0U;
-    if (AllocaInst* A = dyn_cast<AllocaInst>(*I)) {
-      if (ConstantInt* C = dyn_cast<ConstantInt>(A->getArraySize()))
-        pointerSize = C->getZExtValue() * \
-                      TD.getABITypeSize(A->getAllocatedType());
-    } else {
-      const PointerType* PT = cast<PointerType>(
-                                                cast<Argument>(*I)->getType());
-      pointerSize = TD.getABITypeSize(PT->getElementType());
-    }
-    
+       E = deadPointers.end(); I != E; ++I) {
     // See if this pointer could alias it
-    AliasAnalysis::AliasResult A = AA.alias(*I, pointerSize,
+    AliasAnalysis::AliasResult A = AA.alias(*I, getPointerSize(*I),
                                             killPointer, killPointerSize);

     // If it must-alias and a store, we can delete it
     if (isa<StoreInst>(BBI) && A == AliasAnalysis::MustAlias) {
-      StoreInst* S = cast<StoreInst>(BBI);
+      StoreInst *S = cast<StoreInst>(BBI);

       // Remove it!
-      MD.removeInstruction(S);
-      
-      // DCE instructions only used to calculate that store
-      if (Instruction* D = dyn_cast<Instruction>(S->getOperand(0)))
-        possiblyDead.insert(D);
-      if (Instruction* D = dyn_cast<Instruction>(S->getOperand(1)))
-        possiblyDead.insert(D);
-      
-      BBI++;
-      S->eraseFromParent();
+      ++BBI;
+      DeleteDeadInstruction(S, &deadPointers);
       NumFastStores++;
       MadeChange = true;

       continue;

       // Otherwise, it is undead
-    } else if (A != AliasAnalysis::NoAlias)
-      undead.push_back(*I);
+    } else if (A != AliasAnalysis::NoAlias)
+      undead.push_back(*I);
   }

-  for (std::vector<Value*>::iterator I = undead.begin(), E = undead.end();
+  for (SmallVector<Value*, 16>::iterator I = undead.begin(), E = undead.end();
        I != E; ++I)
       deadPointers.erase(*I);

   return MadeChange;
 }

-/// DeleteDeadInstructionChains - takes an instruction and a setvector of
-/// dead instructions.  If I is dead, it is erased, and its operands are
-/// checked for deadness.  If they are dead, they are added to the dead
-/// setvector.
-void DSE::DeleteDeadInstructionChains(Instruction *I,
-                                      SetVector<Instruction*> &DeadInsts) {
-  // Instruction must be dead.
-  if (!I->use_empty() || !isInstructionTriviallyDead(I)) return;
-  
-  // Let the memory dependence know
-  getAnalysis<MemoryDependenceAnalysis>().removeInstruction(I);
-  
-  // See if this made any operands dead.  We do it this way in case the
-  // instruction uses the same operand twice.  We don't want to delete a
-  // value then reference it.
-  for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i) {
-    if (I->getOperand(i)->hasOneUse())
-      if (Instruction* Op = dyn_cast<Instruction>(I->getOperand(i)))
-        DeadInsts.insert(Op);      // Attempt to nuke it later.
+/// DeleteDeadInstruction - Delete this instruction.  Before we do, go through
+/// and zero out all the operands of this instruction.  If any of them become
+/// dead, delete them and the computation tree that feeds them.
+///
+/// If ValueSet is non-null, remove any deleted instructions from it as well.
+///
+void DSE::DeleteDeadInstruction(Instruction *I,
+                                SmallPtrSet<Value*, 64> *ValueSet) {
+  SmallVector<Instruction*, 32> NowDeadInsts;
+
+  NowDeadInsts.push_back(I);
+  --NumFastOther;
+
+  // Before we touch this instruction, remove it from memdep!
+  MemoryDependenceAnalysis &MDA = getAnalysis<MemoryDependenceAnalysis>();
+  do {
+    Instruction *DeadInst = NowDeadInsts.pop_back_val();

-    I->setOperand(i, 0);         // Drop from the operand list.
-  }
+    ++NumFastOther;
+
+    // This instruction is dead, zap it, in stages.  Start by removing it from
+    // MemDep, which needs to know the operands and needs it to be in the
+    // function.
+    MDA.removeInstruction(DeadInst);
+
+    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
+      Value *Op = DeadInst->getOperand(op);
+      DeadInst->setOperand(op, 0);
+
+      // If this operand just became dead, add it to the NowDeadInsts list.
+      if (!Op->use_empty()) continue;
+
+      if (Instruction *OpI = dyn_cast<Instruction>(Op))
+        if (isInstructionTriviallyDead(OpI))
+          NowDeadInsts.push_back(OpI);
+    }
+
+    DeadInst->eraseFromParent();
+
+    if (ValueSet) ValueSet->erase(DeadInst);
+  } while (!NowDeadInsts.empty());
+}

-  I->eraseFromParent();
-  ++NumFastOther;
+unsigned DSE::getPointerSize(Value *V) const {
+  if (TD) {
+    if (AllocaInst *A = dyn_cast<AllocaInst>(V)) {
+      // Get size information for the alloca
+      if (ConstantInt *C = dyn_cast<ConstantInt>(A->getArraySize()))
+        return C->getZExtValue() * TD->getTypeAllocSize(A->getAllocatedType());
+    } else {
+      assert(isa<Argument>(V) && "Expected AllocaInst or Argument!");
+      const PointerType *PT = cast<PointerType>(V->getType());
+      return TD->getTypeAllocSize(PT->getElementType());
+    }
+  }
+  return ~0U;
 }
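
For reference, a minimal C sketch of the store-to-store case the rewritten runOnBasicBlock handles: the first store is deleted once MemoryDependenceAnalysis reports a must-alias store dependence, isStoreAtLeastAsWideAs holds, and nothing reads the pointer in between. This example is illustrative only, not part of the patch; the function name is made up.

/* With optimizations enabled (e.g. clang -O2 -S -emit-llvm), the emitted IR
 * should keep only the second store; the first is removed by the -dse pass. */
void overwrite_example(int *p) {
  *p = 1;  /* dead store: clobbered by the store below before any load of *p */
  *p = 2;  /* this store survives dead store elimination */
}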