STATISTIC(NumSimplify, "Number of instructions simplified or DCE'd");
STATISTIC(NumCSE, "Number of instructions CSE'd");
-STATISTIC(NumCSEMem, "Number of load and call instructions CSE'd");
+STATISTIC(NumCSELoad, "Number of load instructions CSE'd");
+STATISTIC(NumCSECall, "Number of call instructions CSE'd");
+STATISTIC(NumDSE, "Number of trivial dead stores removed");
+// getHash - Hash an arbitrary pointer by address via DenseMapInfo; used to
+// fold instruction operands into a hash value.
static unsigned getHash(const void *V) {
return DenseMapInfo<const void*>::getHashValue(V);
}
static bool canHandle(Instruction *Inst) {
+ // This can only handle non-void readnone functions.
+ if (CallInst *CI = dyn_cast<CallInst>(Inst))
+ return CI->doesNotAccessMemory() && !CI->getType()->isVoidTy();
return isa<CastInst>(Inst) || isa<BinaryOperator>(Inst) ||
isa<GetElementPtrInst>(Inst) || isa<CmpInst>(Inst) ||
isa<SelectInst>(Inst) || isa<ExtractElementInst>(Inst) ||
Res ^= *I;
} else {
// nothing extra to hash in.
- assert((isa<BinaryOperator>(Inst) || isa<GetElementPtrInst>(Inst) ||
+ assert((isa<CallInst>(Inst) ||
+ isa<BinaryOperator>(Inst) || isa<GetElementPtrInst>(Inst) ||
isa<SelectInst>(Inst) || isa<ExtractElementInst>(Inst) ||
isa<InsertElementInst>(Inst) || isa<ShuffleVectorInst>(Inst)) &&
"Invalid/unknown instruction");
}
//===----------------------------------------------------------------------===//
-// MemoryValue
+// CallValue
//===----------------------------------------------------------------------===//
namespace {
- /// MemoryValue - Instances of this struct represent available load and call
- /// values in the scoped hash table.
- struct MemoryValue {
+ /// CallValue - Instances of this struct represent available call values in
+ /// the scoped hash table.
+ struct CallValue {
Instruction *Inst;
- MemoryValue(Instruction *I) : Inst(I) {
+ CallValue(Instruction *I) : Inst(I) {
assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
}
}
+ // canHandle - Return true if Inst is a call we can value number: a
+ // non-void CallInst that only reads memory.
static bool canHandle(Instruction *Inst) {
- if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
- return !LI->isVolatile();
- if (CallInst *CI = dyn_cast<CallInst>(Inst))
- return CI->onlyReadsMemory();
- return false;
+ // Don't value number anything that returns void.
+ if (Inst->getType()->isVoidTy())
+ return false;
+
+ CallInst *CI = dyn_cast<CallInst>(Inst);
+ if (CI == 0 || !CI->onlyReadsMemory())
+ return false;
+ return true;
}
};
}
namespace llvm {
- // MemoryValue is POD.
- template<> struct isPodLike<MemoryValue> {
+ // CallValue is POD.
+ template<> struct isPodLike<CallValue> {
static const bool value = true;
};
- template<> struct DenseMapInfo<MemoryValue> {
- static inline MemoryValue getEmptyKey() {
+ template<> struct DenseMapInfo<CallValue> {
+ static inline CallValue getEmptyKey() {
return DenseMapInfo<Instruction*>::getEmptyKey();
}
- static inline MemoryValue getTombstoneKey() {
+ static inline CallValue getTombstoneKey() {
return DenseMapInfo<Instruction*>::getTombstoneKey();
}
- static unsigned getHashValue(MemoryValue Val);
- static bool isEqual(MemoryValue LHS, MemoryValue RHS);
+ static unsigned getHashValue(CallValue Val);
+ static bool isEqual(CallValue LHS, CallValue RHS);
};
}
-unsigned DenseMapInfo<MemoryValue>::getHashValue(MemoryValue Val) {
+// getHashValue - Hash a call by XOR-ing the (position-shifted) pointer
+// hashes of its operands and mixing in the opcode.
+unsigned DenseMapInfo<CallValue>::getHashValue(CallValue Val) {
Instruction *Inst = Val.Inst;
// Hash in all of the operands as pointers.
unsigned Res = 0;
- for (unsigned i = 0, e = Inst->getNumOperands(); i != e; ++i)
+ for (unsigned i = 0, e = Inst->getNumOperands(); i != e; ++i) {
+ assert(!Inst->getOperand(i)->getType()->isMetadataTy() &&
+ "Cannot value number calls with metadata operands");
Res ^= getHash(Inst->getOperand(i)) << i;
+ }
+
// Mix in the opcode.
return (Res << 1) ^ Inst->getOpcode();
}
-bool DenseMapInfo<MemoryValue>::isEqual(MemoryValue LHS, MemoryValue RHS) {
+// isEqual - Sentinel keys (empty/tombstone) compare only by pointer; real
+// calls compare with Instruction::isIdenticalTo, which already subsumes the
+// opcode comparison that the removed line performed explicitly.
+bool DenseMapInfo<CallValue>::isEqual(CallValue LHS, CallValue RHS) {
Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;
-
if (LHS.isSentinel() || RHS.isSentinel())
return LHSI == RHSI;
-
- if (LHSI->getOpcode() != RHSI->getOpcode()) return false;
return LHSI->isIdenticalTo(RHSI);
}
/// their lookup.
ScopedHTType *AvailableValues;
- typedef ScopedHashTable<MemoryValue, std::pair<Value*, unsigned> > MemHTType;
- /// AvailableMemValues - This scoped hash table contains the current values of
- /// loads and other read-only memory values. This allows us to get efficient
- /// access to dominating loads we we find a fully redundant load. In addition
- /// to the most recent load, we keep track of a generation count of the read,
- /// which is compared against the current generation count. The current
- /// generation count is incremented after every possibly writing memory
- /// operation, which ensures that we only CSE loads with other loads that have
- /// no intervening store.
- MemHTType *AvailableMemValues;
+ /// AvailableLoads - This scoped hash table contains the current values
+ /// of loads. This allows us to get efficient access to dominating loads when
+ /// we have a fully redundant load. In addition to the most recent load, we
+ /// keep track of a generation count of the read, which is compared against
+ /// the current generation count. The current generation count is
+ /// incremented after every possibly writing memory operation, which ensures
+ /// that we only CSE loads with other loads that have no intervening store.
+ typedef RecyclingAllocator<BumpPtrAllocator,
+ ScopedHashTableVal<Value*, std::pair<Value*, unsigned> > > LoadMapAllocator;
+ typedef ScopedHashTable<Value*, std::pair<Value*, unsigned>,
+ DenseMapInfo<Value*>, LoadMapAllocator> LoadHTType;
+ LoadHTType *AvailableLoads;
+
+ /// AvailableCalls - This scoped hash table contains the current values
+ /// of read-only call values. It uses the same generation count as loads.
+ typedef ScopedHashTable<CallValue, std::pair<Value*, unsigned> > CallHTType;
+ CallHTType *AvailableCalls;
/// CurrentGeneration - This is the current generation of the memory value.
unsigned CurrentGeneration;
// off all the values we install.
ScopedHTType::ScopeTy Scope(*AvailableValues);
- // Define a scope for the memory values so that anything we add will get
+ // Define a scope for the load values so that anything we add will get
+ // popped when we recurse back up to our parent domtree node.
+ LoadHTType::ScopeTy LoadScope(*AvailableLoads);
+
+ // Define a scope for the call values so that anything we add will get
// popped when we recurse back up to our parent domtree node.
- MemHTType::ScopeTy MemScope(*AvailableMemValues);
+ CallHTType::ScopeTy CallScope(*AvailableCalls);
BasicBlock *BB = Node->getBlock();
if (BB->getSinglePredecessor() == 0)
++CurrentGeneration;
+ /// LastStore - Keep track of the last non-volatile store that we saw... for
+ /// as long as there is no instruction that reads memory. If we see a store
+ /// to the same location, we delete the dead store. This zaps trivial dead
+ /// stores which can occur in bitfield code among other things.
+ StoreInst *LastStore = 0;
+
bool Changed = false;
// See if any instructions in the block can be eliminated. If so, do it. If
continue;
}
- // If this is a read-only memory value, process it.
- if (MemoryValue::canHandle(Inst)) {
- // If we have an available version of this value, and if it is the right
+ // If this is a non-volatile load, process it.
+ if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
+ // Ignore volatile loads.
+ if (LI->isVolatile()) {
+ LastStore = 0;
+ continue;
+ }
+
+ // If we have an available version of this load, and if it is the right
+ // generation, replace this instruction.
+ std::pair<Value*, unsigned> InVal =
+ AvailableLoads->lookup(Inst->getOperand(0));
+ if (InVal.first != 0 && InVal.second == CurrentGeneration) {
+ DEBUG(dbgs() << "EarlyCSE CSE LOAD: " << *Inst << " to: "
+ << *InVal.first << '\n');
+ if (!Inst->use_empty()) Inst->replaceAllUsesWith(InVal.first);
+ Inst->eraseFromParent();
+ Changed = true;
+ ++NumCSELoad;
+ continue;
+ }
+
+ // Otherwise, remember that we have this instruction.
+ AvailableLoads->insert(Inst->getOperand(0),
+ std::pair<Value*, unsigned>(Inst, CurrentGeneration));
+ LastStore = 0;
+ continue;
+ }
+
+ // If this instruction may read from memory, forget LastStore.
+ if (Inst->mayReadFromMemory())
+ LastStore = 0;
+
+ // If this is a read-only call, process it.
+ if (CallValue::canHandle(Inst)) {
+ // If we have an available version of this call, and if it is the right
// generation, replace this instruction.
- std::pair<Value*, unsigned> InVal = AvailableMemValues->lookup(Inst);
+ std::pair<Value*, unsigned> InVal = AvailableCalls->lookup(Inst);
if (InVal.first != 0 && InVal.second == CurrentGeneration) {
- DEBUG(dbgs() << "EarlyCSE CSE MEM: " << *Inst << " to: "
+ DEBUG(dbgs() << "EarlyCSE CSE CALL: " << *Inst << " to: "
<< *InVal.first << '\n');
if (!Inst->use_empty()) Inst->replaceAllUsesWith(InVal.first);
Inst->eraseFromParent();
Changed = true;
- ++NumCSEMem;
+ ++NumCSECall;
continue;
}
// Otherwise, remember that we have this instruction.
- AvailableMemValues->insert(Inst,
+ AvailableCalls->insert(Inst,
std::pair<Value*, unsigned>(Inst, CurrentGeneration));
continue;
}
// Okay, this isn't something we can CSE at all. Check to see if it is
// something that could modify memory. If so, our available memory values
// cannot be used so bump the generation count.
- if (Inst->mayWriteToMemory())
+ if (Inst->mayWriteToMemory()) {
++CurrentGeneration;
+
+ if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
+ // We do a trivial form of DSE if there are two stores to the same
+ // location with no intervening loads. Delete the earlier store.
+ if (LastStore &&
+ LastStore->getPointerOperand() == SI->getPointerOperand()) {
+ DEBUG(dbgs() << "EarlyCSE DEAD STORE: " << *LastStore << " due to: "
+ << *Inst << '\n');
+ LastStore->eraseFromParent();
+ Changed = true;
+ ++NumDSE;
+ LastStore = 0;
+ continue;
+ }
+
+ // Okay, we just invalidated anything we knew about loaded values. Try
+ // to salvage *something* by remembering that the stored value is a live
+ // version of the pointer. It is safe to forward from volatile stores
+ // to non-volatile loads, so we don't have to check for volatility of
+ // the store.
+ AvailableLoads->insert(SI->getPointerOperand(),
+ std::pair<Value*, unsigned>(SI->getValueOperand(), CurrentGeneration));
+
+ // Remember that this was the last store we saw for DSE.
+ if (!SI->isVolatile())
+ LastStore = SI;
+ }
+ }
}
unsigned LiveOutGeneration = CurrentGeneration;
bool EarlyCSE::runOnFunction(Function &F) {
TD = getAnalysisIfAvailable<TargetData>();
DT = &getAnalysis<DominatorTree>();
+
+ // Tables that the pass uses when walking the domtree.
ScopedHTType AVTable;
AvailableValues = &AVTable;
-
- MemHTType MemTable;
- AvailableMemValues = &MemTable;
+ LoadHTType LoadTable;
+ AvailableLoads = &LoadTable;
+ CallHTType CallTable;
+ AvailableCalls = &CallTable;
CurrentGeneration = 0;
return processNode(DT->getRootNode());