//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface to
// a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);
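
// Clients obtain this analysis through the pass manager; a minimal usage
// sketch (the enclosing pass 'MyPass' is hypothetical, not part of this file):
//
//   void MyPass::getAnalysisUsage(AnalysisUsage &AU) const {
//     AU.addRequired<MemoryDependenceAnalysis>();
//   }
//   bool MyPass::runOnFunction(Function &F) {
//     MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//     ...
//   }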

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = &getAnalysis<TargetData>();
  return false;
}

/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, BasicBlock::iterator ScanIt,
                          BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed.
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure, so treat the size as unknown.
      PointerSize = ~0ULL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past it.
      if (AA->getModRefInfo(CS, InstCS) == AliasAnalysis::NoModRef)
        continue;

      // FIXME: If this is a ref/ref result, we should ignore it!
      //   X = strlen(P);
      //   Y = strlen(Q);
      //   Z = strlen(P);  // Z = X

      // If they interfere, we generally return clobber.  However, if they are
      // calls to the same read-only function, we return Def: the second call
      // must return the same value.
      if (!AA->onlyReadsMemory(CS) || CS.getCalledFunction() == 0 ||
          CS.getCalledFunction() != InstCS.getCalledFunction())
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    } else {
      // Non-memory instruction; it cannot interfere with the call.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found in this block.
  return MemDepResult::getNonLocal();
}

/// getDependencyFrom - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::
getDependencyFrom(Instruction *QueryInst, BasicBlock::iterator ScanIt,
                  BasicBlock *BB) {
  // The first instruction in a block is always non-local.
  if (ScanIt == BB->begin())
    return MemDepResult::getNonLocal();

  // Get the pointer value for which dependence will be determined.
  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      return MemDepResult::getClobber(--ScanIt);

    MemPtr = SI->getPointerOperand();
    MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      return MemDepResult::getClobber(--ScanIt);

    MemPtr = LI->getPointerOperand();
    MemSize = TD->getTypeStoreSize(LI->getType());
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0UL;
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    assert(0 && "Should use getCallSiteDependencyFrom!");
    return getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanIt, BB);
  } else {
    // Otherwise, this is a vaarg or non-memory instruction; just return a
    // clobber dependency on the previous inst.
    return MemDepResult::getClobber(--ScanIt);
  }

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Values depend on loads if the pointers are must aliased.  This means
    // that a load depends on another must aliased load from the same value.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // Loads never clobber each other, so a may-aliasing load imposes no
      // dependence on a load query; keep scanning.
      if (isa<LoadInst>(QueryInst) && R == AliasAnalysis::MayAlias)
        continue;
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA->alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(AI);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    if (AA->getModRefInfo(Inst, MemPtr, MemSize) == AliasAnalysis::NoModRef)
      continue;

    // Otherwise, there is a dependence.
    return MemDepResult::getClobber(Inst);
  }

  // If we found nothing, return the non-local flag.
  return MemDepResult::getNonLocal();
}

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    SmallPtrSet<Instruction*, 4> &InstMap = ReverseLocalDeps[Inst];
    InstMap.erase(QueryInst);
    if (InstMap.empty())
      ReverseLocalDeps.erase(Inst);
  }

  // Do the scan.
  if (!isa<CallInst>(QueryInst) && !isa<InvokeInst>(QueryInst))
    LocalCache = getDependencyFrom(QueryInst, ScanPos, QueryInst->getParent());
  else
    LocalCache = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                           QueryInst->getParent());

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}
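
// A minimal usage sketch for getDependency (illustrative only; 'MD' is the
// analysis reference and 'Load' a hypothetical query instruction):
//
//   MemDepResult Res = MD.getDependency(Load);
//   if (Res.isDef())          // e.g. a must-aliased store or load
//     Instruction *DefInst = Res.getInst();
//   else if (Res.isClobber()) // something that may write the memory
//     ... Res.getInst() ...
//   else                      // non-local: nothing in this block; use
//     ...                     // getNonLocalDependency() to look further.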

/// getNonLocalDependency - Perform a full dependency query for the
/// specified instruction, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "nonlocal" dependency
/// within its own block.
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalDependency(Instruction *QueryInst) {
  assert(getDependency(QueryInst).isNonLocal() &&
     "getNonLocalDependency should only be used on insts with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryInst];

  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc.
  /// In the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      NumCacheNonLocal++;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryInst->getParent();
    DirtyBlocks.append(pred_begin(QueryBB), pred_end(QueryBB));
    NumUncacheNonLocal++;
  }

  // Visited checked first, vector in sorted order.
  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
    if (Entry != Cache.begin() && (&*Entry)[-1].first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't
    // have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;

        // We're removing QueryInst's use of Inst.
        SmallPtrSet<Instruction*, 4> &InstMap = ReverseNonLocalDeps[Inst];
        InstMap.erase(QueryInst);
        if (InstMap.empty()) ReverseNonLocalDeps.erase(Inst);
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;
    if (!isa<CallInst>(QueryInst) && !isa<InvokeInst>(QueryInst))
      Dep = getDependencyFrom(QueryInst, ScanPos, DirtyBB);
    else
      Dep = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                      DirtyBB);

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryInst);
    } else {
      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
      DirtyBlocks.append(pred_begin(DirtyBB), pred_end(DirtyBB));
    }
  }

  return Cache;
}
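
// Sketch of walking the returned cache (illustrative; 'MD' is the analysis
// reference and 'Inst' a hypothetical instruction whose dependency is
// non-local):
//
//   typedef MemoryDependenceAnalysis::NonLocalDepInfo NLDI;
//   const NLDI &Deps = MD.getNonLocalDependency(Inst);
//   for (NLDI::const_iterator I = Deps.begin(), E = Deps.end(); I != E; ++I) {
//     BasicBlock *BB = I->first;   // block the result applies to
//     MemDepResult R = I->second;  // NonLocal means the block is transparent
//   }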

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        ReverseNonLocalDeps[Inst].erase(RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst()) {
      SmallPtrSet<Instruction*, 4> &RLD = ReverseLocalDeps[Inst];
      RLD.erase(RemInst);
      if (RLD.empty())
        ReverseLocalDeps.erase(Inst);
    }

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // Loop over all of the things that depend on the instruction we're removing.
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    // Anything that was locally dependent on RemInst is now going to be
    // dependent on the instruction after RemInst.  It will have the dirty flag
    // set so it will rescan.  This saves having to scan the entire block to
    // get the dependence.
    Instruction *NewDepInst = next(BasicBlock::iterator(RemInst));

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = MemDepResult::getDirty(NewDepInst);

      // Make sure to remember that new things depend on NewDepInst.
      ReverseDepsToAdd.push_back(std::make_pair(NewDepInst,
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        Instruction *NextI = 0;
        if (!RemInst->isTerminator()) {
          NextI = next(BasicBlock::iterator(RemInst));
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
        }

        DI->second = MemDepResult::getDirty(NextI);
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}
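
// A transformation that deletes an instruction should notify this analysis
// before erasing it, e.g. (sketch; 'MD' and 'DeadInst' are hypothetical
// client-side names):
//
//   MD.removeInstruction(DeadInst);
//   DeadInst->eraseFromParent();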

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }
}