//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
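//
// Illustrative usage (a hedged sketch, not a specification from this file):
// a client transform pass typically requires this analysis and then issues
// lazy queries against it.  'SomeMemInst' below stands for whatever load,
// store, or call the client is examining and is not defined here.
//
//   // In the client's getAnalysisUsage():  AU.addRequired<MemoryDependenceAnalysis>();
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//   MemDepResult Res = MD.getDependency(SomeMemInst);
//   if (Res.isDef() || Res.isClobber())
//     ;  // Local dependence: Res.getInst() precedes SomeMemInst in its block.
//   else if (Res.isNonLocal())
//     ;  // No dependence in this block; see the non-local query entry points.
//
//===----------------------------------------------------------------------===//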

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/PredIteratorCache.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr,
          "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

MemoryDependenceAnalysis::MemoryDependenceAnalysis()
: FunctionPass(&ID), PredCache(0) {
}

MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
}

/// Clean up memory in between runs
void MemoryDependenceAnalysis::releaseMemory() {
  LocalDeps.clear();
  NonLocalDeps.clear();
  NonLocalPointerDeps.clear();
  ReverseLocalDeps.clear();
  ReverseNonLocalDeps.clear();
  ReverseNonLocalPtrDeps.clear();
  PredCache->clear();
}

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = &getAnalysis<TargetData>();
  if (PredCache == 0)
    PredCache.reset(new PredIteratorCache());
  return false;
}

/// RemoveFromReverseMap - This is a helper function that removes Val from
/// 'Inst's set in ReverseMap.  If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
static void RemoveFromReverseMap(DenseMap<Instruction*,
                                 SmallPtrSet<KeyTy*, 4> > &ReverseMap,
                                 Instruction *Inst, KeyTy *Val) {
  typename DenseMap<Instruction*, SmallPtrSet<KeyTy*, 4> >::iterator
  InstIt = ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!");
  (void)Found;  // Silence the unused-variable warning in release builds.
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}
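
// For illustration (a hedged sketch of the invariant, not new behavior): if
// instruction 'D' was recorded as the cached dependence of query 'Q', then the
// corresponding reverse map holds Q in D's set.  A call such as
//
//   RemoveFromReverseMap(ReverseLocalDeps, D, Q);
//
// drops that back-edge, and erases D's entry entirely once the set is empty,
// keeping the forward and reverse caches in sync.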

/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
                          BasicBlock::iterator ScanIt, BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure.
      PointerSize = ~0ULL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      // Debug intrinsics don't cause dependences.
      if (isa<DbgInfoIntrinsic>(Inst)) continue;
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past it.
      switch (AA->getModRefInfo(CS, InstCS)) {
      case AliasAnalysis::NoModRef:
        // If the two calls don't interact (e.g. InstCS is readnone) keep
        // scanning.
        continue;
      case AliasAnalysis::Ref:
        // If the two calls read the same memory locations and CS is a readonly
        // function, then we have two cases: 1) the calls may not interfere with
        // each other at all.  2) the calls may produce the same value.  In case
        // #1 we want to ignore the values, in case #2, we want to return Inst
        // as a Def dependence.  This allows us to CSE in cases like:
        //   X = strlen(P);
        //    memchr(...);
        //   Y = strlen(P);  // Y = X
        if (isReadOnlyCall) {
          if (CS.getCalledFunction() != 0 &&
              CS.getCalledFunction() == InstCS.getCalledFunction())
            return MemDepResult::getDef(Inst);
          // Ignore unrelated read/read call dependences.
          continue;
        }
        // FALL THROUGH
      default:
        return MemDepResult::getClobber(Inst);
      }
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}
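
// Worked example for the scan above (a hedged sketch; the names are
// hypothetical and not taken from any particular test):
//
//   call void @init(i8* %p)        ; earlier call that may write *%p
//   %x = call i32 @strlen(i8* %p)  ; the query call site CS
//
// Scanning backwards from the query, a call that the alias analysis reports
// as Mod for CS is returned as a Clobber; a readnone call is skipped; and a
// second identical readonly call is returned as a Def, which is what enables
// the strlen/strlen CSE case described in the comment above.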

/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends.  If isLoad is true, this routine ignores may-aliases with
/// read-only operations.
MemDepResult MemoryDependenceAnalysis::
getPointerDependencyFrom(Value *MemPtr, uint64_t MemSize, bool isLoad,
                         BasicBlock::iterator ScanIt, BasicBlock *BB) {

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Debug intrinsics don't cause dependences.
    if (isa<DbgInfoIntrinsic>(Inst)) continue;

    // Values depend on loads if the pointers are must aliased.  This means that
    // a load depends on another must aliased load from the same value.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // May-alias loads don't depend on each other without a dependence.
      if (isLoad && R == AliasAnalysis::MayAlias)
        continue;
      // Stores depend on may and must aliased loads, loads depend on must-alias
      // loads.
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA->alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(AI);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    switch (AA->getModRefInfo(Inst, MemPtr, MemSize)) {
    case AliasAnalysis::NoModRef:
      // If the call has no effect on the queried pointer, just ignore it.
      continue;
    case AliasAnalysis::Ref:
      // If the call is known to never store to the pointer, and if this is a
      // load query, we can safely ignore it (scan past it).
      if (isLoad)
        continue;
      // FALL THROUGH.
    default:
      // Otherwise, there is a potential dependence.  Return a clobber.
      return MemDepResult::getClobber(Inst);
    }
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}
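
// Worked example for the classification above (a hedged sketch; the IR names
// are hypothetical):
//
//   store i32 1, i32* %P
//   %a = load i32* %Q     ; %Q may-aliases %P
//   %b = load i32* %P     ; the query: a load of %P
//
// Scanning backwards from %b with isLoad=true, the load of %Q is skipped
// unless it must-aliases %P; the store to %P comes back as a Def when the
// pointers must alias and as a Clobber when they only may alias.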

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    RemoveFromReverseMap(ReverseLocalDeps, Inst, QueryInst);
  }

  BasicBlock *QueryParent = QueryInst->getParent();

  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // No dependence found.  If this is the entry block of the function, it is a
    // clobber, otherwise it is non-local.
    if (QueryParent != &QueryParent->getParent()->getEntryBlock())
      LocalCache = MemDepResult::getNonLocal();
    else
      LocalCache = MemDepResult::getClobber(QueryInst);
  } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = SI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
    }
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = LI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(LI->getType());
    }
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    CallSite QueryCS = CallSite::get(QueryInst);
    bool isReadOnly = AA->onlyReadsMemory(QueryCS);
    LocalCache = getCallSiteDependencyFrom(QueryCS, isReadOnly, ScanPos,
                                           QueryParent);
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0UL;
  } else {
    // Non-memory instruction.
    LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
  }

  // If we need to do a pointer scan, make it happen.
  if (MemPtr)
    LocalCache = getPointerDependencyFrom(MemPtr, MemSize,
                                          isa<LoadInst>(QueryInst),
                                          ScanPos, QueryParent);

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}
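
// Illustrative client use of getDependency (a hedged sketch; 'MD' is a
// reference to this analysis and 'LI' is some LoadInst the client holds,
// neither of which is defined here):
//
//   MemDepResult Dep = MD.getDependency(LI);
//   if (Dep.isDef())
//     if (StoreInst *SI = dyn_cast_or_null<StoreInst>(Dep.getInst()))
//       ;  // LI reads exactly what SI stored; forwarding may be possible.
//
// Repeated queries for the same instruction are answered out of LocalDeps
// until removeInstruction() or releaseMemory() invalidates the entry.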

/// AssertSorted - This method is used when -debug is specified to verify that
/// cache arrays are properly kept sorted.
static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         int Count = -1) {
  if (Count == -1) Count = Cache.size();
  if (Count == 0) return;

  for (unsigned i = 1; i != unsigned(Count); ++i)
    assert(Cache[i-1] <= Cache[i] && "Cache isn't sorted!");
}

/// getNonLocalCallDependency - Perform a full dependency query for the
/// specified call, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "NonLocal" dependency
/// within its own block.
///
/// This returns a reference to an internal data structure that may be
/// invalidated on the next non-local query or when an instruction is
/// removed.  Clients must copy this data if they want it around longer than
/// that.
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
  assert(getDependency(QueryCS.getInstruction()).isNonLocal() &&
 "getNonLocalCallDependency should only be used on calls with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryCS.getInstruction()];
  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc. In
  /// the uncached case, this starts out as the set of predecessors we care
  /// about (which is the significantly faster case).
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      NumCacheNonLocal++;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryCS.getInstruction()->getParent();
    for (BasicBlock **PI = PredCache->GetPreds(QueryBB); *PI; ++PI)
      DirtyBlocks.push_back(*PI);
    NumUncacheNonLocal++;
  }

  // isReadonlyCall - If this is a read-only call, we can be more aggressive.
  bool isReadonlyCall = AA->onlyReadsMemory(QueryCS);

  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();
  DEBUG(AssertSorted(Cache));

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    DEBUG(AssertSorted(Cache, NumSortedEntries));
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
    if (Entry != Cache.begin() && prior(Entry)->first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't have
    // to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;
        // We're removing QueryInst's use of Inst.
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst,
                             QueryCS.getInstruction());
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;

    if (ScanPos != DirtyBB->begin()) {
      Dep = getCallSiteDependencyFrom(QueryCS, isReadonlyCall,ScanPos, DirtyBB);
    } else if (DirtyBB != &DirtyBB->getParent()->getEntryBlock()) {
      // No dependence found.  If this is the entry block of the function, it is
      // a clobber, otherwise it is non-local.
      Dep = MemDepResult::getNonLocal();
    } else {
      Dep = MemDepResult::getClobber(ScanPos);
    }

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryCS.getInstruction());
    } else {

      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
      for (BasicBlock **PI = PredCache->GetPreds(DirtyBB); *PI; ++PI)
        DirtyBlocks.push_back(*PI);
    }
  }

  return Cache;
}
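
// Illustrative use of the result (a hedged sketch; 'MD' and 'SomeCall' are
// the client's analysis reference and call instruction, not defined here):
//
//   const MemoryDependenceAnalysis::NonLocalDepInfo &Deps =
//     MD.getNonLocalCallDependency(CallSite::get(SomeCall));
//   for (unsigned i = 0, e = Deps.size(); i != e; ++i)
//     if (Deps[i].second.isClobber())
//       ;  // Deps[i].first is a block whose contents clobber the call.
//
// The reference points into MemDep's cache, so copy it before issuing the
// next non-local query.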

/// getNonLocalPointerDependency - Perform a full dependency query for an
/// access to the specified (non-volatile) memory location, returning the
/// set of instructions that either define or clobber the value.
///
/// This method assumes the pointer has a "NonLocal" dependency within its
/// own block.
///
void MemoryDependenceAnalysis::
getNonLocalPointerDependency(Value *Pointer, bool isLoad, BasicBlock *FromBB,
                             SmallVectorImpl<NonLocalDepEntry> &Result) {
  assert(isa<PointerType>(Pointer->getType()) &&
         "Can't get pointer deps of a non-pointer!");
  Result.clear();

  // We know that the pointer value is live into FromBB; find the def/clobbers
  // from predecessors.
  const Type *EltTy = cast<PointerType>(Pointer->getType())->getElementType();
  uint64_t PointeeSize = TD->getTypeStoreSize(EltTy);

  // This is the set of blocks we've inspected, and the pointer we consider in
  // each block.  Because of critical edges, we currently bail out if querying
  // a block with multiple different pointers.  This can happen during PHI
  // translation.
  DenseMap<BasicBlock*, Value*> Visited;
  if (!getNonLocalPointerDepFromBB(Pointer, PointeeSize, isLoad, FromBB,
                                   Result, Visited, true))
    return;
  Result.clear();
  Result.push_back(std::make_pair(FromBB,
                                  MemDepResult::getClobber(FromBB->begin())));
}
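
// Illustrative use (a hedged sketch): a GVN-style client that saw a NonLocal
// local result for a load 'LI' might gather the non-local def/clobber set as:
//
//   SmallVector<MemoryDependenceAnalysis::NonLocalDepEntry, 16> Deps;
//   MD.getNonLocalPointerDependency(LI->getPointerOperand(), true /*isLoad*/,
//                                   LI->getParent(), Deps);
//   // Each entry pairs a BasicBlock* with the Def or Clobber found for the
//   // pointer on paths reaching LI through that block.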

/// GetNonLocalInfoForBlock - Compute the memdep value for BB with
/// Pointer/PointeeSize using either cached information in Cache or by doing a
/// lookup (which may use dirty cache info if available).  If we do a lookup,
/// add the result to the cache.
MemDepResult MemoryDependenceAnalysis::
GetNonLocalInfoForBlock(Value *Pointer, uint64_t PointeeSize,
                        bool isLoad, BasicBlock *BB,
                        NonLocalDepInfo *Cache, unsigned NumSortedEntries) {

  // Do a binary search to see if we already have an entry for this block in
  // the cache set.  If so, find it.
  NonLocalDepInfo::iterator Entry =
    std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
                     std::make_pair(BB, MemDepResult()));
  if (Entry != Cache->begin() && prior(Entry)->first == BB)
    --Entry;

  MemDepResult *ExistingResult = 0;
  if (Entry != Cache->begin()+NumSortedEntries && Entry->first == BB)
    ExistingResult = &Entry->second;

  // If we have a cached entry, and it is non-dirty, use it as the value for
  // this dependency.
  if (ExistingResult && !ExistingResult->isDirty()) {
    ++NumCacheNonLocalPtr;
    return *ExistingResult;
  }

  // Otherwise, we have to scan for the value.  If we have a dirty cache
  // entry, start scanning from its position, otherwise we scan from the end
  // of the block.
  BasicBlock::iterator ScanPos = BB->end();
  if (ExistingResult && ExistingResult->getInst()) {
    assert(ExistingResult->getInst()->getParent() == BB &&
           "Instruction invalidated?");
    ++NumCacheDirtyNonLocalPtr;
    ScanPos = ExistingResult->getInst();

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    ValueIsLoadPair CacheKey(Pointer, isLoad);
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, ScanPos,
                         CacheKey.getOpaqueValue());
  } else {
    ++NumUncacheNonLocalPtr;
  }

  // Scan the block for the dependency.
  MemDepResult Dep = getPointerDependencyFrom(Pointer, PointeeSize, isLoad,
                                              ScanPos, BB);

  // If we had a dirty entry for the block, update it.  Otherwise, just add
  // a new entry.
  if (ExistingResult)
    *ExistingResult = Dep;
  else
    Cache->push_back(std::make_pair(BB, Dep));

  // If the block has a dependency (i.e. it isn't completely transparent to
  // the value), remember the reverse association because we just added it
  // to Cache!
  if (Dep.isNonLocal())
    return Dep;

  // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
  // update MemDep when we remove instructions.
  Instruction *Inst = Dep.getInst();
  assert(Inst && "Didn't depend on anything?");
  ValueIsLoadPair CacheKey(Pointer, isLoad);
  ReverseNonLocalPtrDeps[Inst].insert(CacheKey.getOpaqueValue());
  return Dep;
}

/// getNonLocalPointerDepFromBB - Perform a dependency query based on
/// pointer/pointeesize starting at the end of StartBB.  Add any clobber/def
/// results to the results vector and keep track of which blocks are visited in
/// 'Visited'.
///
/// This has special behavior for the first block queries (when SkipFirstBlock
/// is true).  In this special case, it ignores the contents of the specified
/// block and starts returning dependence info for its predecessors.
///
/// This function returns false on success, or true to indicate that it could
/// not compute dependence information for some reason.  This should be treated
/// as a clobber dependence on the first instruction in the predecessor block.
bool MemoryDependenceAnalysis::
getNonLocalPointerDepFromBB(Value *Pointer, uint64_t PointeeSize,
                            bool isLoad, BasicBlock *StartBB,
                            SmallVectorImpl<NonLocalDepEntry> &Result,
                            DenseMap<BasicBlock*, Value*> &Visited,
                            bool SkipFirstBlock) {
  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer, isLoad);

  std::pair<BBSkipFirstBlockPair, NonLocalDepInfo> *CacheInfo =
    &NonLocalPointerDeps[CacheKey];
  NonLocalDepInfo *Cache = &CacheInfo->second;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (CacheInfo->first == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
    // We have a fully cached result for this query, so we can just return the
    // cached results and populate the visited set.  However, we have to verify
    // that we don't already have conflicting results for these blocks.  Check
    // to ensure that if a block in the results set is in the visited set that
    // it was for the same pointer query.
    if (!Visited.empty()) {
      for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
           I != E; ++I) {
        DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->first);
        if (VI == Visited.end() || VI->second == Pointer) continue;

        // We have a pointer mismatch in a block.  Just return clobber, saying
        // that something was clobbered in this result.  We could also do a
        // non-fully cached query, but there is little point in doing this.
        return true;
      }
    }

    for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
         I != E; ++I) {
      Visited.insert(std::make_pair(I->first, Pointer));
      if (!I->second.isNonLocal())
        Result.push_back(*I);
    }
    ++NumCacheCompleteNonLocalPtr;
    return false;
  }

  // Otherwise, either this is a new block, a block with an invalid cache
  // pointer or one that we're about to invalidate by putting more info into it
  // than its valid cache info.  If empty, the result will be valid cache info,
  // otherwise it isn't.
  if (Cache->empty())
    CacheInfo->first = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
  else
    CacheInfo->first = BBSkipFirstBlockPair();

  SmallVector<BasicBlock*, 32> Worklist;
  Worklist.push_back(StartBB);

  // Keep track of the entries that we know are sorted.  Previously cached
  // entries will all be sorted.  The entries we add we only sort on demand (we
  // don't insert every element into its sorted position).  We know that we
  // won't get any reuse from currently inserted values, because we don't
  // revisit blocks after we insert info for them.
  unsigned NumSortedEntries = Cache->size();
  DEBUG(AssertSorted(*Cache));

  while (!Worklist.empty()) {
    BasicBlock *BB = Worklist.pop_back_val();

    // Skip the first block if we have it.
    if (!SkipFirstBlock) {
      // Analyze the dependency of *Pointer in FromBB.  See if we already have
      // been here.
      assert(Visited.count(BB) && "Should check 'visited' before adding to WL");

      // Get the dependency info for Pointer in BB.  If we have cached
      // information, we will use it, otherwise we compute it.
      DEBUG(AssertSorted(*Cache, NumSortedEntries));
      MemDepResult Dep = GetNonLocalInfoForBlock(Pointer, PointeeSize, isLoad,
                                                 BB, Cache, NumSortedEntries);

      // If we got a Def or Clobber, add this to the list of results.
      if (!Dep.isNonLocal()) {
        Result.push_back(NonLocalDepEntry(BB, Dep));
        continue;
      }
    }

    // If 'Pointer' is an instruction defined in this block, then we need to do
    // phi translation to change it into a value live in the predecessor block.
    // If phi translation fails, then we can't continue dependence analysis.
    Instruction *PtrInst = dyn_cast<Instruction>(Pointer);
    bool NeedsPHITranslation = PtrInst && PtrInst->getParent() == BB;

    // If no PHI translation is needed, just add all the predecessors of this
    // block to scan them as well.
    if (!NeedsPHITranslation) {
      SkipFirstBlock = false;
      for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
        // Verify that we haven't looked at this block yet.
        std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
          InsertRes = Visited.insert(std::make_pair(*PI, Pointer));
        if (InsertRes.second) {
          // First time we've looked at *PI.
          Worklist.push_back(*PI);
          continue;
        }

        // If we have seen this block before, but it was with a different
        // pointer then we have a phi translation failure and we have to treat
        // this as a clobber.
        if (InsertRes.first->second != Pointer)
          goto PredTranslationFailure;
      }
      continue;
    }

    // If we do need to do phi translation, then there are a bunch of different
    // cases, because we have to find a Value* live in the predecessor block. We
    // know that PtrInst is defined in this block at least.

    // If this is directly a PHI node, just use the incoming values for each
    // pred as the phi translated version.
    if (PHINode *PtrPHI = dyn_cast<PHINode>(PtrInst)) {
      for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
        BasicBlock *Pred = *PI;
        Value *PredPtr = PtrPHI->getIncomingValueForBlock(Pred);

        // Check to see if we have already visited this pred block with another
        // pointer.  If so, we can't do this lookup.  This failure can occur
        // with PHI translation when a critical edge exists and the PHI node in
        // the successor translates to a pointer value different than the
        // pointer the block was first analyzed with.
        std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
          InsertRes = Visited.insert(std::make_pair(Pred, PredPtr));

        if (!InsertRes.second) {
          // If the predecessor was visited with PredPtr, then we already did
          // the analysis and can ignore it.
          if (InsertRes.first->second == PredPtr)
            continue;

          // Otherwise, the block was previously analyzed with a different
          // pointer.  We can't represent the result of this case, so we just
          // treat this as a phi translation failure.
          goto PredTranslationFailure;
        }

        // We may have added values to the cache list before this PHI
        // translation.  If so, we haven't done anything to ensure that the
        // cache remains sorted.  Sort it now (if needed) so that recursive
        // invocations of getNonLocalPointerDepFromBB that could reuse the cache
        // value will only see properly sorted cache arrays.
        if (Cache && NumSortedEntries != Cache->size())
          std::sort(Cache->begin(), Cache->end());
        Cache = 0;

        // FIXME: it is entirely possible that PHI translating will end up with
        // the same value.  Consider PHI translating something like:
        // X = phi [x, bb1], [y, bb2].  PHI translating for bb1 doesn't *need*
        // to recurse here, pedantically speaking.

        // If we have a problem phi translating, fall through to the code below
        // to handle the failure condition.
        if (getNonLocalPointerDepFromBB(PredPtr, PointeeSize, isLoad, Pred,
                                        Result, Visited))
          goto PredTranslationFailure;
      }

      // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->second;
      NumSortedEntries = Cache->size();

      // Since we did phi translation, the "Cache" set won't contain all of the
      // results for the query.  This is ok (we can still use it to accelerate
      // specific block queries) but we can't do the fastpath "return all
      // results from the set".  Clear out the indicator for this.
      CacheInfo->first = BBSkipFirstBlockPair();
      SkipFirstBlock = false;
      continue;

      // TODO: BITCAST, GEP.
    }

    //   cerr << "MEMDEP: Could not PHI translate: " << *Pointer;
    //   if (isa<BitCastInst>(PtrInst) || isa<GetElementPtrInst>(PtrInst))
    //     cerr << "OP:\t\t\t\t" << *PtrInst->getOperand(0);
  PredTranslationFailure:

    if (Cache == 0) {
      // Refresh the CacheInfo/Cache pointer if it got invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->second;
      NumSortedEntries = Cache->size();
    } else if (NumSortedEntries != Cache->size()) {
      std::sort(Cache->begin(), Cache->end());
      NumSortedEntries = Cache->size();
    }

    // Since we did phi translation, the "Cache" set won't contain all of the
    // results for the query.  This is ok (we can still use it to accelerate
    // specific block queries) but we can't do the fastpath "return all
    // results from the set".  Clear out the indicator for this.
    CacheInfo->first = BBSkipFirstBlockPair();

    // If *nothing* works, mark the pointer as being clobbered by the first
    // instruction in this block.
    //
    // If this is the magic first block, return this as a clobber of the whole
    // incoming value.  Since we can't phi translate to one of the predecessors,
    // we have to bail out.
    if (SkipFirstBlock)
      return true;

    for (NonLocalDepInfo::reverse_iterator I = Cache->rbegin(); ; ++I) {
      assert(I != Cache->rend() && "Didn't find current block??");
      if (I->first != BB)
        continue;

      assert(I->second.isNonLocal() &&
             "Should only be here with transparent block");
      I->second = MemDepResult::getClobber(BB->begin());
      ReverseNonLocalPtrDeps[BB->begin()].insert(CacheKey.getOpaqueValue());
      Result.push_back(*I);
      break;
    }
  }

  // Okay, we're done now.  If we added new values to the cache, re-sort it.
  switch (Cache->size()-NumSortedEntries) {
  case 0:
    // done, no new entries.
    break;
  case 2: {
    // Two new entries, insert the last one into place.
    NonLocalDepEntry Val = Cache->back();
    Cache->pop_back();
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache->begin(), Cache->end()-1, Val);
    Cache->insert(Entry, Val);
    // FALL THROUGH.
  }
  case 1:
    // One new entry, just insert the new value at the appropriate position.
    if (Cache->size() != 1) {
      NonLocalDepEntry Val = Cache->back();
      Cache->pop_back();
      NonLocalDepInfo::iterator Entry =
        std::upper_bound(Cache->begin(), Cache->end(), Val);
      Cache->insert(Entry, Val);
    }
    break;
  default:
    // Added many values, do a full scale sort.
    std::sort(Cache->begin(), Cache->end());
  }
  DEBUG(AssertSorted(*Cache));
  return false;
}
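
// PHI translation example for the walk above (a hedged sketch; the IR is
// hypothetical and only meant to illustrate the Visited bookkeeping):
//
//   bb1:                         bb2:
//     %p1 = ...                    %p2 = ...
//     br label %merge              br label %merge
//   merge:
//     %p = phi i8* [ %p1, %bb1 ], [ %p2, %bb2 ]
//     %v = load i8* %p
//
// When the query pointer %p is the PHI defined in 'merge', the walk recurses
// into bb1 with %p1 and into bb2 with %p2.  If some predecessor has already
// been visited with a different pointer (as can happen across critical edges),
// the query gives up and the caller records the conservative clobber described
// in getNonLocalPointerDependency.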

/// RemoveCachedNonLocalPointerDependencies - If P exists in
/// CachedNonLocalPointerInfo, remove it.
void MemoryDependenceAnalysis::
RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
  CachedNonLocalPointerInfo::iterator It =
    NonLocalPointerDeps.find(P);
  if (It == NonLocalPointerDeps.end()) return;

  // Remove all of the entries in the BB->val map.  This involves removing
  // instructions from the reverse map.
  NonLocalDepInfo &PInfo = It->second.second;

  for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
    Instruction *Target = PInfo[i].second.getInst();
    if (Target == 0) continue;  // Ignore non-local dep results.
    assert(Target->getParent() == PInfo[i].first);

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P.getOpaqueValue());
  }

  // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
  NonLocalPointerDeps.erase(It);
}

/// invalidateCachedPointerInfo - This method is used to invalidate cached
/// information about the specified pointer, because it may be too
/// conservative in memdep.  This is an optional call that can be used when
/// the client detects an equivalence between the pointer and some other
/// value and replaces the other value with ptr.  This can make Ptr available
/// in more places than the cached info would otherwise indicate.
void MemoryDependenceAnalysis::invalidateCachedPointerInfo(Value *Ptr) {
  // If Ptr isn't really a pointer, just ignore it.
  if (!isa<PointerType>(Ptr->getType())) return;
  // Flush store info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
  // Flush load info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true));
}
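
// Illustrative call site (a hedged sketch): a client that proves two pointers
// equal and rewrites uses of 'B' to 'A' would typically tell memdep about it,
// so that stale cached results for A do not hide the newly exposed facts:
//
//   B->replaceAllUsesWith(A);
//   MD.invalidateCachedPointerInfo(A);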

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst())
      RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // If we have any cached pointer dependencies on this instruction, remove
  // them.  If the instruction has non-pointer type, then it can't be a pointer
  // base.

  // Remove it from both the load info and the store info.  The instruction
  // can't be in either of these maps if it is non-pointer.
  if (isa<PointerType>(RemInst->getType())) {
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  // If we find RemInst as a clobber or Def in any of the maps for other values,
  // we need to replace its entry with a dirty version of the instruction after
  // it.  If RemInst is a terminator, we use a null dirty value.
  //
  // Using a dirty version of the instruction after RemInst saves having to scan
  // the entire block to get to this point.
  MemDepResult NewDirtyVal;
  if (!RemInst->isTerminator())
    NewDirtyVal = MemDepResult::getDirty(++BasicBlock::iterator(RemInst));

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has local stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = NewDirtyVal;

      // Make sure to remember that new things depend on NewDepInst.
      assert(NewDirtyVal.getInst() && "There is no way something else can have "
             "a local dep on this if it is a terminator!");
      ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NextI = NewDirtyVal.getInst())
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
  // value in the NonLocalPointerDeps info.
  ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
    ReverseNonLocalPtrDeps.find(RemInst);
  if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
    SmallPtrSet<void*, 4> &Set = ReversePtrDepIt->second;
    SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;

    for (SmallPtrSet<void*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      ValueIsLoadPair P;
      P.setFromOpaqueValue(*I);
      assert(P.getPointer() != RemInst &&
             "Already removed NonLocalPointerDeps info for RemInst");

      NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].second;

      // The cache is not valid for any specific block anymore.
      NonLocalPointerDeps[P].first = BBSkipFirstBlockPair();

      // Update any entries for RemInst to use the instruction after it.
      for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
           DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
          ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
      }

      // Re-sort the NonLocalDepInfo.  Changing the dirty entry to its
      // subsequent value may invalidate the sortedness.
      std::sort(NLPDI.begin(), NLPDI.end());
    }

    ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);

    while (!ReversePtrDepsToAdd.empty()) {
      ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
        .insert(ReversePtrDepsToAdd.back().second.getOpaqueValue());
      ReversePtrDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}
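
// Illustrative ordering constraint (a hedged sketch): a client deleting an
// instruction should notify memdep before actually erasing it, e.g.:
//
//   MD.removeInstruction(DeadInst);
//   DeadInst->eraseFromParent();
//
// so that cached entries naming DeadInst can be redirected to the instruction
// after it, as implemented above.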

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (CachedNonLocalPointerInfo::const_iterator I = NonLocalPointerDeps.begin(),
       E = NonLocalPointerDeps.end(); I != E; ++I) {
    assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
    const NonLocalDepInfo &Val = I->second.second;
    for (NonLocalDepInfo::const_iterator II = Val.begin(), E = Val.end();
         II != E; ++II)
      assert(II->second.getInst() != D && "Inst occurs as NLPD value");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseNonLocalPtrDepTy::const_iterator
       I = ReverseNonLocalPtrDeps.begin(),
       E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in rev NLPD map");

    for (SmallPtrSet<void*, 4>::const_iterator II = I->second.begin(),
         E = I->second.end(); II != E; ++II)
      assert(*II != ValueIsLoadPair(D, false).getOpaqueValue() &&
             *II != ValueIsLoadPair(D, true).getOpaqueValue() &&
             "Inst occurs in ReverseNonLocalPtrDeps map");
  }
}