1 //===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements an analysis that determines, for a given memory
11 // operation, what preceding memory operations it depends on. It builds on
12 // alias analysis information, and tries to provide a lazy, caching interface to
13 // a common kind of alias information query.
15 //===----------------------------------------------------------------------===//
17 #define DEBUG_TYPE "memdep"
18 #include "llvm/Analysis/MemoryDependenceAnalysis.h"
19 #include "llvm/Instructions.h"
20 #include "llvm/IntrinsicInst.h"
21 #include "llvm/Function.h"
22 #include "llvm/Analysis/AliasAnalysis.h"
23 #include "llvm/Analysis/Dominators.h"
24 #include "llvm/Analysis/InstructionSimplify.h"
25 #include "llvm/Analysis/MemoryBuiltins.h"
26 #include "llvm/Analysis/PHITransAddr.h"
27 #include "llvm/ADT/Statistic.h"
28 #include "llvm/ADT/STLExtras.h"
29 #include "llvm/Support/PredIteratorCache.h"
30 #include "llvm/Support/Debug.h"
33 STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
34 STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
35 STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");
37 STATISTIC(NumCacheNonLocalPtr,
38 "Number of fully cached non-local ptr responses");
39 STATISTIC(NumCacheDirtyNonLocalPtr,
40 "Number of cached, but dirty, non-local ptr responses");
41 STATISTIC(NumUncacheNonLocalPtr,
42 "Number of uncached non-local ptr responses");
43 STATISTIC(NumCacheCompleteNonLocalPtr,
44 "Number of block queries that were completely cached");
46 char MemoryDependenceAnalysis::ID = 0;
48 // Register this pass...
49 static RegisterPass<MemoryDependenceAnalysis> X("memdep",
50 "Memory Dependence Analysis", false, true);
52 MemoryDependenceAnalysis::MemoryDependenceAnalysis()
53 : FunctionPass(&ID), PredCache(0) {
55 MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
58 /// Clean up memory in between runs
59 void MemoryDependenceAnalysis::releaseMemory() {
62 NonLocalPointerDeps.clear();
63 ReverseLocalDeps.clear();
64 ReverseNonLocalDeps.clear();
65 ReverseNonLocalPtrDeps.clear();
71 /// getAnalysisUsage - Does not modify anything. It uses Alias Analysis.
73 void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
75 AU.addRequiredTransitive<AliasAnalysis>();
78 bool MemoryDependenceAnalysis::runOnFunction(Function &) {
79 AA = &getAnalysis<AliasAnalysis>();
81 PredCache.reset(new PredIteratorCache());
85 /// RemoveFromReverseMap - This is a helper function that removes Val from
86 /// 'Inst's set in ReverseMap. If the set becomes empty, remove Inst's entry.
87 template <typename KeyTy>
88 static void RemoveFromReverseMap(DenseMap<Instruction*,
89 SmallPtrSet<KeyTy, 4> > &ReverseMap,
90 Instruction *Inst, KeyTy Val) {
91 typename DenseMap<Instruction*, SmallPtrSet<KeyTy, 4> >::iterator
92 InstIt = ReverseMap.find(Inst);
93 assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
94 bool Found = InstIt->second.erase(Val);
95 assert(Found && "Invalid reverse map!"); Found=Found;
96 if (InstIt->second.empty())
97 ReverseMap.erase(InstIt);
101 /// getCallSiteDependencyFrom - Private helper for finding the local
102 /// dependencies of a call site.
103 MemDepResult MemoryDependenceAnalysis::
104 getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
105 BasicBlock::iterator ScanIt, BasicBlock *BB) {
106 // Walk backwards through the block, looking for dependencies
107 while (ScanIt != BB->begin()) {
108 Instruction *Inst = --ScanIt;
110 // If this inst is a memory op, get the pointer it accessed
112 uint64_t PointerSize = 0;
113 if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
114 Pointer = S->getPointerOperand();
115 PointerSize = AA->getTypeStoreSize(S->getOperand(0)->getType());
116 } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
117 Pointer = V->getOperand(0);
118 PointerSize = AA->getTypeStoreSize(V->getType());
119 } else if (isFreeCall(Inst)) {
120 Pointer = Inst->getOperand(1);
121 // calls to free() erase the entire structure
123 } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
124 // Debug intrinsics don't cause dependences.
125 if (isa<DbgInfoIntrinsic>(Inst)) continue;
126 CallSite InstCS = CallSite::get(Inst);
127 // If these two calls do not interfere, look past it.
128 switch (AA->getModRefInfo(CS, InstCS)) {
129 case AliasAnalysis::NoModRef:
130 // If the two calls don't interact (e.g. InstCS is readnone) keep
133 case AliasAnalysis::Ref:
134 // If the two calls read the same memory locations and CS is a readonly
135 // function, then we have two cases: 1) the calls may not interfere with
136 // each other at all. 2) the calls may produce the same value. In case
137 // #1 we want to ignore the values, in case #2, we want to return Inst
138 // as a Def dependence. This allows us to CSE in cases like:
141 // Y = strlen(P); // Y = X
142 if (isReadOnlyCall) {
143 if (CS.getCalledFunction() != 0 &&
144 CS.getCalledFunction() == InstCS.getCalledFunction())
145 return MemDepResult::getDef(Inst);
146 // Ignore unrelated read/read call dependences.
151 return MemDepResult::getClobber(Inst);
154 // Non-memory instruction.
158 if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
159 return MemDepResult::getClobber(Inst);
162 // No dependence found. If this is the entry block of the function, it is a
163 // clobber, otherwise it is non-local.
164 if (BB != &BB->getParent()->getEntryBlock())
165 return MemDepResult::getNonLocal();
166 return MemDepResult::getClobber(ScanIt);
169 /// getPointerDependencyFrom - Return the instruction on which a memory
170 /// location depends. If isLoad is true, this routine ignore may-aliases with
171 /// read-only operations.
172 MemDepResult MemoryDependenceAnalysis::
173 getPointerDependencyFrom(Value *MemPtr, uint64_t MemSize, bool isLoad,
174 BasicBlock::iterator ScanIt, BasicBlock *BB) {
176 Value *InvariantTag = 0;
178 // Walk backwards through the basic block, looking for dependencies.
179 while (ScanIt != BB->begin()) {
180 Instruction *Inst = --ScanIt;
182 // If we're in an invariant region, no dependencies can be found before
183 // we pass an invariant-begin marker.
184 if (InvariantTag == Inst) {
189 if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
190 // Debug intrinsics don't cause dependences.
191 if (isa<DbgInfoIntrinsic>(Inst)) continue;
193 // If we pass an invariant-end marker, then we've just entered an
194 // invariant region and can start ignoring dependencies.
195 if (II->getIntrinsicID() == Intrinsic::invariant_end) {
196 // FIXME: This only considers queries directly on the invariant-tagged
197 // pointer, not on query pointers that are indexed off of them. It'd
198 // be nice to handle that at some point.
199 AliasAnalysis::AliasResult R =
200 AA->alias(II->getOperand(3), ~0U, MemPtr, ~0U);
201 if (R == AliasAnalysis::MustAlias) {
202 InvariantTag = II->getOperand(1);
206 // If we reach a lifetime begin or end marker, then the query ends here
207 // because the value is undefined.
208 } else if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
209 // FIXME: This only considers queries directly on the invariant-tagged
210 // pointer, not on query pointers that are indexed off of them. It'd
211 // be nice to handle that at some point.
212 AliasAnalysis::AliasResult R =
213 AA->alias(II->getOperand(2), ~0U, MemPtr, ~0U);
214 if (R == AliasAnalysis::MustAlias)
215 return MemDepResult::getDef(II);
219 // If we're querying on a load and we're in an invariant region, we're done
220 // at this point. Nothing a load depends on can live in an invariant region.
221 if (isLoad && InvariantTag) continue;
223 // Values depend on loads if the pointers are must aliased. This means that
224 // a load depends on another must aliased load from the same value.
225 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
226 Value *Pointer = LI->getPointerOperand();
227 uint64_t PointerSize = AA->getTypeStoreSize(LI->getType());
229 // If we found a pointer, check if it could be the same as our pointer.
230 AliasAnalysis::AliasResult R =
231 AA->alias(Pointer, PointerSize, MemPtr, MemSize);
232 if (R == AliasAnalysis::NoAlias)
235 // May-alias loads don't depend on each other without a dependence.
236 if (isLoad && R == AliasAnalysis::MayAlias)
238 // Stores depend on may and must aliased loads, loads depend on must-alias
240 return MemDepResult::getDef(Inst);
243 if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
244 // There can't be stores to the value we care about inside an
246 if (InvariantTag) continue;
248 // If alias analysis can tell that this store is guaranteed to not modify
249 // the query pointer, ignore it. Use getModRefInfo to handle cases where
250 // the query pointer points to constant memory etc.
251 if (AA->getModRefInfo(SI, MemPtr, MemSize) == AliasAnalysis::NoModRef)
254 // Ok, this store might clobber the query pointer. Check to see if it is
255 // a must alias: in this case, we want to return this as a def.
256 Value *Pointer = SI->getPointerOperand();
257 uint64_t PointerSize = AA->getTypeStoreSize(SI->getOperand(0)->getType());
259 // If we found a pointer, check if it could be the same as our pointer.
260 AliasAnalysis::AliasResult R =
261 AA->alias(Pointer, PointerSize, MemPtr, MemSize);
263 if (R == AliasAnalysis::NoAlias)
265 if (R == AliasAnalysis::MayAlias)
266 return MemDepResult::getClobber(Inst);
267 return MemDepResult::getDef(Inst);
270 // If this is an allocation, and if we know that the accessed pointer is to
271 // the allocation, return Def. This means that there is no dependence and
272 // the access can be optimized based on that. For example, a load could
274 // Note: Only determine this to be a malloc if Inst is the malloc call, not
275 // a subsequent bitcast of the malloc call result. There can be stores to
276 // the malloced memory between the malloc call and its bitcast uses, and we
277 // need to continue scanning until the malloc call.
278 if (isa<AllocaInst>(Inst) ||
279 (isa<CallInst>(Inst) && extractMallocCall(Inst))) {
280 Value *AccessPtr = MemPtr->getUnderlyingObject();
282 if (AccessPtr == Inst ||
283 AA->alias(Inst, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
284 return MemDepResult::getDef(Inst);
288 // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
289 switch (AA->getModRefInfo(Inst, MemPtr, MemSize)) {
290 case AliasAnalysis::NoModRef:
291 // If the call has no effect on the queried pointer, just ignore it.
293 case AliasAnalysis::Mod:
294 // If we're in an invariant region, we can ignore calls that ONLY
295 // modify the pointer.
296 if (InvariantTag) continue;
297 return MemDepResult::getClobber(Inst);
298 case AliasAnalysis::Ref:
299 // If the call is known to never store to the pointer, and if this is a
300 // load query, we can safely ignore it (scan past it).
304 // Otherwise, there is a potential dependence. Return a clobber.
305 return MemDepResult::getClobber(Inst);
309 // No dependence found. If this is the entry block of the function, it is a
310 // clobber, otherwise it is non-local.
311 if (BB != &BB->getParent()->getEntryBlock())
312 return MemDepResult::getNonLocal();
313 return MemDepResult::getClobber(ScanIt);
316 /// getDependency - Return the instruction on which a memory operation
318 MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
319 Instruction *ScanPos = QueryInst;
321 // Check for a cached result
322 MemDepResult &LocalCache = LocalDeps[QueryInst];
324 // If the cached entry is non-dirty, just return it. Note that this depends
325 // on MemDepResult's default constructing to 'dirty'.
326 if (!LocalCache.isDirty())
329 // Otherwise, if we have a dirty entry, we know we can start the scan at that
330 // instruction, which may save us some work.
331 if (Instruction *Inst = LocalCache.getInst()) {
334 RemoveFromReverseMap(ReverseLocalDeps, Inst, QueryInst);
337 BasicBlock *QueryParent = QueryInst->getParent();
340 uint64_t MemSize = 0;
343 if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
344 // No dependence found. If this is the entry block of the function, it is a
345 // clobber, otherwise it is non-local.
346 if (QueryParent != &QueryParent->getParent()->getEntryBlock())
347 LocalCache = MemDepResult::getNonLocal();
349 LocalCache = MemDepResult::getClobber(QueryInst);
350 } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
351 // If this is a volatile store, don't mess around with it. Just return the
352 // previous instruction as a clobber.
353 if (SI->isVolatile())
354 LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
356 MemPtr = SI->getPointerOperand();
357 MemSize = AA->getTypeStoreSize(SI->getOperand(0)->getType());
359 } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
360 // If this is a volatile load, don't mess around with it. Just return the
361 // previous instruction as a clobber.
362 if (LI->isVolatile())
363 LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
365 MemPtr = LI->getPointerOperand();
366 MemSize = AA->getTypeStoreSize(LI->getType());
368 } else if (isFreeCall(QueryInst)) {
369 MemPtr = QueryInst->getOperand(1);
370 // calls to free() erase the entire structure, not just a field.
372 } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
373 int IntrinsicID = 0; // Intrinsic IDs start at 1.
374 if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(QueryInst))
375 IntrinsicID = II->getIntrinsicID();
377 switch (IntrinsicID) {
378 case Intrinsic::lifetime_start:
379 case Intrinsic::lifetime_end:
380 case Intrinsic::invariant_start:
381 MemPtr = QueryInst->getOperand(2);
382 MemSize = cast<ConstantInt>(QueryInst->getOperand(1))->getZExtValue();
384 case Intrinsic::invariant_end:
385 MemPtr = QueryInst->getOperand(3);
386 MemSize = cast<ConstantInt>(QueryInst->getOperand(2))->getZExtValue();
389 CallSite QueryCS = CallSite::get(QueryInst);
390 bool isReadOnly = AA->onlyReadsMemory(QueryCS);
391 LocalCache = getCallSiteDependencyFrom(QueryCS, isReadOnly, ScanPos,
396 // Non-memory instruction.
397 LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
400 // If we need to do a pointer scan, make it happen.
402 bool isLoad = !QueryInst->mayWriteToMemory();
403 if (IntrinsicInst *II = dyn_cast<MemoryUseIntrinsic>(QueryInst)) {
404 isLoad |= II->getIntrinsicID() == Intrinsic::lifetime_end;
406 LocalCache = getPointerDependencyFrom(MemPtr, MemSize, isLoad, ScanPos,
410 // Remember the result!
411 if (Instruction *I = LocalCache.getInst())
412 ReverseLocalDeps[I].insert(QueryInst);
418 /// AssertSorted - This method is used when -debug is specified to verify that
419 /// cache arrays are properly kept sorted.
420 static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
422 if (Count == -1) Count = Cache.size();
423 if (Count == 0) return;
425 for (unsigned i = 1; i != unsigned(Count); ++i)
426 assert(!(Cache[i] < Cache[i-1]) && "Cache isn't sorted!");
430 /// getNonLocalCallDependency - Perform a full dependency query for the
431 /// specified call, returning the set of blocks that the value is
432 /// potentially live across. The returned set of results will include a
433 /// "NonLocal" result for all blocks where the value is live across.
435 /// This method assumes the instruction returns a "NonLocal" dependency
436 /// within its own block.
438 /// This returns a reference to an internal data structure that may be
439 /// invalidated on the next non-local query or when an instruction is
440 /// removed. Clients must copy this data if they want it around longer than
442 const MemoryDependenceAnalysis::NonLocalDepInfo &
443 MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
444 assert(getDependency(QueryCS.getInstruction()).isNonLocal() &&
445 "getNonLocalCallDependency should only be used on calls with non-local deps!");
446 PerInstNLInfo &CacheP = NonLocalDeps[QueryCS.getInstruction()];
447 NonLocalDepInfo &Cache = CacheP.first;
449 /// DirtyBlocks - This is the set of blocks that need to be recomputed. In
450 /// the cached case, this can happen due to instructions being deleted etc. In
451 /// the uncached case, this starts out as the set of predecessors we care
453 SmallVector<BasicBlock*, 32> DirtyBlocks;
455 if (!Cache.empty()) {
456 // Okay, we have a cache entry. If we know it is not dirty, just return it
457 // with no computation.
458 if (!CacheP.second) {
463 // If we already have a partially computed set of results, scan them to
464 // determine what is dirty, seeding our initial DirtyBlocks worklist.
465 for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
467 if (I->getResult().isDirty())
468 DirtyBlocks.push_back(I->getBB());
470 // Sort the cache so that we can do fast binary search lookups below.
471 std::sort(Cache.begin(), Cache.end());
473 ++NumCacheDirtyNonLocal;
474 //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
475 // << Cache.size() << " cached: " << *QueryInst;
477 // Seed DirtyBlocks with each of the preds of QueryInst's block.
478 BasicBlock *QueryBB = QueryCS.getInstruction()->getParent();
479 for (BasicBlock **PI = PredCache->GetPreds(QueryBB); *PI; ++PI)
480 DirtyBlocks.push_back(*PI);
481 NumUncacheNonLocal++;
484 // isReadonlyCall - If this is a read-only call, we can be more aggressive.
485 bool isReadonlyCall = AA->onlyReadsMemory(QueryCS);
487 SmallPtrSet<BasicBlock*, 64> Visited;
489 unsigned NumSortedEntries = Cache.size();
490 DEBUG(AssertSorted(Cache));
492 // Iterate while we still have blocks to update.
493 while (!DirtyBlocks.empty()) {
494 BasicBlock *DirtyBB = DirtyBlocks.back();
495 DirtyBlocks.pop_back();
497 // Already processed this block?
498 if (!Visited.insert(DirtyBB))
501 // Do a binary search to see if we already have an entry for this block in
502 // the cache set. If so, find it.
503 DEBUG(AssertSorted(Cache, NumSortedEntries));
504 NonLocalDepInfo::iterator Entry =
505 std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
506 NonLocalDepEntry(DirtyBB));
507 if (Entry != Cache.begin() && prior(Entry)->getBB() == DirtyBB)
510 NonLocalDepEntry *ExistingResult = 0;
511 if (Entry != Cache.begin()+NumSortedEntries &&
512 Entry->getBB() == DirtyBB) {
513 // If we already have an entry, and if it isn't already dirty, the block
515 if (!Entry->getResult().isDirty())
518 // Otherwise, remember this slot so we can update the value.
519 ExistingResult = &*Entry;
522 // If the dirty entry has a pointer, start scanning from it so we don't have
523 // to rescan the entire block.
524 BasicBlock::iterator ScanPos = DirtyBB->end();
525 if (ExistingResult) {
526 if (Instruction *Inst = ExistingResult->getResult().getInst()) {
528 // We're removing QueryInst's use of Inst.
529 RemoveFromReverseMap(ReverseNonLocalDeps, Inst,
530 QueryCS.getInstruction());
534 // Find out if this block has a local dependency for QueryInst.
537 if (ScanPos != DirtyBB->begin()) {
538 Dep = getCallSiteDependencyFrom(QueryCS, isReadonlyCall,ScanPos, DirtyBB);
539 } else if (DirtyBB != &DirtyBB->getParent()->getEntryBlock()) {
540 // No dependence found. If this is the entry block of the function, it is
541 // a clobber, otherwise it is non-local.
542 Dep = MemDepResult::getNonLocal();
544 Dep = MemDepResult::getClobber(ScanPos);
547 // If we had a dirty entry for the block, update it. Otherwise, just add
550 ExistingResult->setResult(Dep);
552 Cache.push_back(NonLocalDepEntry(DirtyBB, Dep));
554 // If the block has a dependency (i.e. it isn't completely transparent to
555 // the value), remember the association!
556 if (!Dep.isNonLocal()) {
557 // Keep the ReverseNonLocalDeps map up to date so we can efficiently
558 // update this when we remove instructions.
559 if (Instruction *Inst = Dep.getInst())
560 ReverseNonLocalDeps[Inst].insert(QueryCS.getInstruction());
563 // If the block *is* completely transparent to the load, we need to check
564 // the predecessors of this block. Add them to our worklist.
565 for (BasicBlock **PI = PredCache->GetPreds(DirtyBB); *PI; ++PI)
566 DirtyBlocks.push_back(*PI);
573 /// getNonLocalPointerDependency - Perform a full dependency query for an
574 /// access to the specified (non-volatile) memory location, returning the
575 /// set of instructions that either define or clobber the value.
577 /// This method assumes the pointer has a "NonLocal" dependency within its
580 void MemoryDependenceAnalysis::
581 getNonLocalPointerDependency(Value *Pointer, bool isLoad, BasicBlock *FromBB,
582 SmallVectorImpl<NonLocalDepResult> &Result) {
583 assert(Pointer->getType()->isPointerTy() &&
584 "Can't get pointer deps of a non-pointer!");
587 // We know that the pointer value is live into FromBB find the def/clobbers
588 // from presecessors.
589 const Type *EltTy = cast<PointerType>(Pointer->getType())->getElementType();
590 uint64_t PointeeSize = AA->getTypeStoreSize(EltTy);
592 PHITransAddr Address(Pointer, TD);
594 // This is the set of blocks we've inspected, and the pointer we consider in
595 // each block. Because of critical edges, we currently bail out if querying
596 // a block with multiple different pointers. This can happen during PHI
598 DenseMap<BasicBlock*, Value*> Visited;
599 if (!getNonLocalPointerDepFromBB(Address, PointeeSize, isLoad, FromBB,
600 Result, Visited, true))
603 Result.push_back(NonLocalDepResult(FromBB,
604 MemDepResult::getClobber(FromBB->begin()),
608 /// GetNonLocalInfoForBlock - Compute the memdep value for BB with
609 /// Pointer/PointeeSize using either cached information in Cache or by doing a
610 /// lookup (which may use dirty cache info if available). If we do a lookup,
611 /// add the result to the cache.
612 MemDepResult MemoryDependenceAnalysis::
613 GetNonLocalInfoForBlock(Value *Pointer, uint64_t PointeeSize,
614 bool isLoad, BasicBlock *BB,
615 NonLocalDepInfo *Cache, unsigned NumSortedEntries) {
617 // Do a binary search to see if we already have an entry for this block in
618 // the cache set. If so, find it.
619 NonLocalDepInfo::iterator Entry =
620 std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
621 NonLocalDepEntry(BB));
622 if (Entry != Cache->begin() && (Entry-1)->getBB() == BB)
625 NonLocalDepEntry *ExistingResult = 0;
626 if (Entry != Cache->begin()+NumSortedEntries && Entry->getBB() == BB)
627 ExistingResult = &*Entry;
629 // If we have a cached entry, and it is non-dirty, use it as the value for
631 if (ExistingResult && !ExistingResult->getResult().isDirty()) {
632 ++NumCacheNonLocalPtr;
633 return ExistingResult->getResult();
636 // Otherwise, we have to scan for the value. If we have a dirty cache
637 // entry, start scanning from its position, otherwise we scan from the end
639 BasicBlock::iterator ScanPos = BB->end();
640 if (ExistingResult && ExistingResult->getResult().getInst()) {
641 assert(ExistingResult->getResult().getInst()->getParent() == BB &&
642 "Instruction invalidated?");
643 ++NumCacheDirtyNonLocalPtr;
644 ScanPos = ExistingResult->getResult().getInst();
646 // Eliminating the dirty entry from 'Cache', so update the reverse info.
647 ValueIsLoadPair CacheKey(Pointer, isLoad);
648 RemoveFromReverseMap(ReverseNonLocalPtrDeps, ScanPos, CacheKey);
650 ++NumUncacheNonLocalPtr;
653 // Scan the block for the dependency.
654 MemDepResult Dep = getPointerDependencyFrom(Pointer, PointeeSize, isLoad,
657 // If we had a dirty entry for the block, update it. Otherwise, just add
660 ExistingResult->setResult(Dep);
662 Cache->push_back(NonLocalDepEntry(BB, Dep));
664 // If the block has a dependency (i.e. it isn't completely transparent to
665 // the value), remember the reverse association because we just added it
667 if (Dep.isNonLocal())
670 // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
671 // update MemDep when we remove instructions.
672 Instruction *Inst = Dep.getInst();
673 assert(Inst && "Didn't depend on anything?");
674 ValueIsLoadPair CacheKey(Pointer, isLoad);
675 ReverseNonLocalPtrDeps[Inst].insert(CacheKey);
679 /// SortNonLocalDepInfoCache - Sort the a NonLocalDepInfo cache, given a certain
680 /// number of elements in the array that are already properly ordered. This is
681 /// optimized for the case when only a few entries are added.
683 SortNonLocalDepInfoCache(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
684 unsigned NumSortedEntries) {
685 switch (Cache.size() - NumSortedEntries) {
687 // done, no new entries.
690 // Two new entries, insert the last one into place.
691 NonLocalDepEntry Val = Cache.back();
693 MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
694 std::upper_bound(Cache.begin(), Cache.end()-1, Val);
695 Cache.insert(Entry, Val);
699 // One new entry, Just insert the new value at the appropriate position.
700 if (Cache.size() != 1) {
701 NonLocalDepEntry Val = Cache.back();
703 MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
704 std::upper_bound(Cache.begin(), Cache.end(), Val);
705 Cache.insert(Entry, Val);
709 // Added many values, do a full scale sort.
710 std::sort(Cache.begin(), Cache.end());
715 /// getNonLocalPointerDepFromBB - Perform a dependency query based on
716 /// pointer/pointeesize starting at the end of StartBB. Add any clobber/def
717 /// results to the results vector and keep track of which blocks are visited in
720 /// This has special behavior for the first block queries (when SkipFirstBlock
721 /// is true). In this special case, it ignores the contents of the specified
722 /// block and starts returning dependence info for its predecessors.
724 /// This function returns false on success, or true to indicate that it could
725 /// not compute dependence information for some reason. This should be treated
726 /// as a clobber dependence on the first instruction in the predecessor block.
727 bool MemoryDependenceAnalysis::
728 getNonLocalPointerDepFromBB(const PHITransAddr &Pointer, uint64_t PointeeSize,
729 bool isLoad, BasicBlock *StartBB,
730 SmallVectorImpl<NonLocalDepResult> &Result,
731 DenseMap<BasicBlock*, Value*> &Visited,
732 bool SkipFirstBlock) {
734 // Look up the cached info for Pointer.
735 ValueIsLoadPair CacheKey(Pointer.getAddr(), isLoad);
737 std::pair<BBSkipFirstBlockPair, NonLocalDepInfo> *CacheInfo =
738 &NonLocalPointerDeps[CacheKey];
739 NonLocalDepInfo *Cache = &CacheInfo->second;
741 // If we have valid cached information for exactly the block we are
742 // investigating, just return it with no recomputation.
743 if (CacheInfo->first == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
744 // We have a fully cached result for this query then we can just return the
745 // cached results and populate the visited set. However, we have to verify
746 // that we don't already have conflicting results for these blocks. Check
747 // to ensure that if a block in the results set is in the visited set that
748 // it was for the same pointer query.
749 if (!Visited.empty()) {
750 for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
752 DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->getBB());
753 if (VI == Visited.end() || VI->second == Pointer.getAddr())
756 // We have a pointer mismatch in a block. Just return clobber, saying
757 // that something was clobbered in this result. We could also do a
758 // non-fully cached query, but there is little point in doing this.
763 Value *Addr = Pointer.getAddr();
764 for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
766 Visited.insert(std::make_pair(I->getBB(), Addr));
767 if (!I->getResult().isNonLocal())
768 Result.push_back(NonLocalDepResult(I->getBB(), I->getResult(), Addr));
770 ++NumCacheCompleteNonLocalPtr;
774 // Otherwise, either this is a new block, a block with an invalid cache
775 // pointer or one that we're about to invalidate by putting more info into it
776 // than its valid cache info. If empty, the result will be valid cache info,
777 // otherwise it isn't.
779 CacheInfo->first = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
781 CacheInfo->first = BBSkipFirstBlockPair();
783 SmallVector<BasicBlock*, 32> Worklist;
784 Worklist.push_back(StartBB);
786 // Keep track of the entries that we know are sorted. Previously cached
787 // entries will all be sorted. The entries we add we only sort on demand (we
788 // don't insert every element into its sorted position). We know that we
789 // won't get any reuse from currently inserted values, because we don't
790 // revisit blocks after we insert info for them.
791 unsigned NumSortedEntries = Cache->size();
792 DEBUG(AssertSorted(*Cache));
794 while (!Worklist.empty()) {
795 BasicBlock *BB = Worklist.pop_back_val();
797 // Skip the first block if we have it.
798 if (!SkipFirstBlock) {
799 // Analyze the dependency of *Pointer in FromBB. See if we already have
801 assert(Visited.count(BB) && "Should check 'visited' before adding to WL");
803 // Get the dependency info for Pointer in BB. If we have cached
804 // information, we will use it, otherwise we compute it.
805 DEBUG(AssertSorted(*Cache, NumSortedEntries));
806 MemDepResult Dep = GetNonLocalInfoForBlock(Pointer.getAddr(), PointeeSize,
810 // If we got a Def or Clobber, add this to the list of results.
811 if (!Dep.isNonLocal()) {
812 Result.push_back(NonLocalDepResult(BB, Dep, Pointer.getAddr()));
817 // If 'Pointer' is an instruction defined in this block, then we need to do
818 // phi translation to change it into a value live in the predecessor block.
819 // If not, we just add the predecessors to the worklist and scan them with
821 if (!Pointer.NeedsPHITranslationFromBlock(BB)) {
822 SkipFirstBlock = false;
823 for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
824 // Verify that we haven't looked at this block yet.
825 std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
826 InsertRes = Visited.insert(std::make_pair(*PI, Pointer.getAddr()));
827 if (InsertRes.second) {
828 // First time we've looked at *PI.
829 Worklist.push_back(*PI);
833 // If we have seen this block before, but it was with a different
834 // pointer then we have a phi translation failure and we have to treat
835 // this as a clobber.
836 if (InsertRes.first->second != Pointer.getAddr())
837 goto PredTranslationFailure;
842 // We do need to do phi translation, if we know ahead of time we can't phi
843 // translate this value, don't even try.
844 if (!Pointer.IsPotentiallyPHITranslatable())
845 goto PredTranslationFailure;
847 // We may have added values to the cache list before this PHI translation.
848 // If so, we haven't done anything to ensure that the cache remains sorted.
849 // Sort it now (if needed) so that recursive invocations of
850 // getNonLocalPointerDepFromBB and other routines that could reuse the cache
851 // value will only see properly sorted cache arrays.
852 if (Cache && NumSortedEntries != Cache->size()) {
853 SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
854 NumSortedEntries = Cache->size();
858 for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
859 BasicBlock *Pred = *PI;
861 // Get the PHI translated pointer in this predecessor. This can fail if
862 // not translatable, in which case the getAddr() returns null.
863 PHITransAddr PredPointer(Pointer);
864 PredPointer.PHITranslateValue(BB, Pred, 0);
866 Value *PredPtrVal = PredPointer.getAddr();
868 // Check to see if we have already visited this pred block with another
869 // pointer. If so, we can't do this lookup. This failure can occur
870 // with PHI translation when a critical edge exists and the PHI node in
871 // the successor translates to a pointer value different than the
872 // pointer the block was first analyzed with.
873 std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
874 InsertRes = Visited.insert(std::make_pair(Pred, PredPtrVal));
876 if (!InsertRes.second) {
877 // If the predecessor was visited with PredPtr, then we already did
878 // the analysis and can ignore it.
879 if (InsertRes.first->second == PredPtrVal)
882 // Otherwise, the block was previously analyzed with a different
883 // pointer. We can't represent the result of this case, so we just
884 // treat this as a phi translation failure.
885 goto PredTranslationFailure;
888 // If PHI translation was unable to find an available pointer in this
889 // predecessor, then we have to assume that the pointer is clobbered in
890 // that predecessor. We can still do PRE of the load, which would insert
891 // a computation of the pointer in this predecessor.
892 if (PredPtrVal == 0) {
893 // Add the entry to the Result list.
894 NonLocalDepResult Entry(Pred,
895 MemDepResult::getClobber(Pred->getTerminator()),
897 Result.push_back(Entry);
899 // Since we had a phi translation failure, the cache for CacheKey won't
900 // include all of the entries that we need to immediately satisfy future
901 // queries. Mark this in NonLocalPointerDeps by setting the
902 // BBSkipFirstBlockPair pointer to null. This requires reuse of the
903 // cached value to do more work but not miss the phi trans failure.
904 NonLocalPointerDeps[CacheKey].first = BBSkipFirstBlockPair();
908 // FIXME: it is entirely possible that PHI translating will end up with
909 // the same value. Consider PHI translating something like:
910 // X = phi [x, bb1], [y, bb2]. PHI translating for bb1 doesn't *need*
911 // to recurse here, pedantically speaking.
913 // If we have a problem phi translating, fall through to the code below
914 // to handle the failure condition.
915 if (getNonLocalPointerDepFromBB(PredPointer, PointeeSize, isLoad, Pred,
917 goto PredTranslationFailure;
920 // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
921 CacheInfo = &NonLocalPointerDeps[CacheKey];
922 Cache = &CacheInfo->second;
923 NumSortedEntries = Cache->size();
925 // Since we did phi translation, the "Cache" set won't contain all of the
926 // results for the query. This is ok (we can still use it to accelerate
927 // specific block queries) but we can't do the fastpath "return all
928 // results from the set" Clear out the indicator for this.
929 CacheInfo->first = BBSkipFirstBlockPair();
930 SkipFirstBlock = false;
933 PredTranslationFailure:
936 // Refresh the CacheInfo/Cache pointer if it got invalidated.
937 CacheInfo = &NonLocalPointerDeps[CacheKey];
938 Cache = &CacheInfo->second;
939 NumSortedEntries = Cache->size();
942 // Since we failed phi translation, the "Cache" set won't contain all of the
943 // results for the query. This is ok (we can still use it to accelerate
944 // specific block queries) but we can't do the fastpath "return all
945 // results from the set". Clear out the indicator for this.
946 CacheInfo->first = BBSkipFirstBlockPair();
948 // If *nothing* works, mark the pointer as being clobbered by the first
949 // instruction in this block.
951 // If this is the magic first block, return this as a clobber of the whole
952 // incoming value. Since we can't phi translate to one of the predecessors,
953 // we have to bail out.
957 for (NonLocalDepInfo::reverse_iterator I = Cache->rbegin(); ; ++I) {
958 assert(I != Cache->rend() && "Didn't find current block??");
959 if (I->getBB() != BB)
962 assert(I->getResult().isNonLocal() &&
963 "Should only be here with transparent block");
964 I->setResult(MemDepResult::getClobber(BB->begin()));
965 ReverseNonLocalPtrDeps[BB->begin()].insert(CacheKey);
966 Result.push_back(NonLocalDepResult(I->getBB(), I->getResult(),
972 // Okay, we're done now. If we added new values to the cache, re-sort it.
973 SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
974 DEBUG(AssertSorted(*Cache));
978 /// RemoveCachedNonLocalPointerDependencies - If P exists in
979 /// CachedNonLocalPointerInfo, remove it.
980 void MemoryDependenceAnalysis::
981 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
982 CachedNonLocalPointerInfo::iterator It =
983 NonLocalPointerDeps.find(P);
984 if (It == NonLocalPointerDeps.end()) return;
986 // Remove all of the entries in the BB->val map. This involves removing
987 // instructions from the reverse map.
988 NonLocalDepInfo &PInfo = It->second.second;
990 for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
991 Instruction *Target = PInfo[i].getResult().getInst();
992 if (Target == 0) continue; // Ignore non-local dep results.
993 assert(Target->getParent() == PInfo[i].getBB());
995 // Eliminating the dirty entry from 'Cache', so update the reverse info.
996 RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
999 // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
1000 NonLocalPointerDeps.erase(It);
1004 /// invalidateCachedPointerInfo - This method is used to invalidate cached
1005 /// information about the specified pointer, because it may be too
1006 /// conservative in memdep. This is an optional call that can be used when
1007 /// the client detects an equivalence between the pointer and some other
1008 /// value and replaces the other value with ptr. This can make Ptr available
1009 /// in more places that cached info does not necessarily keep.
1010 void MemoryDependenceAnalysis::invalidateCachedPointerInfo(Value *Ptr) {
1011 // If Ptr isn't really a pointer, just ignore it.
1012 if (!Ptr->getType()->isPointerTy()) return;
1013 // Flush store info for the pointer.
1014 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
1015 // Flush load info for the pointer.
1016 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true));
1019 /// invalidateCachedPredecessors - Clear the PredIteratorCache info.
1020 /// This needs to be done when the CFG changes, e.g., due to splitting
1022 void MemoryDependenceAnalysis::invalidateCachedPredecessors() {
1026 /// removeInstruction - Remove an instruction from the dependence analysis,
1027 /// updating the dependence of instructions that previously depended on it.
1028 /// This method attempts to keep the cache coherent using the reverse map.
1029 void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
1030 // Walk through the Non-local dependencies, removing this one as the value
1031 // for any cached queries.
1032 NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
1033 if (NLDI != NonLocalDeps.end()) {
1034 NonLocalDepInfo &BlockMap = NLDI->second.first;
1035 for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
1037 if (Instruction *Inst = DI->getResult().getInst())
1038 RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
1039 NonLocalDeps.erase(NLDI);
1042 // If we have a cached local dependence query for this instruction, remove it.
1044 LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
1045 if (LocalDepEntry != LocalDeps.end()) {
1046 // Remove us from DepInst's reverse set now that the local dep info is gone.
1047 if (Instruction *Inst = LocalDepEntry->second.getInst())
1048 RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);
1050 // Remove this local dependency info.
1051 LocalDeps.erase(LocalDepEntry);
1054 // If we have any cached pointer dependencies on this instruction, remove
1055 // them. If the instruction has non-pointer type, then it can't be a pointer
1058 // Remove it from both the load info and the store info. The instruction
1059 // can't be in either of these maps if it is non-pointer.
1060 if (RemInst->getType()->isPointerTy()) {
1061 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
1062 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
1065 // Loop over all of the things that depend on the instruction we're removing.
1067 SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;
1069 // If we find RemInst as a clobber or Def in any of the maps for other values,
1070 // we need to replace its entry with a dirty version of the instruction after
1071 // it. If RemInst is a terminator, we use a null dirty value.
1073 // Using a dirty version of the instruction after RemInst saves having to scan
1074 // the entire block to get to this point.
1075 MemDepResult NewDirtyVal;
1076 if (!RemInst->isTerminator())
1077 NewDirtyVal = MemDepResult::getDirty(++BasicBlock::iterator(RemInst));
1079 ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
1080 if (ReverseDepIt != ReverseLocalDeps.end()) {
1081 SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
1082 // RemInst can't be the terminator if it has local stuff depending on it.
1083 assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
1084 "Nothing can locally depend on a terminator");
1086 for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
1087 E = ReverseDeps.end(); I != E; ++I) {
1088 Instruction *InstDependingOnRemInst = *I;
1089 assert(InstDependingOnRemInst != RemInst &&
1090 "Already removed our local dep info");
1092 LocalDeps[InstDependingOnRemInst] = NewDirtyVal;
1094 // Make sure to remember that new things depend on NewDepInst.
1095 assert(NewDirtyVal.getInst() && "There is no way something else can have "
1096 "a local dep on this if it is a terminator!");
1097 ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
1098 InstDependingOnRemInst));
1101 ReverseLocalDeps.erase(ReverseDepIt);
1103 // Add new reverse deps after scanning the set, to avoid invalidating the
1104 // 'ReverseDeps' reference.
1105 while (!ReverseDepsToAdd.empty()) {
1106 ReverseLocalDeps[ReverseDepsToAdd.back().first]
1107 .insert(ReverseDepsToAdd.back().second);
1108 ReverseDepsToAdd.pop_back();
1112 ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
1113 if (ReverseDepIt != ReverseNonLocalDeps.end()) {
1114 SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
1115 for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
1117 assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");
1119 PerInstNLInfo &INLD = NonLocalDeps[*I];
1120 // The information is now dirty!
1123 for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
1124 DE = INLD.first.end(); DI != DE; ++DI) {
1125 if (DI->getResult().getInst() != RemInst) continue;
1127 // Convert to a dirty entry for the subsequent instruction.
1128 DI->setResult(NewDirtyVal);
1130 if (Instruction *NextI = NewDirtyVal.getInst())
1131 ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
1135 ReverseNonLocalDeps.erase(ReverseDepIt);
1137 // Add new reverse deps after scanning the set, to avoid invalidating 'Set'
1138 while (!ReverseDepsToAdd.empty()) {
1139 ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
1140 .insert(ReverseDepsToAdd.back().second);
1141 ReverseDepsToAdd.pop_back();
1145 // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
1146 // value in the NonLocalPointerDeps info.
1147 ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
1148 ReverseNonLocalPtrDeps.find(RemInst);
1149 if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
1150 SmallPtrSet<ValueIsLoadPair, 4> &Set = ReversePtrDepIt->second;
1151 SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;
1153 for (SmallPtrSet<ValueIsLoadPair, 4>::iterator I = Set.begin(),
1154 E = Set.end(); I != E; ++I) {
1155 ValueIsLoadPair P = *I;
1156 assert(P.getPointer() != RemInst &&
1157 "Already removed NonLocalPointerDeps info for RemInst");
1159 NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].second;
1161 // The cache is not valid for any specific block anymore.
1162 NonLocalPointerDeps[P].first = BBSkipFirstBlockPair();
1164 // Update any entries for RemInst to use the instruction after it.
1165 for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
1167 if (DI->getResult().getInst() != RemInst) continue;
1169 // Convert to a dirty entry for the subsequent instruction.
1170 DI->setResult(NewDirtyVal);
1172 if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
1173 ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
1176 // Re-sort the NonLocalDepInfo. Changing the dirty entry to its
1177 // subsequent value may invalidate the sortedness.
1178 std::sort(NLPDI.begin(), NLPDI.end());
1181 ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);
1183 while (!ReversePtrDepsToAdd.empty()) {
1184 ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
1185 .insert(ReversePtrDepsToAdd.back().second);
1186 ReversePtrDepsToAdd.pop_back();
1191 assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
1192 AA->deleteValue(RemInst);
1193 DEBUG(verifyRemoved(RemInst));
1195 /// verifyRemoved - Verify that the specified instruction does not occur
1196 /// in our internal data structures.
1197 void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
1198 for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
1199 E = LocalDeps.end(); I != E; ++I) {
1200 assert(I->first != D && "Inst occurs in data structures");
1201 assert(I->second.getInst() != D &&
1202 "Inst occurs in data structures");
1205 for (CachedNonLocalPointerInfo::const_iterator I =NonLocalPointerDeps.begin(),
1206 E = NonLocalPointerDeps.end(); I != E; ++I) {
1207 assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
1208 const NonLocalDepInfo &Val = I->second.second;
1209 for (NonLocalDepInfo::const_iterator II = Val.begin(), E = Val.end();
1211 assert(II->getResult().getInst() != D && "Inst occurs as NLPD value");
1214 for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
1215 E = NonLocalDeps.end(); I != E; ++I) {
1216 assert(I->first != D && "Inst occurs in data structures");
1217 const PerInstNLInfo &INLD = I->second;
1218 for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
1219 EE = INLD.first.end(); II != EE; ++II)
1220 assert(II->getResult().getInst() != D && "Inst occurs in data structures");
1223 for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
1224 E = ReverseLocalDeps.end(); I != E; ++I) {
1225 assert(I->first != D && "Inst occurs in data structures");
1226 for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
1227 EE = I->second.end(); II != EE; ++II)
1228 assert(*II != D && "Inst occurs in data structures");
1231 for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
1232 E = ReverseNonLocalDeps.end();
1234 assert(I->first != D && "Inst occurs in data structures");
1235 for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
1236 EE = I->second.end(); II != EE; ++II)
1237 assert(*II != D && "Inst occurs in data structures");
1240 for (ReverseNonLocalPtrDepTy::const_iterator
1241 I = ReverseNonLocalPtrDeps.begin(),
1242 E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
1243 assert(I->first != D && "Inst occurs in rev NLPD map");
1245 for (SmallPtrSet<ValueIsLoadPair, 4>::const_iterator II = I->second.begin(),
1246 E = I->second.end(); II != E; ++II)
1247 assert(*II != ValueIsLoadPair(D, false) &&
1248 *II != ValueIsLoadPair(D, true) &&
1249 "Inst occurs in ReverseNonLocalPtrDeps map");