//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface to
// a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
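
// For illustration, a hypothetical client pass could query this analysis for
// a load instruction LI roughly as follows (a sketch only; see
// MemoryDependenceAnalysis.h for the exact interface):
//
//   MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
//   Instruction* dep = MD.getDependency(LI, 0, 0);
//   if (dep == MemoryDependenceAnalysis::NonLocal) {
//     DenseMap<BasicBlock*, Value*> deps;
//     MD.getNonLocalDependency(LI, deps);
//   }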

#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetData.h"
#include "llvm/ADT/Statistic.h"

#define DEBUG_TYPE "memdep"

using namespace llvm;

// Control the calculation of non-local dependencies by only examining the
// predecessors if the basic block has fewer than X predecessors (50 by
// default).
static cl::opt<int>
PredLimit("nonlocaldep-threshold", cl::Hidden, cl::init(50),
          cl::desc("Control the calculation of non-local "
                   "dependencies (default = 50)"));
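
// The limit can be adjusted on the command line, e.g. by passing
// -nonlocaldep-threshold=100 to opt.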

STATISTIC(NumCacheNonlocal, "Number of cached non-local responses");
STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");

char MemoryDependenceAnalysis::ID = 0;
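
// Sentinel values used in the dependence caches.  They are created by casting
// small negative integers so that they can never collide with the address of
// a real Instruction.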
Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                                "Memory Dependence Analysis");
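
/// ping - Debugging helper: assert that the instruction D does not appear
/// anywhere in the cached dependence information.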
void MemoryDependenceAnalysis::ping(Instruction *D) {
  for (depMapType::iterator I = depGraphLocal.begin(), E = depGraphLocal.end();
       I != E; ++I) {
    assert(I->first != D);
    assert(I->second.first != D);
  }
  for (nonLocalDepMapType::iterator I = depGraphNonLocal.begin(),
       E = depGraphNonLocal.end(); I != E; ++I)
    assert(I->first != D);
  for (reverseDepMapType::iterator I = reverseDep.begin(), E = reverseDep.end();
       I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D);
  for (reverseDepMapType::iterator I = reverseDepNonLocal.begin(),
       E = reverseDepNonLocal.end(); I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D);
}

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site.
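/// Returns the instruction the call locally depends on, or the NonLocal
/// marker when no dependency is found in the examined block.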
Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
                                                             Instruction* start,
                                                             BasicBlock* block) {
  std::pair<Instruction*, bool>& cachedResult =
                                             depGraphLocal[C.getInstruction()];
  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();
  BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
  BasicBlock::iterator QI = C.getInstruction();

  // If the starting point was specified, use it
  if (start) {
    QI = start;
    blockBegin = start->getParent()->begin();
  // If the starting point wasn't specified, but the block was, use it
  } else if (!start && block) {
    QI = block->end();
    blockBegin = block->begin();
  }

  // Walk backwards through the block, looking for dependencies
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getABITypeSize(AI->getAllocatedType());
      else
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (isa<CallInst>(QI)) {
      AliasAnalysis::ModRefBehavior result =
                                        AA.getModRefBehavior(CallSite::get(QI));
      if (result != AliasAnalysis::DoesNotAccessMemory &&
          result != AliasAnalysis::OnlyReadsMemory) {
        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(C.getInstruction());
        }
        return QI;
      } else {
        continue;
      }
    } else
      continue;

    if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
      if (!start && !block) {
        cachedResult.first = QI;
        cachedResult.second = true;
        reverseDep[QI].insert(C.getInstruction());
      }
      return QI;
    }
  }

  // No dependence found
  cachedResult.first = NonLocal;
  cachedResult.second = true;
  reverseDep[NonLocal].insert(C.getInstruction());
  return NonLocal;
}

/// nonLocalHelper - Private helper used to calculate non-local dependencies
/// by doing DFS on the predecessors of a block to find its dependencies.
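/// query is the instruction being asked about, block is where the walk starts,
/// and resp collects the per-block results; entries already present in resp
/// (other than Dirty ones) are treated as final and are not revisited.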
void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
                                              BasicBlock* block,
                                         DenseMap<BasicBlock*, Value*>& resp) {
  // Set of blocks that we've already visited in our DFS
  SmallPtrSet<BasicBlock*, 4> visited;
  // If we're updating a dirtied cache entry, we don't need to reprocess
  // already computed entries.
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(),
       E = resp.end(); I != E; ++I)
    if (I->second != Dirty)
      visited.insert(I->first);

  // Current stack of the DFS
  SmallVector<BasicBlock*, 4> stack;
  stack.push_back(block);

  // Do a basic DFS
  while (!stack.empty()) {
    BasicBlock* BB = stack.back();

    // If we've already visited this block, no need to revisit
    if (visited.count(BB)) {
      stack.pop_back();
      continue;
    }

    // If we find a new block with a local dependency for query,
    // then we insert the new dependency and backtrack.
    if (BB != block) {
      visited.insert(BB);
      Instruction* localDep = getDependency(query, 0, BB);
      if (localDep != NonLocal) {
        resp.insert(std::make_pair(BB, localDep));
        stack.pop_back();
        continue;
      }
    // If we re-encounter the starting block, we still need to search it
    // because there might be a dependency in the starting block AFTER
    // the position of the query.  This is necessary to get loops right.
    } else if (BB == block && stack.size() > 1) {
      visited.insert(BB);
      Instruction* localDep = getDependency(query, 0, BB);
      if (localDep != query)
        resp.insert(std::make_pair(BB, localDep));
      stack.pop_back();
      continue;
    }

    // If we didn't find anything, recurse on the predecessors of this block.
    // Only do this for blocks with a small number of predecessors.
    bool predOnStack = false;
    bool inserted = false;
    if (std::distance(pred_begin(BB), pred_end(BB)) <= PredLimit) {
      for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB);
           PI != PE; ++PI)
        if (!visited.count(*PI)) {
          stack.push_back(*PI);
          inserted = true;
        } else
          predOnStack = true;
    }

    // If we inserted a new predecessor, then we'll come back to this block
    if (inserted)
      continue;
    // If we didn't insert because we have no predecessors, then this
    // query has no dependency at all.
    else if (!inserted && !predOnStack) {
      resp.insert(std::make_pair(BB, None));
    // If we didn't insert because our predecessors are already on the stack,
    // then we might still have a dependency, but it will be discovered during
    // backtracking.
    } else if (!inserted && predOnStack) {
      resp.insert(std::make_pair(BB, NonLocal));
    }

    stack.pop_back();
  }
}

/// getNonLocalDependency - Fills the passed-in map with the non-local
/// dependencies of the query.  The map will contain NonLocal for
/// blocks between the query and its dependencies.
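/// Cached results are reused when available; entries marked Dirty by a prior
/// call to removeInstruction are recomputed before the cache is returned.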
void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
                                         DenseMap<BasicBlock*, Value*>& resp) {
  if (depGraphNonLocal.count(query)) {
    DenseMap<BasicBlock*, Value*>& cached = depGraphNonLocal[query];
    NumCacheNonlocal++;

    SmallVector<BasicBlock*, 4> dirtied;
    for (DenseMap<BasicBlock*, Value*>::iterator I = cached.begin(),
         E = cached.end(); I != E; ++I)
      if (I->second == Dirty)
        dirtied.push_back(I->first);

    for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
         E = dirtied.end(); I != E; ++I) {
      Instruction* localDep = getDependency(query, 0, *I);
      if (localDep != NonLocal)
        cached[*I] = localDep;
      else {
        cached.erase(*I);
        nonLocalHelper(query, *I, cached);
      }
    }

    resp = cached;
    return;
  } else
    NumUncacheNonlocal++;

  // If not, go ahead and search for non-local deps.
  nonLocalHelper(query, query->getParent(), resp);

  // Update the non-local dependency cache
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
       I != E; ++I) {
    depGraphNonLocal[query].insert(*I);
    reverseDepNonLocal[I->second].insert(query);
  }
}

/// getDependency - Return the instruction on which a memory operation
/// depends.  The optional start and block parameters restrict where the
/// search begins and which basic block it examines.
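/// Results are cached only for the default query, i.e. when neither start
/// nor block is specified.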
Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
                                                     Instruction* start,
                                                     BasicBlock* block) {
  // Start looking for dependencies with the queried inst
  BasicBlock::iterator QI = query;

  // Check for a cached result
  std::pair<Instruction*, bool>& cachedResult = depGraphLocal[query];
  // If we have a _confirmed_ cached entry, return it
  if (!block && !start) {
    if (cachedResult.second)
      return cachedResult.first;
    else if (cachedResult.first && cachedResult.first != NonLocal)
      // If we have an unconfirmed cached entry, we can start our search from
      // there
      QI = cachedResult.first;
  }

  if (start)
    QI = start;
  else if (!start && block)
    QI = block->end();

  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined
  Value* dependee = 0;
  uint64_t dependeeSize = 0;
  bool queryIsVolatile = false;
  if (StoreInst* S = dyn_cast<StoreInst>(query)) {
    dependee = S->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    queryIsVolatile = S->isVolatile();
  } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
    dependee = L->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(L->getType());
    queryIsVolatile = L->isVolatile();
  } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
    dependee = V->getOperand(0);
    dependeeSize = TD.getTypeStoreSize(V->getType());
  } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
    dependee = F->getPointerOperand();

    // FreeInsts erase the entire structure, not just a field
    dependeeSize = ~0UL;
  } else if (CallSite::get(query).getInstruction() != 0)
    return getCallSiteDependency(CallSite::get(query), start, block);
  else if (isa<AllocationInst>(query))
    return None;
  else
    return None;
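
  // dependee and dependeeSize now describe the memory location the query
  // accesses; walk its block backwards looking for an instruction that may
  // modify or alias that location.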
  BasicBlock::iterator blockBegin = block ? block->begin()
                                          : query->getParent()->begin();

  // Walk backwards through the basic block, looking for dependencies
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && S->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = S;
          cachedResult.second = true;
          reverseDep[S].insert(query);
        }

        return S;
      }

      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && L->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = L;
          cachedResult.second = true;
          reverseDep[L].insert(query);
        }

        return L;
      }

      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getABITypeSize(AI->getAllocatedType());
      else
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Call insts need special handling. Check if they can modify our pointer
      AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
                                                        dependee, dependeeSize);

      if (MR != AliasAnalysis::NoModRef) {
        // Loads don't depend on read-only calls
        if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref)
          continue;

        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(query);
        }

        return QI;
      } else {
        continue;
      }
    }

    // If we found a pointer, check if it could be the same as our pointer
    if (pointer) {
      AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
                                              dependee, dependeeSize);

      if (R != AliasAnalysis::NoAlias) {
        // May-alias loads don't depend on each other
        if (isa<LoadInst>(query) && isa<LoadInst>(QI) &&
            R == AliasAnalysis::MayAlias)
          continue;

        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(query);
        }

        return QI;
      }
    }
  }

  // If we found nothing, return the non-local flag
  if (!start && !block) {
    cachedResult.first = NonLocal;
    cachedResult.second = true;
    reverseDep[NonLocal].insert(query);
  }

  return NonLocal;
}

/// dropInstruction - Remove an instruction from the analysis, making
/// absolutely conservative assumptions when updating the cache.  This is
/// useful, for example when an instruction is changed rather than removed.
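/// Unlike removeInstruction, anything that depended on the dropped instruction
/// has its cached entry erased outright, forcing it to be recomputed on the
/// next query.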
void MemoryDependenceAnalysis::dropInstruction(Instruction* drop) {
  depMapType::iterator depGraphEntry = depGraphLocal.find(drop);
  if (depGraphEntry != depGraphLocal.end())
    reverseDep[depGraphEntry->second.first].erase(drop);

  // Drop dependency information for things that depended on this instr
  SmallPtrSet<Instruction*, 4>& set = reverseDep[drop];
  for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
       I != E; ++I)
    depGraphLocal.erase(*I);

  depGraphLocal.erase(drop);
  reverseDep.erase(drop);

  for (DenseMap<BasicBlock*, Value*>::iterator DI =
       depGraphNonLocal[drop].begin(), DE = depGraphNonLocal[drop].end();
       DI != DE; ++DI)
    if (DI->second != None)
      reverseDepNonLocal[DI->second].erase(drop);

  if (reverseDepNonLocal.count(drop)) {
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[drop];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == drop)
          DI->second = Dirty;
  }

  reverseDepNonLocal.erase(drop);
  nonLocalDepMapType::iterator I = depGraphNonLocal.find(drop);
  if (I != depGraphNonLocal.end())
    depGraphNonLocal.erase(I);
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
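/// Entries that depended on the removed instruction are repointed either to
/// its own confirmed dependency or, as an unconfirmed hint, to the instruction
/// immediately following it.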
void MemoryDependenceAnalysis::removeInstruction(Instruction* rem) {
  // Figure out the new dep for things that currently depend on rem
  Instruction* newDep = NonLocal;

  for (DenseMap<BasicBlock*, Value*>::iterator DI =
       depGraphNonLocal[rem].begin(), DE = depGraphNonLocal[rem].end();
       DI != DE; ++DI)
    if (DI->second != None)
      reverseDepNonLocal[DI->second].erase(rem);

  depMapType::iterator depGraphEntry = depGraphLocal.find(rem);

  if (depGraphEntry != depGraphLocal.end()) {
    reverseDep[depGraphEntry->second.first].erase(rem);

    if (depGraphEntry->second.first != NonLocal &&
        depGraphEntry->second.first != None &&
        depGraphEntry->second.second) {
      // If we have dep info for rem, set them to it
      BasicBlock::iterator RI = depGraphEntry->second.first;
      RI++;
      newDep = RI;
    } else if ( (depGraphEntry->second.first == NonLocal ||
                 depGraphEntry->second.first == None ) &&
                depGraphEntry->second.second ) {
      // If we have a confirmed non-local flag, use it
      newDep = depGraphEntry->second.first;
    } else {
      // Otherwise, use the immediate successor of rem
      // NOTE: This is because, when getDependency is called, it will first
      // check the immediate predecessor of what is in the cache.
      BasicBlock::iterator RI = rem;
      RI++;
      newDep = RI;
    }
  } else {
    // Otherwise, use the immediate successor of rem
    // NOTE: This is because, when getDependency is called, it will first
    // check the immediate predecessor of what is in the cache.
    BasicBlock::iterator RI = rem;
    RI++;
    newDep = RI;
  }

  SmallPtrSet<Instruction*, 4>& set = reverseDep[rem];
  for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
       I != E; ++I) {
    // Insert the new dependencies
    // Mark it as unconfirmed as long as it is not the non-local flag
    depGraphLocal[*I] = std::make_pair(newDep, (newDep == NonLocal ||
                                                newDep == None));
  }

  depGraphLocal.erase(rem);
  reverseDep.erase(rem);

  if (reverseDepNonLocal.count(rem)) {
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[rem];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == rem)
          DI->second = Dirty;
  }

  reverseDepNonLocal.erase(rem);
  nonLocalDepMapType::iterator I = depGraphNonLocal.find(rem);
  if (I != depGraphNonLocal.end())
    depGraphNonLocal.erase(I);

  getAnalysis<AliasAnalysis>().deleteValue(rem);
}