1 //===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
// This file was developed by Owen Anderson and is distributed under
6 // the University of Illinois Open Source License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements an analysis that determines, for a given memory
11 // operation, what preceding memory operations it depends on. It builds on
12 // alias analysis information, and tries to provide a lazy, caching interface to
13 // a common kind of alias information query.
15 //===----------------------------------------------------------------------===//
17 #include "llvm/Analysis/MemoryDependenceAnalysis.h"
18 #include "llvm/Constants.h"
19 #include "llvm/Instructions.h"
20 #include "llvm/Function.h"
21 #include "llvm/Analysis/AliasAnalysis.h"
22 #include "llvm/Support/CFG.h"
23 #include "llvm/Target/TargetData.h"
24 #include "llvm/ADT/Statistic.h"
// Debug label used by LLVM's DEBUG()/STATISTIC machinery for this pass.
#define DEBUG_TYPE "memdep"

STATISTIC(NumCacheNonlocal, "Number of cached non-local responses");
STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");

// Address of ID uniquely identifies this pass to the pass manager.
char MemoryDependenceAnalysis::ID = 0;

// Sentinel Instruction* values stored in the dependence caches.  These are
// never dereferenced -- the small negative constants merely tag a cache entry
// as "non-local", "no dependence at all", or "dirty (needs recomputation)".
Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                                "Memory Dependence Analysis");
/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  // Transitive requirement: AliasAnalysis and TargetData must stay alive for
  // as long as this (lazy, caching) analysis itself is alive.
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site: walks backwards from the call (or from 'start'/'block'
/// when supplied) looking for the nearest instruction whose memory effects
/// may interact with the call, per alias-analysis mod/ref queries.
Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();
  BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
  BasicBlock::iterator QI = C.getInstruction();

  // If the starting point was specified, use it
    blockBegin = start->getParent()->end();
  // If the starting point wasn't specified, but the block was, use it
  } else if (!start && block) {
    blockBegin = block->end();

  // Walk backwards through the block, looking for dependencies
  while (QI != blockBegin) {
    // If this inst is a memory op, get the pointer it accessed
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      // The allocated size is only computable when the array size is a
      // compile-time constant; otherwise pointerSize stays 0.
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() * \
                      TD.getTypeSize(AI->getAllocatedType());
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Another call site: it is a dependency if it may read or modify
      // memory that call C touches.
      if (AA.getModRefInfo(C, CallSite::get(QI)) != AliasAnalysis::NoModRef) {
        // Only cache results for default (unanchored) queries.
        if (!start && !block) {
          depGraphLocal.insert(std::make_pair(C.getInstruction(),
                                              std::make_pair(QI, true)));
          reverseDep[QI].insert(C.getInstruction());

    // A memory operation: it is a dependency if the call may read or modify
    // the location it accesses.
    if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
      if (!start && !block) {
        depGraphLocal.insert(std::make_pair(C.getInstruction(),
                                            std::make_pair(QI, true)));
        reverseDep[QI].insert(C.getInstruction());

  // No dependence found in this block: cache the NonLocal sentinel.
  depGraphLocal.insert(std::make_pair(C.getInstruction(),
                                      std::make_pair(NonLocal, true)));
  reverseDep[NonLocal].insert(C.getInstruction());
/// nonLocalHelper - Private helper used to calculate non-local dependencies
/// by doing DFS on the predecessors of a block to find its dependencies.
/// Results are accumulated into 'resp', keyed by basic block; entries may be
/// a concrete depending instruction, or the NonLocal/None sentinels.
void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
                                              DenseMap<BasicBlock*, Value*>& resp) {
  // Set of blocks that we've already visited in our DFS
  SmallPtrSet<BasicBlock*, 4> visited;
  // If we're updating a dirtied cache entry, we don't need to reprocess
  // already computed entries.
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(),
       E = resp.end(); I != E; ++I)
    if (I->second != Dirty)
      visited.insert(I->first);

  // Current stack of the DFS
  SmallVector<BasicBlock*, 4> stack;
  stack.push_back(block);

  while (!stack.empty()) {
    BasicBlock* BB = stack.back();

    // If we've already visited this block, no need to revisit
    if (visited.count(BB)) {

    // If we find a new block with a local dependency for query,
    // then we insert the new dependency and backtrack.
      Instruction* localDep = getDependency(query, 0, BB);
      if (localDep != NonLocal) {
        resp.insert(std::make_pair(BB, localDep));

    // If we re-encounter the starting block, we still need to search it
    // because there might be a dependency in the starting block AFTER
    // the position of the query. This is necessary to get loops right.
    } else if (BB == block && stack.size() > 1) {
      Instruction* localDep = getDependency(query, 0, BB);
      if (localDep != query)
        resp.insert(std::make_pair(BB, localDep));

    // If we didn't find anything, recurse on the predecessors of this block
    bool predOnStack = false;
    bool inserted = false;
    for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB);
      if (!visited.count(*PI)) {
        stack.push_back(*PI);

    // If we inserted a new predecessor, then we'll come back to this block
    // If we didn't insert because we have no predecessors, then this
    // query has no dependency at all.
    else if (!inserted && !predOnStack) {
      resp.insert(std::make_pair(BB, None));
    // If we didn't insert because our predecessors are already on the stack,
    // then we might still have a dependency, but it will be discovered during
    // the processing of a block that is still on the stack.
    } else if (!inserted && predOnStack){
      resp.insert(std::make_pair(BB, NonLocal));
/// getNonLocalDependency - Fills the passed-in map with the non-local
/// dependencies of the queries.  The map will contain NonLocal for
/// blocks between the query and its dependencies.
void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
                                                     DenseMap<BasicBlock*, Value*>& resp) {
  // Fast path: reuse a previously cached answer, repairing any entries that
  // were marked Dirty (by instruction removal) in the meantime.
  if (depGraphNonLocal.count(query)) {
    DenseMap<BasicBlock*, Value*>& cached = depGraphNonLocal[query];

    // Collect the blocks whose cached entries are stale.
    SmallVector<BasicBlock*, 4> dirtied;
    for (DenseMap<BasicBlock*, Value*>::iterator I = cached.begin(),
         E = cached.end(); I != E; ++I)
      if (I->second == Dirty)
        dirtied.push_back(I->first);

    // Recompute each stale entry in place.
    for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
         E = dirtied.end(); I != E; ++I) {
      Instruction* localDep = getDependency(query, 0, *I);
      if (localDep != NonLocal)
        cached[*I] = localDep;
        nonLocalHelper(query, *I, cached);

  NumUncacheNonlocal++;

  // If not, go ahead and search for non-local deps.
  nonLocalHelper(query, query->getParent(), resp);

  // Update the non-local dependency cache
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
    depGraphNonLocal[query].insert(*I);
    reverseDepNonLocal[I->second].insert(query);
/// getDependency - Return the instruction on which a memory operation
/// depends.  The local parameter indicates if the query should only
/// evaluate dependencies within the same basic block.  Walks backwards from
/// the query (or from 'start'/'block' when supplied) and returns the nearest
/// instruction that may alias or mod/ref the queried location, or the
/// NonLocal sentinel if none exists in the searched block.
Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
  // Start looking for dependencies with the queried inst
  BasicBlock::iterator QI = query;

  // Check for a cached result
  std::pair<Instruction*, bool> cachedResult = depGraphLocal[query];
  // If we have a _confirmed_ cached entry, return it
  if (cachedResult.second)
    return cachedResult.first;
  else if (cachedResult.first && cachedResult.first != NonLocal)
    // If we have an unconfirmed cached entry, we can start our search from there
    QI = cachedResult.first;
  else if (!start && block)

  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined
  uint64_t dependeeSize = 0;
  bool queryIsVolatile = false;
  if (StoreInst* S = dyn_cast<StoreInst>(query)) {
    dependee = S->getPointerOperand();
    dependeeSize = TD.getTypeSize(S->getOperand(0)->getType());
    queryIsVolatile = S->isVolatile();
  } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
    dependee = L->getPointerOperand();
    dependeeSize = TD.getTypeSize(L->getType());
    queryIsVolatile = L->isVolatile();
  } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
    dependee = V->getOperand(0);
    dependeeSize = TD.getTypeSize(V->getType());
  } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
    dependee = F->getPointerOperand();

    // FreeInsts erase the entire structure, not just a field
  } else if (CallSite::get(query).getInstruction() != 0)
    // Call queries are delegated to the call-site walker.
    return getCallSiteDependency(CallSite::get(query), start, block);
  else if (isa<AllocationInst>(query))

  BasicBlock::iterator blockBegin = block ? block->begin()
                                          : query->getParent()->begin();

  // Walk backwards through the basic block, looking for dependencies
  while (QI != blockBegin) {
    // If this inst is a memory op, get the pointer it accessed
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && S->isVolatile()) {
        // Only cache results for default (unanchored) queries.
        if (!start && !block) {
          depGraphLocal.insert(std::make_pair(query, std::make_pair(S, true)));
          reverseDep[S].insert(query);

      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && L->isVolatile()) {
        if (!start && !block) {
          depGraphLocal.insert(std::make_pair(query, std::make_pair(L, true)));
          reverseDep[L].insert(query);

      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      // The allocated size is only computable for constant array sizes.
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() * \
                      TD.getTypeSize(AI->getAllocatedType());
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Call insts need special handling. Check if they can modify our pointer
      AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
                                                        dependee, dependeeSize);

      if (MR != AliasAnalysis::NoModRef) {
        // Loads don't depend on read-only calls
        if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref)

        if (!start && !block) {
          depGraphLocal.insert(std::make_pair(query,
                                              std::make_pair(QI, true)));
          reverseDep[QI].insert(query);

    // If we found a pointer, check if it could be the same as our pointer
      AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
                                              dependee, dependeeSize);

      if (R != AliasAnalysis::NoAlias) {
        // May-alias loads don't depend on each other
        if (isa<LoadInst>(query) && isa<LoadInst>(QI) &&
            R == AliasAnalysis::MayAlias)

        if (!start && !block) {
          depGraphLocal.insert(std::make_pair(query,
                                              std::make_pair(QI, true)));
          reverseDep[QI].insert(query);

  // If we found nothing, return the non-local flag
  if (!start && !block) {
    depGraphLocal.insert(std::make_pair(query,
                                        std::make_pair(NonLocal, true)));
    reverseDep[NonLocal].insert(query);
/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction* rem) {
  // Figure out the new dep for things that currently depend on rem
  Instruction* newDep = NonLocal;

  depMapType::iterator depGraphEntry = depGraphLocal.find(rem);

  if (depGraphEntry != depGraphLocal.end()) {
    if (depGraphEntry->second.first != NonLocal &&
        depGraphEntry->second.second) {
      // If we have dep info for rem, set them to it
      BasicBlock::iterator RI = depGraphEntry->second.first;
    } else if (depGraphEntry->second.first == NonLocal &&
               depGraphEntry->second.second ) {
      // If we have a confirmed non-local flag, use it
      // Otherwise, use the immediate successor of rem
      // NOTE: This is because, when getDependence is called, it will first
      // check the immediate predecessor of what is in the cache.
      BasicBlock::iterator RI = rem;

    // Redirect everything that depended on rem to the replacement dep.
    SmallPtrSet<Instruction*, 4>& set = reverseDep[rem];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
      // Insert the new dependencies
      // Mark it as unconfirmed as long as it is not the non-local flag
      depGraphLocal[*I] = std::make_pair(newDep, !newDep);

    reverseDep.erase(rem);

  // Invalidate cached non-local results that referenced rem.
  if (reverseDepNonLocal.count(rem)) {
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[rem];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
        // NOTE(review): entries matching rem are presumably reset (e.g. to
        // the Dirty sentinel) here -- confirm against the full source.
        if (DI->second == rem)

  reverseDepNonLocal.erase(rem);

  // Let alias analysis drop its own information about rem as well.
  getAnalysis<AliasAnalysis>().deleteValue(rem);