1 //===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file was developed by Owen Anderson and is distributed under
6 // the University of Illinois Open Source License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements an analysis that determines, for a given memory
11 // operation, what preceding memory operations it depends on. It builds on
12 // alias analysis information, and tries to provide a lazy, caching interface to
13 // a common kind of alias information query.
15 //===----------------------------------------------------------------------===//
17 #include "llvm/Analysis/MemoryDependenceAnalysis.h"
18 #include "llvm/Constants.h"
19 #include "llvm/Instructions.h"
20 #include "llvm/Function.h"
21 #include "llvm/Analysis/AliasAnalysis.h"
22 #include "llvm/Support/CFG.h"
23 #include "llvm/Target/TargetData.h"
24 #include "llvm/ADT/Statistic.h"
26 #define DEBUG_TYPE "memdep"
30 STATISTIC(NumCacheNonlocal, "Number of cached non-local responses");
31 STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");
33 char MemoryDependenceAnalysis::ID = 0;
// Sentinel Instruction* values stored in the dependence caches.  They are
// never dereferenced; distinct out-of-range pointer constants let the maps
// hold "special" answers alongside real instruction pointers:
//   NonLocal - the dependency lies outside the query's basic block.
//   None     - the query has no dependency at all.
//   Dirty    - a cached non-local entry was invalidated and must be recomputed.
35 Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
36 Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
37 Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;
39 // Register this pass...
40 static RegisterPass<MemoryDependenceAnalysis> X("memdep",
41 "Memory Dependence Analysis");
43 /// getAnalysisUsage - Does not modify anything. It uses Alias Analysis.
// addRequiredTransitive (rather than addRequired) keeps AliasAnalysis and
// TargetData alive for as long as this pass's cached results may be queried.
// NOTE(review): the closing brace is elided from this excerpt (gap in the
// embedded original line numbering).
45 void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
47 AU.addRequiredTransitive<AliasAnalysis>();
48 AU.addRequiredTransitive<TargetData>();
51 /// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site: walks backwards from the call (or from the supplied start
/// position / block), and returns the first preceding instruction whose memory
/// effects the call may depend on.
// NOTE(review): this listing is an excerpt; gaps in the embedded numbering
// elide some lines (the `if (start)` header, the `return` statements after
// each cache insertion, and several closing braces).
53 Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
57 AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
58 TargetData& TD = getAnalysis<TargetData>();
59 BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
60 BasicBlock::iterator QI = C.getInstruction();
62 // If the starting point was specified, use it
65 blockBegin = start->getParent()->end();
66 // If the starting point wasn't specified, but the block was, use it
67 } else if (!start && block) {
69 blockBegin = block->end();
72 // Walk backwards through the block, looking for dependencies
73 while (QI != blockBegin) {
76 // If this inst is a memory op, get the pointer it accessed
78 uint64_t pointerSize = 0;
79 if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
80 pointer = S->getPointerOperand();
81 pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
82 } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
// Size of an alloca/malloc is (array count) * (element ABI size), but only
// when the count is a compile-time constant; otherwise pointerSize stays 0.
84 if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
85 pointerSize = C->getZExtValue() * \
86 TD.getABITypeSize(AI->getAllocatedType());
89 } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
90 pointer = V->getOperand(0);
91 pointerSize = TD.getTypeStoreSize(V->getType());
92 } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
93 pointer = F->getPointerOperand();
95 // FreeInsts erase the entire structure
97 } else if (CallSite::get(QI).getInstruction() != 0) {
// A preceding call that may write memory is a dependency of our call site;
// read-only and no-access calls are skipped.
98 AliasAnalysis::ModRefBehavior result =
99 AA.getModRefBehavior(cast<CallInst>(QI)->getCalledFunction(),
101 if (result != AliasAnalysis::DoesNotAccessMemory &&
102 result != AliasAnalysis::OnlyReadsMemory) {
// Only cache (and record the reverse edge) for the default query position,
// i.e. when neither an explicit start nor block was given.
103 if (!start && !block) {
104 depGraphLocal.insert(std::make_pair(C.getInstruction(),
105 std::make_pair(QI, true)));
106 reverseDep[QI].insert(C.getInstruction());
// Ask alias analysis whether the call may read or write the pointer touched
// by the instruction we are examining.
115 if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
116 if (!start && !block) {
117 depGraphLocal.insert(std::make_pair(C.getInstruction(),
118 std::make_pair(QI, true)));
119 reverseDep[QI].insert(C.getInstruction());
125 // No dependence found in this block: cache the NonLocal sentinel.
126 depGraphLocal.insert(std::make_pair(C.getInstruction(),
127 std::make_pair(NonLocal, true)));
128 reverseDep[NonLocal].insert(C.getInstruction());
132 /// nonLocalHelper - Private helper used to calculate non-local dependencies
133 /// by doing DFS on the predecessors of a block to find its dependencies
/// On return, resp maps each reached block to the local dependency found
/// there, or to the None/NonLocal sentinels (see the static constants above).
134 void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
136 DenseMap<BasicBlock*, Value*>& resp) {
137 // Set of blocks that we've already visited in our DFS
138 SmallPtrSet<BasicBlock*, 4> visited;
139 // If we're updating a dirtied cache entry, we don't need to reprocess
140 // already computed entries.
141 for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(),
142 E = resp.end(); I != E; ++I)
143 if (I->second != Dirty)
144 visited.insert(I->first);
146 // Current stack of the DFS
147 SmallVector<BasicBlock*, 4> stack;
148 stack.push_back(block);
151 while (!stack.empty()) {
// Examine (but do not yet pop) the top of the stack.
152 BasicBlock* BB = stack.back();
154 // If we've already visited this block, no need to revisit
155 if (visited.count(BB)) {
160 // If we find a new block with a local dependency for query,
161 // then we insert the new dependency and backtrack.
// getDependency(query, 0, BB) searches only within BB; NonLocal means BB
// itself contains no dependency and the DFS must continue upward.
165 Instruction* localDep = getDependency(query, 0, BB);
166 if (localDep != NonLocal) {
167 resp.insert(std::make_pair(BB, localDep));
172 // If we re-encounter the starting block, we still need to search it
173 // because there might be a dependency in the starting block AFTER
174 // the position of the query. This is necessary to get loops right.
175 } else if (BB == block && stack.size() > 1) {
// A self-dependency (localDep == query) carries no information, so skip it.
178 Instruction* localDep = getDependency(query, 0, BB);
179 if (localDep != query)
180 resp.insert(std::make_pair(BB, localDep));
187 // If we didn't find anything, recurse on the predecessors of this block
188 bool predOnStack = false;
189 bool inserted = false;
190 for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB);
192 if (!visited.count(*PI)) {
193 stack.push_back(*PI);
198 // If we inserted a new predecessor, then we'll come back to this block
201 // If we didn't insert because we have no predecessors, then this
202 // query has no dependency at all.
203 else if (!inserted && !predOnStack) {
204 resp.insert(std::make_pair(BB, None));
205 // If we didn't insert because our predecessors are already on the stack,
206 // then we might still have a dependency, but it will be discovered during
208 } else if (!inserted && predOnStack){
209 resp.insert(std::make_pair(BB, NonLocal));
216 /// getNonLocalDependency - Fills the passed-in map with the non-local
217 /// dependencies of the query. The map will contain NonLocal for
218 /// blocks between the query and its dependencies.
219 void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
220 DenseMap<BasicBlock*, Value*>& resp) {
// Fast path: a cached answer exists; only re-examine entries marked Dirty.
221 if (depGraphNonLocal.count(query)) {
222 DenseMap<BasicBlock*, Value*>& cached = depGraphNonLocal[query];
// Collect the dirtied blocks first; we must not mutate `cached` while
// iterating over it.
225 SmallVector<BasicBlock*, 4> dirtied;
226 for (DenseMap<BasicBlock*, Value*>::iterator I = cached.begin(),
227 E = cached.end(); I != E; ++I)
228 if (I->second == Dirty)
229 dirtied.push_back(I->first);
231 for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
232 E = dirtied.end(); I != E; ++I) {
// Try to resolve each dirty block locally; fall back to the DFS helper
// when the block has no local dependency.
233 Instruction* localDep = getDependency(query, 0, *I);
234 if (localDep != NonLocal)
235 cached[*I] = localDep;
238 nonLocalHelper(query, *I, cached);
246 NumUncacheNonlocal++;
248 // If not, go ahead and search for non-local deps.
249 nonLocalHelper(query, query->getParent(), resp);
251 // Update the non-local dependency cache and its reverse map, so the entry
// can be invalidated when a dependee instruction is removed.
252 for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
254 depGraphNonLocal[query].insert(*I);
255 reverseDepNonLocal[I->second].insert(query);
259 /// getDependency - Return the instruction on which a memory operation
260 /// depends. The local parameter indicates if the query should only
261 /// evaluate dependencies within the same basic block.
/// Returns the NonLocal sentinel when no dependency exists in the searched
/// block.  Results are cached (and reverse-mapped) only for the default
/// query position, i.e. when neither start nor block is supplied.
// NOTE(review): this listing is an excerpt; gaps in the embedded numbering
// elide some lines (e.g. the `return` statements after cache insertions and
// the declaration of `dependee`).
262 Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
265 // Start looking for dependencies with the queried inst
266 BasicBlock::iterator QI = query;
268 // Check for a cached result
269 std::pair<Instruction*, bool> cachedResult = depGraphLocal[query];
270 // If we have a _confirmed_ cached entry, return it
271 if (cachedResult.second)
272 return cachedResult.first;
273 else if (cachedResult.first && cachedResult.first != NonLocal)
274 // If we have an unconfirmed cached entry, we can start our search from there
275 QI = cachedResult.first;
279 else if (!start && block)
282 AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
283 TargetData& TD = getAnalysis<TargetData>();
285 // Get the pointer value for which dependence will be determined
287 uint64_t dependeeSize = 0;
288 bool queryIsVolatile = false;
289 if (StoreInst* S = dyn_cast<StoreInst>(query)) {
290 dependee = S->getPointerOperand();
291 dependeeSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
292 queryIsVolatile = S->isVolatile();
293 } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
294 dependee = L->getPointerOperand();
295 dependeeSize = TD.getTypeStoreSize(L->getType());
296 queryIsVolatile = L->isVolatile();
297 } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
298 dependee = V->getOperand(0);
299 dependeeSize = TD.getTypeStoreSize(V->getType());
300 } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
301 dependee = F->getPointerOperand();
303 // FreeInsts erase the entire structure, not just a field
// Calls are handled by the dedicated call-site helper above.
305 } else if (CallSite::get(query).getInstruction() != 0)
306 return getCallSiteDependency(CallSite::get(query), start, block);
307 else if (isa<AllocationInst>(query))
// Search from the start of the explicitly requested block, or of the
// query's own block by default.
312 BasicBlock::iterator blockBegin = block ? block->begin()
313 : query->getParent()->begin();
315 // Walk backwards through the basic block, looking for dependencies
316 while (QI != blockBegin) {
319 // If this inst is a memory op, get the pointer it accessed
321 uint64_t pointerSize = 0;
322 if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
323 // All volatile loads/stores depend on each other
324 if (queryIsVolatile && S->isVolatile()) {
325 if (!start && !block) {
326 depGraphLocal.insert(std::make_pair(query, std::make_pair(S, true)));
327 reverseDep[S].insert(query);
333 pointer = S->getPointerOperand();
334 pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
335 } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
336 // All volatile loads/stores depend on each other
337 if (queryIsVolatile && L->isVolatile()) {
338 if (!start && !block) {
339 depGraphLocal.insert(std::make_pair(query, std::make_pair(L, true)));
340 reverseDep[L].insert(query);
346 pointer = L->getPointerOperand();
347 pointerSize = TD.getTypeStoreSize(L->getType());
348 } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
// Size is only known when the allocation count is a compile-time constant.
350 if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
351 pointerSize = C->getZExtValue() * \
352 TD.getABITypeSize(AI->getAllocatedType());
355 } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
356 pointer = V->getOperand(0);
357 pointerSize = TD.getTypeStoreSize(V->getType());
358 } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
359 pointer = F->getPointerOperand();
361 // FreeInsts erase the entire structure
363 } else if (CallSite::get(QI).getInstruction() != 0) {
364 // Call insts need special handling. Check if they can modify our pointer
365 AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
366 dependee, dependeeSize);
368 if (MR != AliasAnalysis::NoModRef) {
369 // Loads don't depend on read-only calls
370 if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref)
373 if (!start && !block) {
374 depGraphLocal.insert(std::make_pair(query,
375 std::make_pair(QI, true)));
376 reverseDep[QI].insert(query);
385 // If we found a pointer, check if it could be the same as our pointer
387 AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
388 dependee, dependeeSize);
390 if (R != AliasAnalysis::NoAlias) {
391 // May-alias loads don't depend on each other
392 if (isa<LoadInst>(query) && isa<LoadInst>(QI) &&
393 R == AliasAnalysis::MayAlias)
396 if (!start && !block) {
397 depGraphLocal.insert(std::make_pair(query,
398 std::make_pair(QI, true)));
399 reverseDep[QI].insert(query);
407 // If we found nothing, return the non-local flag
408 if (!start && !block) {
409 depGraphLocal.insert(std::make_pair(query,
410 std::make_pair(NonLocal, true)));
411 reverseDep[NonLocal].insert(query);
417 /// removeInstruction - Remove an instruction from the dependence analysis,
418 /// updating the dependence of instructions that previously depended on it.
419 /// This method attempts to keep the cache coherent using the reverse map.
420 void MemoryDependenceAnalysis::removeInstruction(Instruction* rem) {
421 // Figure out the new dep for things that currently depend on rem
422 Instruction* newDep = NonLocal;
424 depMapType::iterator depGraphEntry = depGraphLocal.find(rem);
426 if (depGraphEntry != depGraphLocal.end()) {
// .second.second is the "confirmed" flag; only trust confirmed entries.
427 if (depGraphEntry->second.first != NonLocal &&
428 depGraphEntry->second.second) {
429 // If we have dep info for rem, set them to it
430 BasicBlock::iterator RI = depGraphEntry->second.first;
433 } else if (depGraphEntry->second.first == NonLocal &&
434 depGraphEntry->second.second ) {
435 // If we have a confirmed non-local flag, use it
438 // Otherwise, use the immediate successor of rem
439 // NOTE: This is because, when getDependence is called, it will first
440 // check the immediate predecessor of what is in the cache.
441 BasicBlock::iterator RI = rem;
// Repoint every instruction that depended on rem at newDep, then drop
// rem's reverse-map entry.
446 SmallPtrSet<Instruction*, 4>& set = reverseDep[rem];
447 for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
449 // Insert the new dependencies
450 // Mark it as unconfirmed as long as it is not the non-local flag
// (!newDep exploits NonLocal being replaced by a null `newDep` here — the
// pair's bool is the confirmed flag, so a real instruction is unconfirmed.)
451 depGraphLocal[*I] = std::make_pair(newDep, !newDep);
454 reverseDep.erase(rem);
// Dirty (rather than fully recompute) the non-local cache entries of every
// query whose cached answer mentioned rem.
457 if (reverseDepNonLocal.count(rem)) {
458 SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[rem];
459 for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
461 for (DenseMap<BasicBlock*, Value*>::iterator DI =
462 depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
464 if (DI->second == rem)
467 reverseDepNonLocal.erase(rem);
// Finally tell alias analysis that rem is going away.
470 getAnalysis<AliasAnalysis>().deleteValue(rem);