1 //===- AliasAnalysis.cpp - Generic Alias Analysis Interface Implementation -==//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements the generic AliasAnalysis interface which is used as the
11 // common interface used by all clients and implementations of alias analysis.
13 // This file also implements the default version of the AliasAnalysis interface
14 // that is to be used when no other implementation is specified. This does some
// simple tests that detect obvious cases: two different global pointers cannot
// alias, a global cannot alias a malloc, two different mallocs cannot alias,
// etc.
19 // This alias analysis implementation really isn't very good for anything, but
20 // it is very fast, and makes a nice clean default implementation. Because it
// handles lots of little corner cases, other, more complex, alias analysis
// implementations may choose to rely on this pass to resolve these simple and
// easy cases.
25 //===----------------------------------------------------------------------===//
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/BasicBlock.h"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Type.h"
using namespace llvm;
// Register the AliasAnalysis interface, providing a nice name to refer to.
static RegisterAnalysisGroup<AliasAnalysis> Z("Alias Analysis");

// Unique ID used by PassInfo to identify the AliasAnalysis analysis group.
char AliasAnalysis::ID = 0;
41 //===----------------------------------------------------------------------===//
42 // Default chaining methods
43 //===----------------------------------------------------------------------===//
45 AliasAnalysis::AliasResult
46 AliasAnalysis::alias(const Value *V1, unsigned V1Size,
47 const Value *V2, unsigned V2Size) {
48 assert(AA && "AA didn't call InitializeAliasAnalysis in its run method!");
49 return AA->alias(V1, V1Size, V2, V2Size);
52 bool AliasAnalysis::pointsToConstantMemory(const Value *P) {
53 assert(AA && "AA didn't call InitializeAliasAnalysis in its run method!");
54 return AA->pointsToConstantMemory(P);
57 void AliasAnalysis::deleteValue(Value *V) {
58 assert(AA && "AA didn't call InitializeAliasAnalysis in its run method!");
62 void AliasAnalysis::copyValue(Value *From, Value *To) {
63 assert(AA && "AA didn't call InitializeAliasAnalysis in its run method!");
64 AA->copyValue(From, To);
67 AliasAnalysis::ModRefResult
68 AliasAnalysis::getModRefInfo(ImmutableCallSite CS,
69 const Value *P, unsigned Size) {
70 // Don't assert AA because BasicAA calls us in order to make use of the
73 ModRefBehavior MRB = getModRefBehavior(CS);
74 if (MRB == DoesNotAccessMemory)
77 ModRefResult Mask = ModRef;
78 if (MRB == OnlyReadsMemory)
80 else if (MRB == AliasAnalysis::AccessesArguments) {
81 bool doesAlias = false;
82 for (ImmutableCallSite::arg_iterator AI = CS.arg_begin(), AE = CS.arg_end();
84 if (!isNoAlias(*AI, ~0U, P, Size)) {
93 // If P points to a constant memory location, the call definitely could not
94 // modify the memory location.
95 if ((Mask & Mod) && pointsToConstantMemory(P))
96 Mask = ModRefResult(Mask & ~Mod);
98 // If this is BasicAA, don't forward.
101 // Otherwise, fall back to the next AA in the chain. But we can merge
102 // in any mask we've managed to compute.
103 return ModRefResult(AA->getModRefInfo(CS, P, Size) & Mask);
106 AliasAnalysis::ModRefResult
107 AliasAnalysis::getModRefInfo(ImmutableCallSite CS1, ImmutableCallSite CS2) {
108 // Don't assert AA because BasicAA calls us in order to make use of the
111 // If CS1 or CS2 are readnone, they don't interact.
112 ModRefBehavior CS1B = getModRefBehavior(CS1);
113 if (CS1B == DoesNotAccessMemory) return NoModRef;
115 ModRefBehavior CS2B = getModRefBehavior(CS2);
116 if (CS2B == DoesNotAccessMemory) return NoModRef;
118 // If they both only read from memory, there is no dependence.
119 if (CS1B == OnlyReadsMemory && CS2B == OnlyReadsMemory)
122 AliasAnalysis::ModRefResult Mask = ModRef;
124 // If CS1 only reads memory, the only dependence on CS2 can be
125 // from CS1 reading memory written by CS2.
126 if (CS1B == OnlyReadsMemory)
127 Mask = ModRefResult(Mask & Ref);
129 // If CS2 only access memory through arguments, accumulate the mod/ref
130 // information from CS1's references to the memory referenced by
132 if (CS2B == AccessesArguments) {
133 AliasAnalysis::ModRefResult R = NoModRef;
134 for (ImmutableCallSite::arg_iterator
135 I = CS2.arg_begin(), E = CS2.arg_end(); I != E; ++I) {
136 R = ModRefResult((R | getModRefInfo(CS1, *I, UnknownSize)) & Mask);
143 // If CS1 only accesses memory through arguments, check if CS2 references
144 // any of the memory referenced by CS1's arguments. If not, return NoModRef.
145 if (CS1B == AccessesArguments) {
146 AliasAnalysis::ModRefResult R = NoModRef;
147 for (ImmutableCallSite::arg_iterator
148 I = CS1.arg_begin(), E = CS1.arg_end(); I != E; ++I)
149 if (getModRefInfo(CS2, *I, UnknownSize) != NoModRef) {
157 // If this is BasicAA, don't forward.
158 if (!AA) return Mask;
160 // Otherwise, fall back to the next AA in the chain. But we can merge
161 // in any mask we've managed to compute.
162 return ModRefResult(AA->getModRefInfo(CS1, CS2) & Mask);
165 AliasAnalysis::ModRefBehavior
166 AliasAnalysis::getModRefBehavior(ImmutableCallSite CS) {
167 // Don't assert AA because BasicAA calls us in order to make use of the
170 ModRefBehavior Min = UnknownModRefBehavior;
172 // Call back into the alias analysis with the other form of getModRefBehavior
173 // to see if it can give a better response.
174 if (const Function *F = CS.getCalledFunction())
175 Min = getModRefBehavior(F);
177 // If this is BasicAA, don't forward.
180 // Otherwise, fall back to the next AA in the chain. But we can merge
181 // in any result we've managed to compute.
182 return std::min(AA->getModRefBehavior(CS), Min);
185 AliasAnalysis::ModRefBehavior
186 AliasAnalysis::getModRefBehavior(const Function *F) {
187 assert(AA && "AA didn't call InitializeAliasAnalysis in its run method!");
188 return AA->getModRefBehavior(F);
191 AliasAnalysis::DependenceResult
192 AliasAnalysis::getDependence(const Instruction *First,
193 DependenceQueryFlags FirstFlags,
194 const Instruction *Second,
195 DependenceQueryFlags SecondFlags) {
196 assert(AA && "AA didn't call InitializeAliasAnalyais in its run method!");
197 return AA->getDependence(First, FirstFlags, Second, SecondFlags);
200 //===----------------------------------------------------------------------===//
201 // AliasAnalysis non-virtual helper method implementation
202 //===----------------------------------------------------------------------===//
204 AliasAnalysis::ModRefResult
205 AliasAnalysis::getModRefInfo(const LoadInst *L, const Value *P, unsigned Size) {
206 // Be conservative in the face of volatile.
210 // If the load address doesn't alias the given address, it doesn't read
211 // or write the specified memory.
212 if (!alias(L->getOperand(0), getTypeStoreSize(L->getType()), P, Size))
215 // Otherwise, a load just reads.
219 AliasAnalysis::ModRefResult
220 AliasAnalysis::getModRefInfo(const StoreInst *S, const Value *P, unsigned Size) {
221 // Be conservative in the face of volatile.
225 // If the store address cannot alias the pointer in question, then the
226 // specified memory cannot be modified by the store.
227 if (!alias(S->getOperand(1),
228 getTypeStoreSize(S->getOperand(0)->getType()), P, Size))
231 // If the pointer is a pointer to constant memory, then it could not have been
232 // modified by this store.
233 if (pointsToConstantMemory(P))
236 // Otherwise, a store just writes.
240 AliasAnalysis::ModRefResult
241 AliasAnalysis::getModRefInfo(const VAArgInst *V, const Value *P, unsigned Size) {
242 // If the va_arg address cannot alias the pointer in question, then the
243 // specified memory cannot be accessed by the va_arg.
244 if (!alias(V->getOperand(0), UnknownSize, P, Size))
247 // If the pointer is a pointer to constant memory, then it could not have been
248 // modified by this va_arg.
249 if (pointsToConstantMemory(P))
252 // Otherwise, a va_arg reads and writes.
256 AliasAnalysis::DependenceResult
257 AliasAnalysis::getDependenceViaModRefInfo(const Instruction *First,
258 DependenceQueryFlags FirstFlags,
259 const Instruction *Second,
260 DependenceQueryFlags SecondFlags) {
261 if (const LoadInst *L = dyn_cast<LoadInst>(First)) {
262 // Be over-conservative with volatile for now.
266 // Forward this query to getModRefInfo.
267 switch (getModRefInfo(Second,
268 L->getPointerOperand(),
269 getTypeStoreSize(L->getType()))) {
271 // Second doesn't reference First's memory, so they're independent.
275 // Second only reads from the memory read from by First. If it
276 // also writes to any other memory, be conservative.
277 if (Second->mayWriteToMemory())
280 // If it's loading the same size from the same address, we can
281 // give a more precise result.
282 if (const LoadInst *SecondL = dyn_cast<LoadInst>(Second)) {
283 unsigned LSize = getTypeStoreSize(L->getType());
284 unsigned SecondLSize = getTypeStoreSize(SecondL->getType());
285 if (alias(L->getPointerOperand(), LSize,
286 SecondL->getPointerOperand(), SecondLSize) ==
288 // If the loads are the same size, it's ReadThenRead.
289 if (LSize == SecondLSize)
292 // If the second load is smaller, it's only ReadThenReadSome.
293 if (LSize > SecondLSize)
294 return ReadThenReadSome;
298 // Otherwise it's just two loads.
302 // Second only writes to the memory read from by First. If it
303 // also reads from any other memory, be conservative.
304 if (Second->mayReadFromMemory())
307 // If it's storing the same size to the same address, we can
308 // give a more precise result.
309 if (const StoreInst *SecondS = dyn_cast<StoreInst>(Second)) {
310 unsigned LSize = getTypeStoreSize(L->getType());
311 unsigned SecondSSize = getTypeStoreSize(SecondS->getType());
312 if (alias(L->getPointerOperand(), LSize,
313 SecondS->getPointerOperand(), SecondSSize) ==
315 // If the load and the store are the same size, it's ReadThenWrite.
316 if (LSize == SecondSSize)
317 return ReadThenWrite;
321 // Otherwise we don't know if it could be writing to other memory.
325 // Second reads and writes to the memory read from by First.
326 // We don't have a way to express that.
330 } else if (const StoreInst *S = dyn_cast<StoreInst>(First)) {
331 // Be over-conservative with volatile for now.
335 // Forward this query to getModRefInfo.
336 switch (getModRefInfo(Second,
337 S->getPointerOperand(),
338 getTypeStoreSize(S->getValueOperand()->getType()))) {
340 // Second doesn't reference First's memory, so they're independent.
344 // Second only reads from the memory written to by First. If it
345 // also writes to any other memory, be conservative.
346 if (Second->mayWriteToMemory())
349 // If it's loading the same size from the same address, we can
350 // give a more precise result.
351 if (const LoadInst *SecondL = dyn_cast<LoadInst>(Second)) {
352 unsigned SSize = getTypeStoreSize(S->getValueOperand()->getType());
353 unsigned SecondLSize = getTypeStoreSize(SecondL->getType());
354 if (alias(S->getPointerOperand(), SSize,
355 SecondL->getPointerOperand(), SecondLSize) ==
357 // If the store and the load are the same size, it's WriteThenRead.
358 if (SSize == SecondLSize)
359 return WriteThenRead;
361 // If the load is smaller, it's only WriteThenReadSome.
362 if (SSize > SecondLSize)
363 return WriteThenReadSome;
367 // Otherwise we don't know if it could be reading from other memory.
371 // Second only writes to the memory written to by First. If it
372 // also reads from any other memory, be conservative.
373 if (Second->mayReadFromMemory())
376 // If it's storing the same size to the same address, we can
377 // give a more precise result.
378 if (const StoreInst *SecondS = dyn_cast<StoreInst>(Second)) {
379 unsigned SSize = getTypeStoreSize(S->getValueOperand()->getType());
380 unsigned SecondSSize = getTypeStoreSize(SecondS->getType());
381 if (alias(S->getPointerOperand(), SSize,
382 SecondS->getPointerOperand(), SecondSSize) ==
384 // If the stores are the same size, it's WriteThenWrite.
385 if (SSize == SecondSSize)
386 return WriteThenWrite;
388 // If the second store is larger, it's only WriteSomeThenWrite.
389 if (SSize < SecondSSize)
390 return WriteSomeThenWrite;
394 // Otherwise we don't know if it could be writing to other memory.
398 // Second reads and writes to the memory written to by First.
399 // We don't have a way to express that.
403 } else if (const VAArgInst *V = dyn_cast<VAArgInst>(First)) {
404 // Forward this query to getModRefInfo.
405 if (getModRefInfo(Second, V->getOperand(0), UnknownSize) == NoModRef)
406 // Second doesn't reference First's memory, so they're independent.
409 } else if (ImmutableCallSite FirstCS = cast<Value>(First)) {
410 // If both instructions are calls/invokes we can use the two-callsite
411 // form of getModRefInfo.
412 if (ImmutableCallSite SecondCS = cast<Value>(Second))
413 // getModRefInfo's arguments are backwards from intuition.
414 switch (getModRefInfo(SecondCS, FirstCS)) {
416 // Second doesn't reference First's memory, so they're independent.
420 // If they're both read-only, there's no dependence.
421 if (FirstCS.onlyReadsMemory() && SecondCS.onlyReadsMemory())
424 // Otherwise it's not obvious what we can do here.
428 // It's not obvious what we can do here.
437 // For anything else, be conservative.
441 AliasAnalysis::ModRefBehavior
442 AliasAnalysis::getIntrinsicModRefBehavior(unsigned iid) {
443 #define GET_INTRINSIC_MODREF_BEHAVIOR
444 #include "llvm/Intrinsics.gen"
445 #undef GET_INTRINSIC_MODREF_BEHAVIOR
// AliasAnalysis destructor: DO NOT move this to the header file for
// AliasAnalysis or else clients of the AliasAnalysis class may not depend on
// the AliasAnalysis.o file in the current .a file, causing alias analysis
// support to not be included in the tool correctly!
// (Keeping the out-of-line definition anchors the vtable in this object file.)
AliasAnalysis::~AliasAnalysis() {}
455 /// InitializeAliasAnalysis - Subclasses must call this method to initialize the
456 /// AliasAnalysis interface before any other methods are called.
458 void AliasAnalysis::InitializeAliasAnalysis(Pass *P) {
459 TD = P->getAnalysisIfAvailable<TargetData>();
460 AA = &P->getAnalysis<AliasAnalysis>();
463 // getAnalysisUsage - All alias analysis implementations should invoke this
464 // directly (using AliasAnalysis::getAnalysisUsage(AU)).
465 void AliasAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
466 AU.addRequired<AliasAnalysis>(); // All AA's chain
469 /// getTypeStoreSize - Return the TargetData store size for the given type,
470 /// if known, or a conservative value otherwise.
472 unsigned AliasAnalysis::getTypeStoreSize(const Type *Ty) {
473 return TD ? TD->getTypeStoreSize(Ty) : ~0u;
476 /// canBasicBlockModify - Return true if it is possible for execution of the
477 /// specified basic block to modify the value pointed to by Ptr.
479 bool AliasAnalysis::canBasicBlockModify(const BasicBlock &BB,
480 const Value *Ptr, unsigned Size) {
481 return canInstructionRangeModify(BB.front(), BB.back(), Ptr, Size);
484 /// canInstructionRangeModify - Return true if it is possible for the execution
485 /// of the specified instructions to modify the value pointed to by Ptr. The
486 /// instructions to consider are all of the instructions in the range of [I1,I2]
487 /// INCLUSIVE. I1 and I2 must be in the same basic block.
489 bool AliasAnalysis::canInstructionRangeModify(const Instruction &I1,
490 const Instruction &I2,
491 const Value *Ptr, unsigned Size) {
492 assert(I1.getParent() == I2.getParent() &&
493 "Instructions not in same basic block!");
494 BasicBlock::const_iterator I = &I1;
495 BasicBlock::const_iterator E = &I2;
496 ++E; // Convert from inclusive to exclusive range.
498 for (; I != E; ++I) // Check every instruction in range
499 if (getModRefInfo(I, Ptr, Size) & Mod)
504 /// isNoAliasCall - Return true if this pointer is returned by a noalias
506 bool llvm::isNoAliasCall(const Value *V) {
507 if (isa<CallInst>(V) || isa<InvokeInst>(V))
508 return ImmutableCallSite(cast<Instruction>(V))
509 .paramHasAttr(0, Attribute::NoAlias);
513 /// isIdentifiedObject - Return true if this pointer refers to a distinct and
514 /// identifiable object. This returns true for:
515 /// Global Variables and Functions (but not Global Aliases)
516 /// Allocas and Mallocs
517 /// ByVal and NoAlias Arguments
520 bool llvm::isIdentifiedObject(const Value *V) {
521 if (isa<AllocaInst>(V))
523 if (isa<GlobalValue>(V) && !isa<GlobalAlias>(V))
525 if (isNoAliasCall(V))
527 if (const Argument *A = dyn_cast<Argument>(V))
528 return A->hasNoAliasAttr() || A->hasByValAttr();
// Because of the way .a files work, we must force the BasicAA implementation to
// be pulled in if the AliasAnalysis classes are pulled in. Otherwise we run
// the risk of AliasAnalysis being used, but the default implementation not
// being linked into the tool that uses it.
// (DEFINING_FILE_FOR emits a symbol that IncludeFile references to force this.)
DEFINING_FILE_FOR(AliasAnalysis)