1 //===- BasicAliasAnalysis.cpp - Local Alias Analysis Impl -----------------===//
3 // The LLVM Compiler Infrastructure
5 // This file was developed by the LLVM research group and is distributed under
6 // the University of Illinois Open Source License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file defines the default implementation of the Alias Analysis interface
11 // that simply implements a few identities (two different globals cannot alias,
12 // etc), but otherwise does no analysis.
14 //===----------------------------------------------------------------------===//
16 #include "llvm/Analysis/AliasAnalysis.h"
17 #include "llvm/Analysis/Passes.h"
18 #include "llvm/Constants.h"
19 #include "llvm/DerivedTypes.h"
20 #include "llvm/Function.h"
21 #include "llvm/GlobalVariable.h"
22 #include "llvm/Instructions.h"
23 #include "llvm/Pass.h"
24 #include "llvm/Target/TargetData.h"
25 #include "llvm/Support/Compiler.h"
26 #include "llvm/Support/GetElementPtrTypeIterator.h"
27 #include "llvm/Support/ManagedStatic.h"
// NOTE(review): this is an elided listing -- the embedded original numbering
// jumps (e.g. 39 -> 42), so method-closing braces, several return statements,
// and the anonymous-namespace opening are not visible here. Code lines below
// are kept byte-identical.
32 /// NoAA - This class implements the -no-aa pass, which always returns "I
33 /// don't know" for alias queries. NoAA is unlike other alias analysis
34 /// implementations, in that it does not chain to a previous analysis. As
35 /// such it doesn't follow many of the rules that other alias analyses must.
37 struct VISIBILITY_HIDDEN NoAA : public ImmutablePass, public AliasAnalysis {
// Declare the TargetData dependency so initializePass can cache it in TD.
38 virtual void getAnalysisUsage(AnalysisUsage &AU) const {
39 AU.addRequired<TargetData>();
42 virtual void initializePass() {
43 TD = &getAnalysis<TargetData>();
// alias - the "I don't know" answer for every query; the return statement
// (presumably MayAlias) is on an elided line.
46 virtual AliasResult alias(const Value *V1, unsigned V1Size,
47 const Value *V2, unsigned V2Size) {
// getModRefBehavior - no information about any call site either.
51 virtual ModRefBehavior getModRefBehavior(Function *F, CallSite CS,
52 std::vector<PointerAccessInfo> *Info) {
53 return UnknownModRefBehavior;
// getArgumentAccesses - only legal when getModRefBehavior returns a result
// that promises per-argument info; NoAA never does, hence the hard assert.
56 virtual void getArgumentAccesses(Function *F, CallSite CS,
57 std::vector<PointerAccessInfo> &Info) {
58 assert(0 && "This method may not be called on this function!");
// No must-alias sets, no constant-memory knowledge, no mod/ref precision.
61 virtual void getMustAliases(Value *P, std::vector<Value*> &RetVals) { }
62 virtual bool pointsToConstantMemory(const Value *P) { return false; }
63 virtual ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size) {
66 virtual ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
69 virtual bool hasNoModRefInfoForCalls() const { return true; }
// Value lifetime callbacks: NoAA keeps no per-value state, so both are no-ops.
71 virtual void deleteValue(Value *V) {}
72 virtual void copyValue(Value *From, Value *To) {}
75 // Register this pass... (the RegisterPass<NoAA> line itself is elided;
// only the registration object 'U' with its command-line name is visible.)
77 U("no-aa", "No Alias Analysis (always returns 'may' alias)");
79 // Declare that we implement the AliasAnalysis interface
80 RegisterAnalysisGroup<AliasAnalysis> V(U);
81 } // End of anonymous namespace
83 ImmutablePass *llvm::createNoAAPass() { return new NoAA(); }
// NOTE(review): elided listing -- closing braces, the AliasResult return type
// of CheckGEPInstructions (original line ~113), and its size parameters are
// on lines not visible here. Code lines kept byte-identical.
86 /// BasicAliasAnalysis - This is the default alias analysis implementation.
87 /// Because it doesn't chain to a previous alias analysis (like -no-aa), it
88 /// derives from the NoAA class.
89 struct VISIBILITY_HIDDEN BasicAliasAnalysis : public NoAA {
// alias - the main disambiguation entry point; defined out of line below.
90 AliasResult alias(const Value *V1, unsigned V1Size,
91 const Value *V2, unsigned V2Size);
// Call-site vs. pointer queries get real analysis; call-vs-call defers to
// the NoAA/base-class behavior.
93 ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size);
94 ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
95 return NoAA::getModRefInfo(CS1,CS2);
98 /// hasNoModRefInfoForCalls - We can provide mod/ref information against
99 /// non-escaping allocations.
100 virtual bool hasNoModRefInfoForCalls() const { return false; }
102 /// pointsToConstantMemory - Chase pointers until we find a (constant
104 bool pointsToConstantMemory(const Value *P);
// getModRefBehavior - table-driven knowledge about well-known library calls;
// defined out of line at the bottom of the file.
106 virtual ModRefBehavior getModRefBehavior(Function *F, CallSite CS,
107 std::vector<PointerAccessInfo> *Info);
110 // CheckGEPInstructions - Check two GEP instructions with known
111 // must-aliasing base pointers. This checks to see if the index expressions
112 // preclude the pointers from aliasing...
114 CheckGEPInstructions(const Type* BasePtr1Ty, std::vector<Value*> &GEP1Ops,
116 const Type *BasePtr2Ty, std::vector<Value*> &GEP2Ops,
120 // Register this pass...
121 RegisterPass<BasicAliasAnalysis>
122 X("basicaa", "Basic Alias Analysis (default AA impl)");
// The 'true' template argument makes basicaa the default of the AA group.
124 // Declare that we implement the AliasAnalysis interface
125 RegisterAnalysisGroup<AliasAnalysis, true> Y(X);
126 } // End of anonymous namespace
// createBasicAliasAnalysisPass - Public factory for the default "basicaa"
// implementation; caller (the PassManager) takes ownership.
// NOTE(review): the closing '}' (original line 130) is elided in this listing.
128 ImmutablePass *llvm::createBasicAliasAnalysisPass() {
129 return new BasicAliasAnalysis();
132 // getUnderlyingObject - This traverses the use chain to figure out what object
133 // the specified value points to. If the value points to, or is derived from, a
134 // unique object or an argument, return it.
// Returns null for non-pointer values; the "return V" for the identified
// object case and the final fall-through "return 0" are on elided lines.
135 static const Value *getUnderlyingObject(const Value *V) {
136 if (!isa<PointerType>(V->getType())) return 0;
138 // If we are at some type of object, return it. GlobalValues and Allocations
139 // have unique addresses.
140 if (isa<GlobalValue>(V) || isa<AllocationInst>(V) || isa<Argument>(V))
143 // Traverse through different addressing mechanisms...
// Recurse through bitcasts and GEPs, whether as instructions or as
// constant expressions -- both preserve the underlying object.
144 if (const Instruction *I = dyn_cast<Instruction>(V)) {
145 if (isa<BitCastInst>(I) || isa<GetElementPtrInst>(I))
146 return getUnderlyingObject(I->getOperand(0));
147 } else if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
148 if (CE->getOpcode() == Instruction::BitCast ||
149 CE->getOpcode() == Instruction::GetElementPtr)
150 return getUnderlyingObject(CE->getOperand(0));
// isGEP - If V computes a getelementptr (either the instruction or the
// equivalent constant expression), return it as a User so the caller can walk
// its operands; the "not a GEP" return (null, on an elided line) otherwise.
155 static const User *isGEP(const Value *V) {
156 if (isa<GetElementPtrInst>(V) ||
157 (isa<ConstantExpr>(V) &&
158 cast<ConstantExpr>(V)->getOpcode() == Instruction::GetElementPtr))
159 return cast<User>(V);
// GetGEPOperands - Flatten a chain of GEPs rooted at V into one index list.
// GEPOps receives the combined indices (outermost GEP's indices first); the
// walked-to base pointer is returned (final "return V;" is on an elided line).
// Precondition: V must satisfy isGEP, since operand 0 is skipped as the base.
163 static const Value *GetGEPOperands(const Value *V, std::vector<Value*> &GEPOps){
164 assert(GEPOps.empty() && "Expect empty list to populate!");
165 GEPOps.insert(GEPOps.end(), cast<User>(V)->op_begin()+1,
166 cast<User>(V)->op_end());
168 // Accumulate all of the chained indexes into the operand array
169 V = cast<User>(V)->getOperand(0);
171 while (const User *G = isGEP(V)) {
// Only fold into the outer GEP when its leading index is the constant zero;
// anything else would require real pointer-offset arithmetic.
172 if (!isa<Constant>(GEPOps[0]) || isa<GlobalValue>(GEPOps[0]) ||
173 !cast<Constant>(GEPOps[0])->isNullValue())
174 break; // Don't handle folding arbitrary pointer offsets yet...
175 GEPOps.erase(GEPOps.begin()); // Drop the zero index
176 GEPOps.insert(GEPOps.begin(), G->op_begin()+1, G->op_end());
177 V = G->getOperand(0);
182 /// pointsToConstantMemory - Chase pointers until we find a (constant
/// global) or not. Only a provably-constant GlobalVariable qualifies; every
/// other underlying object falls through to the conservative "false" return,
/// which is on an elided line of this listing.
184 bool BasicAliasAnalysis::pointsToConstantMemory(const Value *P) {
185 if (const Value *V = getUnderlyingObject(P))
186 if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
187 return GV->isConstant();
191 // Determine if an AllocationInst instruction escapes from the function it is
192 // contained in. If it does not escape, there is no way for another function to
193 // mod/ref it. We do this by looking at its uses and determining if the uses
194 // can escape (recursively).
// NOTE(review): this switch is heavily elided -- the loop increment/bound,
// the break/return statements after each case, the default case, and the
// final "return false" are all on lines not visible in this listing.
195 static bool AddressMightEscape(const Value *V) {
196 for (Value::use_const_iterator UI = V->use_begin(), E = V->use_end();
198 const Instruction *I = cast<Instruction>(*UI);
199 switch (I->getOpcode()) {
// Loading *through* the pointer does not leak the pointer itself.
200 case Instruction::Load:
202 case Instruction::Store:
// Operand 0 of a store is the stored value; storing the pointer leaks it.
// (Being the store's address operand, by contrast, would not.)
203 if (I->getOperand(0) == V)
204 return true; // Escapes if the pointer is stored.
// Derived pointers (GEP, pointer bitcast) escape iff their uses escape.
206 case Instruction::GetElementPtr:
207 if (AddressMightEscape(I))
209 case Instruction::BitCast:
210 if (!isa<PointerType>(I->getType()))
212 if (AddressMightEscape(I))
215 case Instruction::Ret:
216 // If returned, the address will escape to calling functions, but no
217 // callees could modify it.
226 // getModRefInfo - Check to see if the specified callsite can clobber the
227 // specified memory object. Since we only look at local properties of this
228 // function, we really can't say much about this query. We do, however, use
229 // simple "address taken" analysis on local objects.
// NOTE(review): the NoModRef returns inside both if-bodies (original lines
// ~240 and ~246) and intervening braces are elided from this listing.
231 AliasAnalysis::ModRefResult
232 BasicAliasAnalysis::getModRefInfo(CallSite CS, Value *P, unsigned Size) {
233 if (!isa<Constant>(P))
234 if (const AllocationInst *AI =
235 dyn_cast_or_null<AllocationInst>(getUnderlyingObject(P))) {
236 // Okay, the pointer is to a stack allocated object. If we can prove that
237 // the pointer never "escapes", then we know the call cannot clobber it,
238 // because it simply can't get its address.
239 if (!AddressMightEscape(AI))
242 // If this is a tail call and P points to a stack location, we know that
243 // the tail call cannot access or modify the local stack.
244 if (CallInst *CI = dyn_cast<CallInst>(CS.getInstruction()))
245 if (CI->isTailCall() && isa<AllocaInst>(AI))
249 // The AliasAnalysis base class has some smarts, lets use them.
250 return AliasAnalysis::getModRefInfo(CS, P, Size);
253 // alias - Provide a bunch of ad-hoc rules to disambiguate in common cases, such
254 // as array references. Note that this function is heavily tail recursive.
255 // Hopefully we have a smart C++ compiler. :)
// NOTE(review): elided listing -- many lines (returns inside the size checks,
// the do-loop openers for the BasePtr walks, the declaration of GAlias
// around original line 381, the final MayAlias return, and assorted braces)
// are not visible here. Code lines are kept byte-identical.
257 AliasAnalysis::AliasResult
258 BasicAliasAnalysis::alias(const Value *V1, unsigned V1Size,
259 const Value *V2, unsigned V2Size) {
260 // Strip off any constant expression casts if they exist
261 if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V1))
262 if (CE->isCast() && isa<PointerType>(CE->getOperand(0)->getType()))
263 V1 = CE->getOperand(0);
264 if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V2))
265 if (CE->isCast() && isa<PointerType>(CE->getOperand(0)->getType()))
266 V2 = CE->getOperand(0);
268 // Are we checking for alias of the same value?
269 if (V1 == V2) return MustAlias;
// Int64Ty values are tolerated here -- presumably because pointer-sized
// integers can appear in these queries; verify against callers.
271 if ((!isa<PointerType>(V1->getType()) || !isa<PointerType>(V2->getType())) &&
272 V1->getType() != Type::Int64Ty && V2->getType() != Type::Int64Ty)
273 return NoAlias; // Scalars cannot alias each other
275 // Strip off cast instructions...
276 if (const BitCastInst *I = dyn_cast<BitCastInst>(V1))
277 if (isa<PointerType>(I->getOperand(0)->getType()))
278 return alias(I->getOperand(0), V1Size, V2, V2Size);
279 if (const BitCastInst *I = dyn_cast<BitCastInst>(V2))
280 if (isa<PointerType>(I->getOperand(0)->getType()))
281 return alias(V1, V1Size, I->getOperand(0), V2Size);
283 // Figure out what objects these things are pointing to if we can...
284 const Value *O1 = getUnderlyingObject(V1);
285 const Value *O2 = getUnderlyingObject(V2);
287 // Pointing at a discernible object?
290 if (isa<Argument>(O1)) {
291 // Incoming argument cannot alias locally allocated object!
292 if (isa<AllocationInst>(O2)) return NoAlias;
293 // Otherwise, nothing is known...
294 } else if (isa<Argument>(O2)) {
295 // Incoming argument cannot alias locally allocated object!
296 if (isa<AllocationInst>(O1)) return NoAlias;
297 // Otherwise, nothing is known...
298 } else if (O1 != O2) {
299 // If they are two different objects, we know that we have no alias...
303 // If they are the same object, then we can look at the indexes. If they
304 // index off of the object is the same for both pointers, they must alias.
305 // If they are provably different, they must not alias. Otherwise, we
306 // can't tell anything.
310 if (!isa<Argument>(O1) && isa<ConstantPointerNull>(V2))
311 return NoAlias; // Unique values don't alias null
// ~0U is the sentinel for "unknown access size" throughout this file.
313 if (isa<GlobalVariable>(O1) ||
314 (isa<AllocationInst>(O1) &&
315 !cast<AllocationInst>(O1)->isArrayAllocation()))
316 if (cast<PointerType>(O1->getType())->getElementType()->isSized()) {
317 // If the size of the other access is larger than the total size of the
318 // global/alloca/malloc, it cannot be accessing the global (it's
319 // undefined to load or store bytes before or after an object).
320 const Type *ElTy = cast<PointerType>(O1->getType())->getElementType();
321 unsigned GlobalSize = getTargetData().getTypeSize(ElTy);
322 if (GlobalSize < V2Size && V2Size != ~0U)
// Symmetric check with the roles of the two pointers swapped.
328 if (!isa<Argument>(O2) && isa<ConstantPointerNull>(V1))
329 return NoAlias; // Unique values don't alias null
331 if (isa<GlobalVariable>(O2) ||
332 (isa<AllocationInst>(O2) &&
333 !cast<AllocationInst>(O2)->isArrayAllocation()))
334 if (cast<PointerType>(O2->getType())->getElementType()->isSized()) {
335 // If the size of the other access is larger than the total size of the
336 // global/alloca/malloc, it cannot be accessing the object (it's
337 // undefined to load or store bytes before or after an object).
338 const Type *ElTy = cast<PointerType>(O2->getType())->getElementType();
339 unsigned GlobalSize = getTargetData().getTypeSize(ElTy);
340 if (GlobalSize < V1Size && V1Size != ~0U)
345 // If we have two gep instructions with must-alias'ing base pointers, figure
346 // out if the indexes to the GEP tell us anything about the derived pointer.
347 // Note that we also handle chains of getelementptr instructions as well as
348 // constant expression getelementptrs here.
350 if (isGEP(V1) && isGEP(V2)) {
351 // Drill down into the first non-gep value, to test for must-aliasing of
352 // the base pointers.
// The two do/while walks below skip through GEPs whose leading index is
// zero (their "do {" openers are on elided lines).
353 const Value *BasePtr1 = V1, *BasePtr2 = V2;
355 BasePtr1 = cast<User>(BasePtr1)->getOperand(0);
356 } while (isGEP(BasePtr1) &&
357 cast<User>(BasePtr1)->getOperand(1) ==
358 Constant::getNullValue(cast<User>(BasePtr1)->getOperand(1)->getType()));
360 BasePtr2 = cast<User>(BasePtr2)->getOperand(0);
361 } while (isGEP(BasePtr2) &&
362 cast<User>(BasePtr2)->getOperand(1) ==
363 Constant::getNullValue(cast<User>(BasePtr2)->getOperand(1)->getType()));
365 // Do the base pointers alias?
366 AliasResult BaseAlias = alias(BasePtr1, V1Size, BasePtr2, V2Size);
367 if (BaseAlias == NoAlias) return NoAlias;
368 if (BaseAlias == MustAlias) {
369 // If the base pointers alias each other exactly, check to see if we can
370 // figure out anything about the resultant pointers, to try to prove
373 // Collect all of the chained GEP operands together into one simple place
374 std::vector<Value*> GEP1Ops, GEP2Ops;
375 BasePtr1 = GetGEPOperands(V1, GEP1Ops);
376 BasePtr2 = GetGEPOperands(V2, GEP2Ops);
378 // If GetGEPOperands were able to fold to the same must-aliased pointer,
379 // do the comparison.
// GAlias is declared on an elided line (original ~381), assigned from the
// CheckGEPInstructions call below.
380 if (BasePtr1 == BasePtr2) {
382 CheckGEPInstructions(BasePtr1->getType(), GEP1Ops, V1Size,
383 BasePtr2->getType(), GEP2Ops, V2Size);
384 if (GAlias != MayAlias)
390 // Check to see if these two pointers are related by a getelementptr
391 // instruction. If one pointer is a GEP with a non-zero index of the other
392 // pointer, we know they cannot alias.
// Canonicalize so V1 is the GEP (the swap of V1/V2 themselves is on an
// elided line next to this size swap).
396 std::swap(V1Size, V2Size);
399 if (V1Size != ~0U && V2Size != ~0U)
401 std::vector<Value*> GEPOperands;
402 const Value *BasePtr = GetGEPOperands(V1, GEPOperands);
404 AliasResult R = alias(BasePtr, V1Size, V2, V2Size);
405 if (R == MustAlias) {
406 // If there is at least one non-zero constant index, we know they cannot
408 bool ConstantFound = false;
409 bool AllZerosFound = true;
410 for (unsigned i = 0, e = GEPOperands.size(); i != e; ++i)
411 if (const Constant *C = dyn_cast<Constant>(GEPOperands[i])) {
412 if (!C->isNullValue()) {
413 ConstantFound = true;
414 AllZerosFound = false;
418 AllZerosFound = false;
421 // If we have getelementptr <ptr>, 0, 0, 0, 0, ... and V2 must aliases
422 // the ptr, the end result is a must alias also.
// Size <= 1 means the access is at most one byte: a pure pointer-identity
// question, so a nonzero constant index alone proves NoAlias.
427 if (V2Size <= 1 && V1Size <= 1) // Just pointer check?
430 // Otherwise we have to check to see that the distance is more than
431 // the size of the argument... build an index vector that is equal to
432 // the arguments provided, except substitute 0's for any variable
433 // indexes we find...
434 if (cast<PointerType>(
435 BasePtr->getType())->getElementType()->isSized()) {
436 for (unsigned i = 0; i != GEPOperands.size(); ++i)
437 if (!isa<ConstantInt>(GEPOperands[i]) ||
438 GEPOperands[i]->getType() == Type::BoolTy)
440 Constant::getNullValue(GEPOperands[i]->getType());
// Offset is declared/assigned on an elided line from the call below.
442 getTargetData().getIndexedOffset(BasePtr->getType(), GEPOperands);
444 if (Offset >= (int64_t)V2Size || Offset <= -(int64_t)V1Size)
454 // This function is used to determine if the indices of two GEP instructions are
455 // equal. V1 and V2 are the indices.
// Same-typed indices compare by identity; differently-typed constants are
// sign-extended to i64 first. The actual return statements (identity compare
// and post-extension compare, plus the final "return false") are on elided
// lines of this listing.
456 static bool IndexOperandsEqual(Value *V1, Value *V2) {
457 if (V1->getType() == V2->getType())
459 if (Constant *C1 = dyn_cast<Constant>(V1))
460 if (Constant *C2 = dyn_cast<Constant>(V2)) {
461 // Sign extend the constants to long types, if necessary
462 if (C1->getType() != Type::Int64Ty)
463 C1 = ConstantExpr::getSExt(C1, Type::Int64Ty);
464 if (C2->getType() != Type::Int64Ty)
465 C2 = ConstantExpr::getSExt(C2, Type::Int64Ty);
471 /// CheckGEPInstructions - Check two GEP instructions with known must-aliasing
472 /// base pointers. This checks to see if the index expressions preclude the
473 /// pointers from aliasing...
// NOTE(review): heavily elided listing -- early returns, loop
// increments/closers, else-branches and several declarations (e.g. NextTy,
// the MayAlias fallthroughs) are on lines not visible here. Code lines are
// kept byte-identical.
474 AliasAnalysis::AliasResult
475 BasicAliasAnalysis::CheckGEPInstructions(
476 const Type* BasePtr1Ty, std::vector<Value*> &GEP1Ops, unsigned G1S,
477 const Type *BasePtr2Ty, std::vector<Value*> &GEP2Ops, unsigned G2S) {
478 // We currently can't handle the case when the base pointers have different
479 // primitive types. Since this is uncommon anyway, we are happy being
480 // extremely conservative.
481 if (BasePtr1Ty != BasePtr2Ty)
484 const PointerType *GEPPointerTy = cast<PointerType>(BasePtr1Ty);
486 // Find the (possibly empty) initial sequence of equal values... which are not
487 // necessarily constants.
488 unsigned NumGEP1Operands = GEP1Ops.size(), NumGEP2Operands = GEP2Ops.size();
489 unsigned MinOperands = std::min(NumGEP1Operands, NumGEP2Operands);
490 unsigned MaxOperands = std::max(NumGEP1Operands, NumGEP2Operands);
491 unsigned UnequalOper = 0;
492 while (UnequalOper != MinOperands &&
493 IndexOperandsEqual(GEP1Ops[UnequalOper], GEP2Ops[UnequalOper])) {
494 // Advance through the type as we go...
// UnequalOper is incremented on an elided line before this use of
// GEP1Ops[UnequalOper-1].
496 if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
497 BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[UnequalOper-1]);
499 // If all operands equal each other, then the derived pointers must
500 // alias each other...
502 assert(UnequalOper == NumGEP1Operands && UnequalOper == NumGEP2Operands &&
503 "Ran out of type nesting, but not out of operands?");
508 // If we have seen all constant operands, and run out of indexes on one of the
509 // getelementptrs, check to see if the tail of the leftover one is all zeros.
510 // If so, return mustalias.
511 if (UnequalOper == MinOperands) {
// Canonicalize: make GEP1Ops the longer operand list.
512 if (GEP1Ops.size() < GEP2Ops.size()) std::swap(GEP1Ops, GEP2Ops);
514 bool AllAreZeros = true;
515 for (unsigned i = UnequalOper; i != MaxOperands; ++i)
516 if (!isa<Constant>(GEP1Ops[i]) ||
517 !cast<Constant>(GEP1Ops[i])->isNullValue()) {
521 if (AllAreZeros) return MustAlias;
525 // So now we know that the indexes derived from the base pointers,
526 // which are known to alias, are different. We can still determine a
527 // no-alias result if there are differing constant pairs in the index
528 // chain. For example:
529 // A[i][0] != A[j][1] iff (&A[0][1]-&A[0][0] >= std::max(G1S, G2S))
531 // We have to be careful here about array accesses. In particular, consider:
532 // A[1][0] vs A[0][i]
533 // In this case, we don't *know* that the array will be accessed in bounds:
534 // the index could even be negative. Because of this, we have to
535 // conservatively *give up* and return may alias. We disregard differing
536 // array subscripts that are followed by a variable index without going
539 unsigned SizeMax = std::max(G1S, G2S);
540 if (SizeMax == ~0U) return MayAlias; // Avoid frivolous work.
542 // Scan for the first operand that is constant and unequal in the
543 // two getelementptrs...
544 unsigned FirstConstantOper = UnequalOper;
545 for (; FirstConstantOper != MinOperands; ++FirstConstantOper) {
546 const Value *G1Oper = GEP1Ops[FirstConstantOper];
547 const Value *G2Oper = GEP2Ops[FirstConstantOper];
549 if (G1Oper != G2Oper) // Found non-equal constant indexes...
550 if (Constant *G1OC = dyn_cast<ConstantInt>(const_cast<Value*>(G1Oper)))
551 if (Constant *G2OC = dyn_cast<ConstantInt>(const_cast<Value*>(G2Oper))){
552 if (G1OC->getType() != G2OC->getType()) {
553 // Sign extend both operands to long.
554 if (G1OC->getType() != Type::Int64Ty)
555 G1OC = ConstantExpr::getSExt(G1OC, Type::Int64Ty);
556 if (G2OC->getType() != Type::Int64Ty)
557 G2OC = ConstantExpr::getSExt(G2OC, Type::Int64Ty);
558 GEP1Ops[FirstConstantOper] = G1OC;
559 GEP2Ops[FirstConstantOper] = G2OC;
563 // Handle the "be careful" case above: if this is an array/packed
564 // subscript, scan for a subsequent variable array index.
565 if (isa<SequentialType>(BasePtr1Ty)) {
// NextTy's declaration/initializer line is elided; initialized from:
567 cast<SequentialType>(BasePtr1Ty)->getElementType();
568 bool isBadCase = false;
570 for (unsigned Idx = FirstConstantOper+1;
571 Idx != MinOperands && isa<SequentialType>(NextTy); ++Idx) {
572 const Value *V1 = GEP1Ops[Idx], *V2 = GEP2Ops[Idx];
573 if (!isa<Constant>(V1) || !isa<Constant>(V2)) {
577 NextTy = cast<SequentialType>(NextTy)->getElementType();
// Null G1OC signals "give up on this pair" to the code below.
580 if (isBadCase) G1OC = 0;
583 // Make sure they are comparable (ie, not constant expressions), and
584 // make sure the GEP with the smaller leading constant is GEP1.
// The guard (G1OC non-null) and the second getICmp argument line are elided.
586 Constant *Compare = ConstantExpr::getICmp(ICmpInst::ICMP_SGT,
588 if (ConstantInt *CV = dyn_cast<ConstantInt>(Compare)) {
589 if (CV->getBoolValue()) // If they are comparable and G2 > G1
590 std::swap(GEP1Ops, GEP2Ops); // Make GEP1 < GEP2
// Step the type down one level for the next iteration of the scan loop.
596 BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->getTypeAtIndex(G1Oper);
599 // No shared constant operands, and we ran out of common operands. At this
600 // point, the GEP instructions have run through all of their operands, and we
601 // haven't found evidence that there are any deltas between the GEP's.
602 // However, one GEP may have more operands than the other. If this is the
603 // case, there may still be hope. Check this now.
604 if (FirstConstantOper == MinOperands) {
605 // Make GEP1Ops be the longer one if there is a longer one.
606 if (GEP1Ops.size() < GEP2Ops.size())
607 std::swap(GEP1Ops, GEP2Ops);
609 // Is there anything to check?
610 if (GEP1Ops.size() > MinOperands) {
611 for (unsigned i = FirstConstantOper; i != MaxOperands; ++i)
612 if (isa<ConstantInt>(GEP1Ops[i]) &&
613 GEP1Ops[i]->getType() != Type::BoolTy &&
614 !cast<Constant>(GEP1Ops[i])->isNullValue()) {
615 // Yup, there's a constant in the tail. Set all variables to
616 // constants in the GEP instruction to make it suitable for
617 // TargetData::getIndexedOffset.
618 for (i = 0; i != MaxOperands; ++i)
619 if (!isa<ConstantInt>(GEP1Ops[i]) ||
620 GEP1Ops[i]->getType() == Type::BoolTy)
621 GEP1Ops[i] = Constant::getNullValue(GEP1Ops[i]->getType());
622 // Okay, now get the offset. This is the relative offset for the full
624 const TargetData &TD = getTargetData();
625 int64_t Offset1 = TD.getIndexedOffset(GEPPointerTy, GEP1Ops);
627 // Now crop off any constants from the end...
628 GEP1Ops.resize(MinOperands);
629 int64_t Offset2 = TD.getIndexedOffset(GEPPointerTy, GEP1Ops);
631 // If the tail provided a big enough offset, return noalias!
632 if ((uint64_t)(Offset2-Offset1) >= SizeMax)
637 // Couldn't find anything useful.
641 // If there are non-equal constants arguments, then we can figure
642 // out a minimum known delta between the two index expressions... at
643 // this point we know that the first constant index of GEP1 is less
644 // than the first constant index of GEP2.
646 // Advance BasePtr[12]Ty over this first differing constant operand.
647 BasePtr2Ty = cast<CompositeType>(BasePtr1Ty)->
648 getTypeAtIndex(GEP2Ops[FirstConstantOper]);
649 BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->
650 getTypeAtIndex(GEP1Ops[FirstConstantOper]);
652 // We are going to be using TargetData::getIndexedOffset to determine the
653 // offset that each of the GEP's is reaching. To do this, we have to convert
654 // all variable references to constant references. To do this, we convert the
655 // initial sequence of array subscripts into constant zeros to start with.
656 const Type *ZeroIdxTy = GEPPointerTy;
657 for (unsigned i = 0; i != FirstConstantOper; ++i) {
658 if (!isa<StructType>(ZeroIdxTy))
659 GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Type::Int32Ty);
661 if (const CompositeType *CT = dyn_cast<CompositeType>(ZeroIdxTy))
662 ZeroIdxTy = CT->getTypeAtIndex(GEP1Ops[i]);
665 // We know that GEP1Ops[FirstConstantOper] & GEP2Ops[FirstConstantOper] are ok
667 // Loop over the rest of the operands...
668 for (unsigned i = FirstConstantOper+1; i != MaxOperands; ++i) {
669 const Value *Op1 = i < GEP1Ops.size() ? GEP1Ops[i] : 0;
670 const Value *Op2 = i < GEP2Ops.size() ? GEP2Ops[i] : 0;
671 // If they are equal, use a zero index...
672 if (Op1 == Op2 && BasePtr1Ty == BasePtr2Ty) {
673 if (!isa<ConstantInt>(Op1) || Op1->getType() == Type::BoolTy)
674 GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Op1->getType());
675 // Otherwise, just keep the constants we have.
// The else-branch opener ("} else { if (Op1) {") is on elided lines.
678 if (const ConstantInt *Op1C = dyn_cast<ConstantInt>(Op1)) {
679 // If this is an array index, make sure the array element is in range.
680 if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty)) {
681 if (Op1C->getZExtValue() >= AT->getNumElements())
682 return MayAlias; // Be conservative with out-of-range accesses
683 } else if (const PackedType *PT = dyn_cast<PackedType>(BasePtr1Ty)) {
684 if (Op1C->getZExtValue() >= PT->getNumElements())
685 return MayAlias; // Be conservative with out-of-range accesses
689 // GEP1 is known to produce a value less than GEP2. To be
690 // conservatively correct, we must assume the largest possible
691 // constant is used in this position. This cannot be the initial
692 // index to the GEP instructions (because we know we have at least one
693 // element before this one with the different constant arguments), so
694 // we know that the current index must be into either a struct or
695 // array. Because we know it's not constant, this cannot be a
696 // structure index. Because of this, we can calculate the maximum
699 if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty))
700 GEP1Ops[i] = ConstantInt::get(Type::Int64Ty, AT->getNumElements()-1);
701 else if (const PackedType *PT = dyn_cast<PackedType>(BasePtr1Ty))
702 GEP1Ops[i] = ConstantInt::get(Type::Int64Ty, PT->getNumElements()-1);
// Mirror-image handling for the GEP2 operand ("if (Op2) {" is elided).
708 if (const ConstantInt *Op2C = dyn_cast<ConstantInt>(Op2)) {
709 // If this is an array index, make sure the array element is in range.
710 if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty)) {
711 if (Op2C->getZExtValue() >= AT->getNumElements())
712 return MayAlias; // Be conservative with out-of-range accesses
713 } else if (const PackedType *PT = dyn_cast<PackedType>(BasePtr1Ty)) {
714 if (Op2C->getZExtValue() >= PT->getNumElements())
715 return MayAlias; // Be conservative with out-of-range accesses
717 } else { // Conservatively assume the minimum value for this index
718 GEP2Ops[i] = Constant::getNullValue(Op2->getType());
// Step each base type down a level to track the next operand.
723 if (BasePtr1Ty && Op1) {
724 if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
725 BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[i]);
730 if (BasePtr2Ty && Op2) {
731 if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr2Ty))
732 BasePtr2Ty = CT->getTypeAtIndex(GEP2Ops[i]);
// Final offset comparison: with variables zeroed/maxed as above, a delta of
// at least SizeMax proves the accesses cannot overlap.
738 if (GEPPointerTy->getElementType()->isSized()) {
739 int64_t Offset1 = getTargetData().getIndexedOffset(GEPPointerTy, GEP1Ops);
740 int64_t Offset2 = getTargetData().getIndexedOffset(GEPPointerTy, GEP2Ops);
741 assert(Offset1<Offset2 && "There is at least one different constant here!");
743 if ((uint64_t)(Offset2-Offset1) >= SizeMax) {
744 //cerr << "Determined that these two GEP's don't alias ["
745 // << SizeMax << " bytes]: \n" << *GEP1 << *GEP2;
// StringCompare - strict-weak-ordering functor over C strings, used to sort
// and binary-search the function-name tables below. (Closing braces are on
// elided lines; consider marking operator() const when touching this code.)
753 struct StringCompare {
754 bool operator()(const char *LHS, const char *RHS) {
755 return strcmp(LHS, RHS) < 0;
// Note that this list cannot contain libm functions (such as acos and sqrt)
// that set errno on a domain or other error.
//
// These names feed getModRefBehavior's DoesNotAccessMemory answer: the table
// is copied into NoMemoryTable, sorted with StringCompare, then searched with
// std::lower_bound.
static const char *DoesntAccessMemoryFns[] = {
  "abs", "labs", "llabs", "imaxabs", "fabs", "fabsf", "fabsl",
  "trunc", "truncf", "truncl", "ldexp",

  "atan", "atanf", "atanl", "atan2", "atan2f", "atan2l",
  "cos", "cosf", "cosl",
  "exp", "expf", "expl",
  "sin", "sinf", "sinl",
  "tan", "tanf", "tanl", "tanh", "tanhf", "tanhl",

  "floor", "floorf", "floorl", "ceil", "ceilf", "ceill",

  // BUG FIX: a comma was missing between "isprint" and "ispunct", so the two
  // adjacent string literals silently concatenated into the single bogus
  // entry "isprintispunct" -- neither function could ever match the table.
  "isalnum", "isalpha", "iscntrl", "isdigit", "isgraph", "islower", "isprint",
  "ispunct", "isspace", "isupper", "isxdigit", "tolower", "toupper",

  "iswalnum", "iswalpha", "iswcntrl", "iswdigit", "iswgraph", "iswlower",
  "iswprint", "iswpunct", "iswspace", "iswupper", "iswxdigit",

  "iswctype", "towctrans", "towlower", "towupper",

  "isinf", "isnan", "finite",

  // C99 math functions
  "copysign", "copysignf", "copysignd",
  "nexttoward", "nexttowardf", "nexttowardd",
  "nextafter", "nextafterf", "nextafterd",

  "__signbit", "__signbitf", "__signbitl",
};
// Names of external functions that only read memory (their arguments and/or
// global state); matched entries make getModRefBehavior return OnlyReadsMemory.
// NOTE(review): the closing "};" (and possibly more entries -- the embedded
// numbering jumps from 821 to 824) is elided from this listing.
800 static const char *OnlyReadsMemoryFns[] = {
801 "atoi", "atol", "atof", "atoll", "atoq", "a64l",
802 "bcmp", "memcmp", "memchr", "memrchr", "wmemcmp", "wmemchr",
805 "strcmp", "strcasecmp", "strcoll", "strncmp", "strncasecmp",
806 "strchr", "strcspn", "strlen", "strpbrk", "strrchr", "strspn", "strstr",
810 "wcschr", "wcscmp", "wcscoll", "wcscspn", "wcslen", "wcsncmp", "wcspbrk",
811 "wcsrchr", "wcsspn", "wcsstr",
814 "alphasort", "alphasort64", "versionsort", "versionsort64",
817 "nan", "nanf", "nand",
820 "feof", "ferror", "fileno",
821 "feof_unlocked", "ferror_unlocked", "fileno_unlocked"
// Sorted copies of the tables above, built lazily on first use inside
// getModRefBehavior. ManagedStatic defers construction until first
// dereference (presumably with cleanup at llvm_shutdown -- see
// llvm/Support/ManagedStatic.h).
824 static ManagedStatic<std::vector<const char*> > NoMemoryTable;
825 static ManagedStatic<std::vector<const char*> > OnlyReadsMemoryTable;
// getModRefBehavior - Classify well-known external (library) functions by
// name: DoesNotAccessMemory or OnlyReadsMemory when found in the sorted
// tables, UnknownModRefBehavior otherwise. Only external declarations are
// considered; defined functions fall through immediately.
// NOTE(review): the "if (!Initialized) { ... Initialized = true; }" guard
// braces around the one-time table setup, and the second std::sort's
// comparator argument, are on elided lines of this listing.
828 AliasAnalysis::ModRefBehavior
829 BasicAliasAnalysis::getModRefBehavior(Function *F, CallSite CS,
830 std::vector<PointerAccessInfo> *Info) {
831 if (!F->isExternal()) return UnknownModRefBehavior;
832 static bool Initialized = false;
// One-time population of the lazily-built tables from the static arrays.
835 NoMemoryTable->insert(NoMemoryTable->end(),
836 DoesntAccessMemoryFns,
837 DoesntAccessMemoryFns+
838 sizeof(DoesntAccessMemoryFns)/sizeof(DoesntAccessMemoryFns[0]));
840 OnlyReadsMemoryTable->insert(OnlyReadsMemoryTable->end(),
843 sizeof(OnlyReadsMemoryFns)/sizeof(OnlyReadsMemoryFns[0]));
// Pull in intrinsic mod/ref data generated by TableGen.
844 #define GET_MODREF_BEHAVIOR
845 #include "llvm/Intrinsics.gen"
846 #undef GET_MODREF_BEHAVIOR
848 // Sort the table the first time through.
849 std::sort(NoMemoryTable->begin(), NoMemoryTable->end(), StringCompare());
850 std::sort(OnlyReadsMemoryTable->begin(), OnlyReadsMemoryTable->end(),
// Binary search both sorted tables for the callee's name. The lower_bound
// result must be re-checked for equality since it only gives a lower bound.
855 std::vector<const char*>::iterator Ptr =
856 std::lower_bound(NoMemoryTable->begin(), NoMemoryTable->end(),
857 F->getName().c_str(), StringCompare());
858 if (Ptr != NoMemoryTable->end() && *Ptr == F->getName())
859 return DoesNotAccessMemory;
861 Ptr = std::lower_bound(OnlyReadsMemoryTable->begin(),
862 OnlyReadsMemoryTable->end(),
863 F->getName().c_str(), StringCompare());
864 if (Ptr != OnlyReadsMemoryTable->end() && *Ptr == F->getName())
865 return OnlyReadsMemory;
867 return UnknownModRefBehavior;
870 // Make sure that anything that uses AliasAnalysis pulls in this file...
871 DEFINING_FILE_FOR(BasicAliasAnalysis)