1 //===- ObjCARC.cpp - ObjC ARC Optimization --------------------------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file defines ObjC ARC optimizations. ARC stands for
11 // Automatic Reference Counting and is a system for managing reference counts
// for objects in Objective-C.
14 // The optimizations performed include elimination of redundant, partially
15 // redundant, and inconsequential reference count operations, elimination of
16 // redundant weak pointer operations, pattern-matching and replacement of
// low-level operations into higher-level operations, and numerous minor
// simplifications.
20 // This file also defines a simple ARC-aware AliasAnalysis.
22 // WARNING: This file knows about certain library functions. It recognizes them
23 // by name, and hardwires knowledge of their semantics.
25 // WARNING: This file knows about how certain Objective-C library functions are
26 // used. Naive LLVM IR transformations which would otherwise be
27 // behavior-preserving may break these assumptions.
29 //===----------------------------------------------------------------------===//
31 #define DEBUG_TYPE "objc-arc"
32 #include "llvm/Support/raw_ostream.h"
33 #include "llvm/Support/CommandLine.h"
34 #include "llvm/ADT/DenseMap.h"
37 // A handy option to enable/disable all optimizations in this file.
38 static cl::opt<bool> EnableARCOpts("enable-objc-arc-opts", cl::init(true));
//===----------------------------------------------------------------------===//
// Misc. Utilities
//===----------------------------------------------------------------------===//

namespace {
45 /// MapVector - An associative container with fast insertion-order
  /// (deterministic) iteration over its elements. Plus the special
  /// blot operation.
48 template<class KeyT, class ValueT>
  class MapVector {
    /// Map - Map keys to indices in Vector.
    typedef DenseMap<KeyT, size_t> MapTy;
    MapTy Map;
54 /// Vector - Keys and values.
    typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
    VectorTy Vector;

  public:
59 typedef typename VectorTy::iterator iterator;
60 typedef typename VectorTy::const_iterator const_iterator;
61 iterator begin() { return Vector.begin(); }
62 iterator end() { return Vector.end(); }
63 const_iterator begin() const { return Vector.begin(); }
64 const_iterator end() const { return Vector.end(); }
68 assert(Vector.size() >= Map.size()); // May differ due to blotting.
      for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
           I != E; ++I) {
71 assert(I->second < Vector.size());
72 assert(Vector[I->second].first == I->first);
74 for (typename VectorTy::const_iterator I = Vector.begin(),
75 E = Vector.end(); I != E; ++I)
77 (Map.count(I->first) &&
78 Map[I->first] == size_t(I - Vector.begin())));
82 ValueT &operator[](const KeyT &Arg) {
83 std::pair<typename MapTy::iterator, bool> Pair =
84 Map.insert(std::make_pair(Arg, size_t(0)));
      if (Pair.second) {
        size_t Num = Vector.size();
87 Pair.first->second = Num;
88 Vector.push_back(std::make_pair(Arg, ValueT()));
89 return Vector[Num].second;
      }
      return Vector[Pair.first->second].second;
    }
94 std::pair<iterator, bool>
95 insert(const std::pair<KeyT, ValueT> &InsertPair) {
96 std::pair<typename MapTy::iterator, bool> Pair =
97 Map.insert(std::make_pair(InsertPair.first, size_t(0)));
      if (Pair.second) {
        size_t Num = Vector.size();
100 Pair.first->second = Num;
101 Vector.push_back(InsertPair);
102 return std::make_pair(Vector.begin() + Num, true);
      }
      return std::make_pair(Vector.begin() + Pair.first->second, false);
    }
107 const_iterator find(const KeyT &Key) const {
108 typename MapTy::const_iterator It = Map.find(Key);
109 if (It == Map.end()) return Vector.end();
110 return Vector.begin() + It->second;
113 /// blot - This is similar to erase, but instead of removing the element
114 /// from the vector, it just zeros out the key in the vector. This leaves
    /// iterators intact, but clients must be prepared for zeroed-out keys when
    /// iterating.
117 void blot(const KeyT &Key) {
118 typename MapTy::iterator It = Map.find(Key);
119 if (It == Map.end()) return;
      Vector[It->second].first = KeyT();
      Map.erase(It);
    }
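    // Illustrative usage of blot() (this example is not part of the original
    // code; the keys A and B are hypothetical):
    //
    //   MapVector<const Value *, unsigned> MV;
    //   MV[A] = 1;
    //   MV[B] = 2;
    //   MV.blot(A);
    //   // Iteration still visits two entries, but the first now carries a
    //   // default-constructed (zeroed) key; find(A) no longer succeeds.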
//===----------------------------------------------------------------------===//
// ARC Utilities.
//===----------------------------------------------------------------------===//
135 #include "llvm/Intrinsics.h"
136 #include "llvm/Module.h"
137 #include "llvm/Analysis/ValueTracking.h"
138 #include "llvm/Transforms/Utils/Local.h"
139 #include "llvm/Support/CallSite.h"
140 #include "llvm/ADT/StringSwitch.h"
namespace {
  /// InstructionClass - A simple classification for instructions.
144 enum InstructionClass {
145 IC_Retain, ///< objc_retain
146 IC_RetainRV, ///< objc_retainAutoreleasedReturnValue
147 IC_RetainBlock, ///< objc_retainBlock
148 IC_Release, ///< objc_release
149 IC_Autorelease, ///< objc_autorelease
150 IC_AutoreleaseRV, ///< objc_autoreleaseReturnValue
151 IC_AutoreleasepoolPush, ///< objc_autoreleasePoolPush
152 IC_AutoreleasepoolPop, ///< objc_autoreleasePoolPop
153 IC_NoopCast, ///< objc_retainedObject, etc.
154 IC_FusedRetainAutorelease, ///< objc_retainAutorelease
155 IC_FusedRetainAutoreleaseRV, ///< objc_retainAutoreleaseReturnValue
156 IC_LoadWeakRetained, ///< objc_loadWeakRetained (primitive)
157 IC_StoreWeak, ///< objc_storeWeak (primitive)
158 IC_InitWeak, ///< objc_initWeak (derived)
159 IC_LoadWeak, ///< objc_loadWeak (derived)
160 IC_MoveWeak, ///< objc_moveWeak (derived)
161 IC_CopyWeak, ///< objc_copyWeak (derived)
162 IC_DestroyWeak, ///< objc_destroyWeak (derived)
163 IC_StoreStrong, ///< objc_storeStrong (derived)
164 IC_CallOrUser, ///< could call objc_release and/or "use" pointers
165 IC_Call, ///< could call objc_release
166 IC_User, ///< could "use" a pointer
    IC_None                   ///< anything else
  };
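  // For illustration only (hypothetical IR), GetInstructionClass below would
  // classify:
  //
  //   %0 = call i8* @objc_retain(i8* %x)   ; IC_Retain
  //   call void @objc_release(i8* %x)      ; IC_Release
  //   %1 = bitcast i8* %x to i32*          ; IC_None (just forwards %x)
  //   call void @use_pointer(i8* %x)       ; IC_CallOrUser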
/// IsPotentialUse - Test whether the given value is possibly a
172 /// reference-counted pointer.
173 static bool IsPotentialUse(const Value *Op) {
174 // Pointers to static or stack storage are not reference-counted pointers.
  if (isa<Constant>(Op) || isa<AllocaInst>(Op))
    return false;
177 // Special arguments are not reference-counted.
178 if (const Argument *Arg = dyn_cast<Argument>(Op))
179 if (Arg->hasByValAttr() ||
180 Arg->hasNestAttr() ||
        Arg->hasStructRetAttr())
      return false;
183 // Only consider values with pointer types.
  // It seems intuitive to exclude function pointer types as well, since
  // functions are never reference-counted; however, clang occasionally
  // bitcasts reference-counted pointers to function-pointer type
  // temporarily.
  PointerType *Ty = dyn_cast<PointerType>(Op->getType());
  if (!Ty)
    return false;
  // Conservatively assume anything else is a potential use.
  return true;
}
195 /// GetCallSiteClass - Helper for GetInstructionClass. Determines what kind
196 /// of construct CS is.
197 static InstructionClass GetCallSiteClass(ImmutableCallSite CS) {
  for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
200 if (IsPotentialUse(*I))
201 return CS.onlyReadsMemory() ? IC_User : IC_CallOrUser;
203 return CS.onlyReadsMemory() ? IC_None : IC_Call;
206 /// GetFunctionClass - Determine if F is one of the special known Functions.
207 /// If it isn't, return IC_CallOrUser.
208 static InstructionClass GetFunctionClass(const Function *F) {
209 Function::const_arg_iterator AI = F->arg_begin(), AE = F->arg_end();
  // No arguments.
  if (AI == AE)
    return StringSwitch<InstructionClass>(F->getName())
214 .Case("objc_autoreleasePoolPush", IC_AutoreleasepoolPush)
215 .Default(IC_CallOrUser);
  // One argument.
  const Argument *A0 = AI++;
  if (AI == AE)
    // Argument is a pointer.
221 if (PointerType *PTy = dyn_cast<PointerType>(A0->getType())) {
222 Type *ETy = PTy->getElementType();
224 if (ETy->isIntegerTy(8))
225 return StringSwitch<InstructionClass>(F->getName())
226 .Case("objc_retain", IC_Retain)
227 .Case("objc_retainAutoreleasedReturnValue", IC_RetainRV)
228 .Case("objc_retainBlock", IC_RetainBlock)
229 .Case("objc_release", IC_Release)
230 .Case("objc_autorelease", IC_Autorelease)
231 .Case("objc_autoreleaseReturnValue", IC_AutoreleaseRV)
232 .Case("objc_autoreleasePoolPop", IC_AutoreleasepoolPop)
233 .Case("objc_retainedObject", IC_NoopCast)
234 .Case("objc_unretainedObject", IC_NoopCast)
235 .Case("objc_unretainedPointer", IC_NoopCast)
236 .Case("objc_retain_autorelease", IC_FusedRetainAutorelease)
237 .Case("objc_retainAutorelease", IC_FusedRetainAutorelease)
238 .Case("objc_retainAutoreleaseReturnValue",IC_FusedRetainAutoreleaseRV)
239 .Default(IC_CallOrUser);
242 if (PointerType *Pte = dyn_cast<PointerType>(ETy))
243 if (Pte->getElementType()->isIntegerTy(8))
244 return StringSwitch<InstructionClass>(F->getName())
245 .Case("objc_loadWeakRetained", IC_LoadWeakRetained)
246 .Case("objc_loadWeak", IC_LoadWeak)
247 .Case("objc_destroyWeak", IC_DestroyWeak)
248 .Default(IC_CallOrUser);
251 // Two arguments, first is i8**.
252 const Argument *A1 = AI++;
  if (AI == AE)
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType()))
255 if (PointerType *Pte = dyn_cast<PointerType>(PTy->getElementType()))
256 if (Pte->getElementType()->isIntegerTy(8))
257 if (PointerType *PTy1 = dyn_cast<PointerType>(A1->getType())) {
258 Type *ETy1 = PTy1->getElementType();
259 // Second argument is i8*
260 if (ETy1->isIntegerTy(8))
261 return StringSwitch<InstructionClass>(F->getName())
262 .Case("objc_storeWeak", IC_StoreWeak)
263 .Case("objc_initWeak", IC_InitWeak)
264 .Case("objc_storeStrong", IC_StoreStrong)
265 .Default(IC_CallOrUser);
266 // Second argument is i8**.
267 if (PointerType *Pte1 = dyn_cast<PointerType>(ETy1))
268 if (Pte1->getElementType()->isIntegerTy(8))
269 return StringSwitch<InstructionClass>(F->getName())
270 .Case("objc_moveWeak", IC_MoveWeak)
271 .Case("objc_copyWeak", IC_CopyWeak)
272 .Default(IC_CallOrUser);
  // Anything else.
  return IC_CallOrUser;
}
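// For example (illustrative declarations, matching the shape checks above):
//
//   declare i8* @objc_retain(i8*)          ; one i8* argument  -> IC_Retain
//   declare i8* @objc_loadWeak(i8**)       ; one i8** argument -> IC_LoadWeak
//   declare i8* @objc_storeWeak(i8**, i8*) ; i8**, i8*         -> IC_StoreWeak
//
// A function that merely shares a runtime entry point's name but has the
// wrong prototype deliberately falls through to IC_CallOrUser.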
279 /// GetInstructionClass - Determine what kind of construct V is.
280 static InstructionClass GetInstructionClass(const Value *V) {
281 if (const Instruction *I = dyn_cast<Instruction>(V)) {
    // Any instruction other than bitcast and gep with a pointer operand has a
283 // use of an objc pointer. Bitcasts, GEPs, Selects, PHIs transfer a pointer
284 // to a subsequent use, rather than using it themselves, in this sense.
285 // As a short cut, several other opcodes are known to have no pointer
286 // operands of interest. And ret is never followed by a release, so it's
287 // not interesting to examine.
288 switch (I->getOpcode()) {
289 case Instruction::Call: {
290 const CallInst *CI = cast<CallInst>(I);
291 // Check for calls to special functions.
292 if (const Function *F = CI->getCalledFunction()) {
293 InstructionClass Class = GetFunctionClass(F);
        if (Class != IC_CallOrUser)
          return Class;
297 // None of the intrinsic functions do objc_release. For intrinsics, the
298 // only question is whether or not they may be users.
299 switch (F->getIntrinsicID()) {
300 case Intrinsic::returnaddress: case Intrinsic::frameaddress:
301 case Intrinsic::stacksave: case Intrinsic::stackrestore:
302 case Intrinsic::vastart: case Intrinsic::vacopy: case Intrinsic::vaend:
303 case Intrinsic::objectsize: case Intrinsic::prefetch:
304 case Intrinsic::stackprotector:
305 case Intrinsic::eh_return_i32: case Intrinsic::eh_return_i64:
306 case Intrinsic::eh_typeid_for: case Intrinsic::eh_dwarf_cfa:
307 case Intrinsic::eh_sjlj_lsda: case Intrinsic::eh_sjlj_functioncontext:
308 case Intrinsic::init_trampoline: case Intrinsic::adjust_trampoline:
309 case Intrinsic::lifetime_start: case Intrinsic::lifetime_end:
310 case Intrinsic::invariant_start: case Intrinsic::invariant_end:
311 // Don't let dbg info affect our results.
312 case Intrinsic::dbg_declare: case Intrinsic::dbg_value:
313 // Short cut: Some intrinsics obviously don't use ObjC pointers.
          return IC_None;
        default:
          break;
        }
      }
      return GetCallSiteClass(CI);
    }
321 case Instruction::Invoke:
322 return GetCallSiteClass(cast<InvokeInst>(I));
323 case Instruction::BitCast:
324 case Instruction::GetElementPtr:
325 case Instruction::Select: case Instruction::PHI:
326 case Instruction::Ret: case Instruction::Br:
327 case Instruction::Switch: case Instruction::IndirectBr:
328 case Instruction::Alloca: case Instruction::VAArg:
329 case Instruction::Add: case Instruction::FAdd:
330 case Instruction::Sub: case Instruction::FSub:
331 case Instruction::Mul: case Instruction::FMul:
332 case Instruction::SDiv: case Instruction::UDiv: case Instruction::FDiv:
333 case Instruction::SRem: case Instruction::URem: case Instruction::FRem:
334 case Instruction::Shl: case Instruction::LShr: case Instruction::AShr:
335 case Instruction::And: case Instruction::Or: case Instruction::Xor:
336 case Instruction::SExt: case Instruction::ZExt: case Instruction::Trunc:
337 case Instruction::IntToPtr: case Instruction::FCmp:
338 case Instruction::FPTrunc: case Instruction::FPExt:
339 case Instruction::FPToUI: case Instruction::FPToSI:
340 case Instruction::UIToFP: case Instruction::SIToFP:
341 case Instruction::InsertElement: case Instruction::ExtractElement:
342 case Instruction::ShuffleVector:
343 case Instruction::ExtractValue:
      break;
    case Instruction::ICmp:
346 // Comparing a pointer with null, or any other constant, isn't an
347 // interesting use, because we don't care what the pointer points to, or
348 // about the values of any other dynamic reference-counted pointers.
      if (IsPotentialUse(I->getOperand(1)))
        return IC_User;
      break;
    default:
353 // For anything else, check all the operands.
354 // Note that this includes both operands of a Store: while the first
355 // operand isn't actually being dereferenced, it is being stored to
356 // memory where we can no longer track who might read it and dereference
357 // it, so we have to consider it potentially used.
      for (User::const_op_iterator OI = I->op_begin(), OE = I->op_end();
           OI != OE; ++OI)
        if (IsPotentialUse(*OI))
          return IC_User;
    }
  }
  // Otherwise, it's totally inert for ARC purposes.
  return IC_None;
}
369 /// GetBasicInstructionClass - Determine what kind of construct V is. This is
/// similar to GetInstructionClass except that it only detects objc runtime
371 /// calls. This allows it to be faster.
372 static InstructionClass GetBasicInstructionClass(const Value *V) {
373 if (const CallInst *CI = dyn_cast<CallInst>(V)) {
374 if (const Function *F = CI->getCalledFunction())
375 return GetFunctionClass(F);
376 // Otherwise, be conservative.
377 return IC_CallOrUser;
380 // Otherwise, be conservative.
381 return isa<InvokeInst>(V) ? IC_CallOrUser : IC_User;
/// IsRetain - Test if the given class is objc_retain or
/// objc_retainAutoreleasedReturnValue.
386 static bool IsRetain(InstructionClass Class) {
387 return Class == IC_Retain ||
388 Class == IC_RetainRV;
/// IsAutorelease - Test if the given class is objc_autorelease or
/// objc_autoreleaseReturnValue.
393 static bool IsAutorelease(InstructionClass Class) {
394 return Class == IC_Autorelease ||
395 Class == IC_AutoreleaseRV;
398 /// IsForwarding - Test if the given class represents instructions which return
399 /// their argument verbatim.
400 static bool IsForwarding(InstructionClass Class) {
401 // objc_retainBlock technically doesn't always return its argument
402 // verbatim, but it doesn't matter for our purposes here.
403 return Class == IC_Retain ||
404 Class == IC_RetainRV ||
405 Class == IC_Autorelease ||
406 Class == IC_AutoreleaseRV ||
407 Class == IC_RetainBlock ||
408 Class == IC_NoopCast;
411 /// IsNoopOnNull - Test if the given class represents instructions which do
412 /// nothing if passed a null pointer.
413 static bool IsNoopOnNull(InstructionClass Class) {
414 return Class == IC_Retain ||
415 Class == IC_RetainRV ||
416 Class == IC_Release ||
417 Class == IC_Autorelease ||
418 Class == IC_AutoreleaseRV ||
419 Class == IC_RetainBlock;
422 /// IsAlwaysTail - Test if the given class represents instructions which are
423 /// always safe to mark with the "tail" keyword.
424 static bool IsAlwaysTail(InstructionClass Class) {
425 // IC_RetainBlock may be given a stack argument.
426 return Class == IC_Retain ||
427 Class == IC_RetainRV ||
428 Class == IC_Autorelease ||
429 Class == IC_AutoreleaseRV;
432 /// IsNoThrow - Test if the given class represents instructions which are always
/// safe to mark with the nounwind attribute.
434 static bool IsNoThrow(InstructionClass Class) {
435 // objc_retainBlock is not nounwind because it calls user copy constructors
436 // which could theoretically throw.
437 return Class == IC_Retain ||
438 Class == IC_RetainRV ||
439 Class == IC_Release ||
440 Class == IC_Autorelease ||
441 Class == IC_AutoreleaseRV ||
442 Class == IC_AutoreleasepoolPush ||
443 Class == IC_AutoreleasepoolPop;
446 /// EraseInstruction - Erase the given instruction. Many ObjC calls return their
447 /// argument verbatim, so if it's such a call and the return value has users,
448 /// replace them with the argument value.
449 static void EraseInstruction(Instruction *CI) {
450 Value *OldArg = cast<CallInst>(CI)->getArgOperand(0);
452 bool Unused = CI->use_empty();
  if (!Unused) {
    // Replace the return value with the argument.
456 assert(IsForwarding(GetBasicInstructionClass(CI)) &&
457 "Can't delete non-forwarding instruction with users!");
458 CI->replaceAllUsesWith(OldArg);
  }

  CI->eraseFromParent();

  if (Unused)
    RecursivelyDeleteTriviallyDeadInstructions(OldArg);
}
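// For example (hypothetical IR), erasing the objc_autorelease in
//
//   %y = call i8* @objc_autorelease(i8* %x)
//   call void @use_pointer(i8* %y)
//
// rewrites the use of %y to use %x directly, which is safe because
// objc_autorelease returns its argument verbatim.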
467 /// GetUnderlyingObjCPtr - This is a wrapper around getUnderlyingObject which
468 /// also knows how to look through objc_retain and objc_autorelease calls, which
469 /// we know to return their argument verbatim.
470 static const Value *GetUnderlyingObjCPtr(const Value *V) {
  for (;;) {
    V = GetUnderlyingObject(V);
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}
481 /// StripPointerCastsAndObjCCalls - This is a wrapper around
482 /// Value::stripPointerCasts which also knows how to look through objc_retain
483 /// and objc_autorelease calls, which we know to return their argument verbatim.
484 static const Value *StripPointerCastsAndObjCCalls(const Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}
494 /// StripPointerCastsAndObjCCalls - This is a wrapper around
495 /// Value::stripPointerCasts which also knows how to look through objc_retain
496 /// and objc_autorelease calls, which we know to return their argument verbatim.
497 static Value *StripPointerCastsAndObjCCalls(Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}
507 /// GetObjCArg - Assuming the given instruction is one of the special calls such
508 /// as objc_retain or objc_release, return the argument value, stripped of no-op
509 /// casts and forwarding calls.
510 static Value *GetObjCArg(Value *Inst) {
  return StripPointerCastsAndObjCCalls(cast<CallInst>(Inst)->getArgOperand(0));
}
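// For example (hypothetical IR), given
//
//   %0 = bitcast i32* %p to i8*
//   %1 = call i8* @objc_retain(i8* %0)
//   call void @objc_release(i8* %1)
//
// GetObjCArg on the objc_release returns %p, looking through both the
// forwarding objc_retain call and the no-op bitcast.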
514 /// IsObjCIdentifiedObject - This is similar to AliasAnalysis'
515 /// isObjCIdentifiedObject, except that it uses special knowledge of
516 /// ObjC conventions...
517 static bool IsObjCIdentifiedObject(const Value *V) {
518 // Assume that call results and arguments have their own "provenance".
519 // Constants (including GlobalVariables) and Allocas are never
520 // reference-counted.
521 if (isa<CallInst>(V) || isa<InvokeInst>(V) ||
      isa<Argument>(V) || isa<Constant>(V) ||
      isa<AllocaInst>(V))
    return true;
526 if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
527 const Value *Pointer =
528 StripPointerCastsAndObjCCalls(LI->getPointerOperand());
529 if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Pointer)) {
530 // A constant pointer can't be pointing to an object on the heap. It may
531 // be reference-counted, but it won't be deleted.
      if (GV->isConstant())
        return true;
534 StringRef Name = GV->getName();
535 // These special variables are known to hold values which are not
536 // reference-counted pointers.
537 if (Name.startswith("\01L_OBJC_SELECTOR_REFERENCES_") ||
538 Name.startswith("\01L_OBJC_CLASSLIST_REFERENCES_") ||
539 Name.startswith("\01L_OBJC_CLASSLIST_SUP_REFS_$_") ||
540 Name.startswith("\01L_OBJC_METH_VAR_NAME_") ||
541 Name.startswith("\01l_objc_msgSend_fixup_"))
549 /// FindSingleUseIdentifiedObject - This is similar to
550 /// StripPointerCastsAndObjCCalls but it stops as soon as it finds a value
551 /// with multiple uses.
552 static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
553 if (Arg->hasOneUse()) {
554 if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
555 return FindSingleUseIdentifiedObject(BC->getOperand(0));
556 if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
557 if (GEP->hasAllZeroIndices())
558 return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
559 if (IsForwarding(GetBasicInstructionClass(Arg)))
560 return FindSingleUseIdentifiedObject(
561 cast<CallInst>(Arg)->getArgOperand(0));
    if (!IsObjCIdentifiedObject(Arg))
      return 0;
    return Arg;
  }
  // If we found an identifiable object but it has multiple uses, and those
  // uses are trivial, we can still consider this to be a single-use value.
569 if (IsObjCIdentifiedObject(Arg)) {
570 for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
         UI != UE; ++UI) {
      const User *U = *UI;
      if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
        return 0;
    }

    return Arg;
  }

  return 0;
}
/// ModuleHasARC - Test if the given module looks interesting to run ARC
/// optimization on.
585 static bool ModuleHasARC(const Module &M) {
587 M.getNamedValue("objc_retain") ||
588 M.getNamedValue("objc_release") ||
589 M.getNamedValue("objc_autorelease") ||
590 M.getNamedValue("objc_retainAutoreleasedReturnValue") ||
591 M.getNamedValue("objc_retainBlock") ||
592 M.getNamedValue("objc_autoreleaseReturnValue") ||
593 M.getNamedValue("objc_autoreleasePoolPush") ||
594 M.getNamedValue("objc_loadWeakRetained") ||
595 M.getNamedValue("objc_loadWeak") ||
596 M.getNamedValue("objc_destroyWeak") ||
597 M.getNamedValue("objc_storeWeak") ||
598 M.getNamedValue("objc_initWeak") ||
599 M.getNamedValue("objc_moveWeak") ||
600 M.getNamedValue("objc_copyWeak") ||
601 M.getNamedValue("objc_retainedObject") ||
602 M.getNamedValue("objc_unretainedObject") ||
603 M.getNamedValue("objc_unretainedPointer");
/// DoesObjCBlockEscape - Test whether the given pointer, which is an
/// Objective-C block pointer, "escapes". This differs from regular escape
/// analysis in that a use as an argument to a call is not considered
/// an escape.
610 static bool DoesObjCBlockEscape(const Value *BlockPtr) {
611 // Walk the def-use chains.
612 SmallVector<const Value *, 4> Worklist;
613 Worklist.push_back(BlockPtr);
  do {
    const Value *V = Worklist.pop_back_val();
    for (Value::const_use_iterator UI = V->use_begin(), UE = V->use_end();
         UI != UE; ++UI) {
618 const User *UUser = *UI;
      // Special - Use by a call (callee or argument) is not considered
      // an escape.
621 switch (GetBasicInstructionClass(UUser)) {
626 case IC_AutoreleaseRV:
        // These special functions make copies of their pointer arguments.
        return true;
      case IC_User:
      case IC_None:
631 // Use by an instruction which copies the value is an escape if the
632 // result is an escape.
633 if (isa<BitCastInst>(UUser) || isa<GetElementPtrInst>(UUser) ||
634 isa<PHINode>(UUser) || isa<SelectInst>(UUser)) {
          Worklist.push_back(UUser);
          continue;
        }
638 // Use by a load is not an escape.
        if (isa<LoadInst>(UUser))
          continue;
641 // Use by a store is not an escape if the use is the address.
642 if (const StoreInst *SI = dyn_cast<StoreInst>(UUser))
          if (V != SI->getValueOperand())
            continue;
        break;
      default:
        // Regular calls and other stuff are not considered escapes.
        continue;
      }
      // Otherwise, conservatively assume an escape.
      return true;
    }
  } while (!Worklist.empty());

  // No escapes found.
  return false;
}
659 //===----------------------------------------------------------------------===//
660 // ARC AliasAnalysis.
661 //===----------------------------------------------------------------------===//
663 #include "llvm/Pass.h"
664 #include "llvm/Analysis/AliasAnalysis.h"
665 #include "llvm/Analysis/Passes.h"
668 /// ObjCARCAliasAnalysis - This is a simple alias analysis
669 /// implementation that uses knowledge of ARC constructs to answer queries.
671 /// TODO: This class could be generalized to know about other ObjC-specific
672 /// tricks. Such as knowing that ivars in the non-fragile ABI are non-aliasing
673 /// even though their offsets are dynamic.
674 class ObjCARCAliasAnalysis : public ImmutablePass,
675 public AliasAnalysis {
  public:
    static char ID; // Class identification, replacement for typeinfo
678 ObjCARCAliasAnalysis() : ImmutablePass(ID) {
679 initializeObjCARCAliasAnalysisPass(*PassRegistry::getPassRegistry());
    }

  private:
    virtual void initializePass() {
      InitializeAliasAnalysis(this);
    }
687 /// getAdjustedAnalysisPointer - This method is used when a pass implements
688 /// an analysis interface through multiple inheritance. If needed, it
689 /// should override this to adjust the this pointer as needed for the
690 /// specified pass info.
691 virtual void *getAdjustedAnalysisPointer(const void *PI) {
692 if (PI == &AliasAnalysis::ID)
        return static_cast<AliasAnalysis *>(this);
      return this;
    }
697 virtual void getAnalysisUsage(AnalysisUsage &AU) const;
698 virtual AliasResult alias(const Location &LocA, const Location &LocB);
699 virtual bool pointsToConstantMemory(const Location &Loc, bool OrLocal);
700 virtual ModRefBehavior getModRefBehavior(ImmutableCallSite CS);
701 virtual ModRefBehavior getModRefBehavior(const Function *F);
702 virtual ModRefResult getModRefInfo(ImmutableCallSite CS,
703 const Location &Loc);
704 virtual ModRefResult getModRefInfo(ImmutableCallSite CS1,
705 ImmutableCallSite CS2);
  };
} // End of anonymous namespace
709 // Register this pass...
710 char ObjCARCAliasAnalysis::ID = 0;
711 INITIALIZE_AG_PASS(ObjCARCAliasAnalysis, AliasAnalysis, "objc-arc-aa",
712 "ObjC-ARC-Based Alias Analysis", false, true, false)
714 ImmutablePass *llvm::createObjCARCAliasAnalysisPass() {
715 return new ObjCARCAliasAnalysis();
void
ObjCARCAliasAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
720 AU.setPreservesAll();
721 AliasAnalysis::getAnalysisUsage(AU);
724 AliasAnalysis::AliasResult
725 ObjCARCAliasAnalysis::alias(const Location &LocA, const Location &LocB) {
  if (!EnableARCOpts)
    return AliasAnalysis::alias(LocA, LocB);
729 // First, strip off no-ops, including ObjC-specific no-ops, and try making a
730 // precise alias query.
731 const Value *SA = StripPointerCastsAndObjCCalls(LocA.Ptr);
732 const Value *SB = StripPointerCastsAndObjCCalls(LocB.Ptr);
  AliasResult Result =
    AliasAnalysis::alias(Location(SA, LocA.Size, LocA.TBAATag),
735 Location(SB, LocB.Size, LocB.TBAATag));
  if (Result != MayAlias)
    return Result;
739 // If that failed, climb to the underlying object, including climbing through
740 // ObjC-specific no-ops, and try making an imprecise alias query.
741 const Value *UA = GetUnderlyingObjCPtr(SA);
742 const Value *UB = GetUnderlyingObjCPtr(SB);
743 if (UA != SA || UB != SB) {
744 Result = AliasAnalysis::alias(Location(UA), Location(UB));
745 // We can't use MustAlias or PartialAlias results here because
746 // GetUnderlyingObjCPtr may return an offsetted pointer value.
    if (Result == NoAlias)
      return NoAlias;
751 // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return MayAlias;
}
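// For example (hypothetical IR), if LocA.Ptr is the result of
// "call i8* @objc_retain(i8* %p)" and LocB.Ptr is a bitcast of %p, the
// stripped query above compares %p with %p and can return MustAlias, where
// a naive query over the unstripped values could only conclude MayAlias,
// since a call result is otherwise opaque to the underlying analysis.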
bool
ObjCARCAliasAnalysis::pointsToConstantMemory(const Location &Loc,
                                             bool OrLocal) {
  if (!EnableARCOpts)
760 return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);
762 // First, strip off no-ops, including ObjC-specific no-ops, and try making
763 // a precise alias query.
764 const Value *S = StripPointerCastsAndObjCCalls(Loc.Ptr);
  if (AliasAnalysis::pointsToConstantMemory(Location(S, Loc.Size, Loc.TBAATag),
                                            OrLocal))
    return true;
769 // If that failed, climb to the underlying object, including climbing through
770 // ObjC-specific no-ops, and try making an imprecise alias query.
771 const Value *U = GetUnderlyingObjCPtr(S);
  if (U != S)
    return AliasAnalysis::pointsToConstantMemory(Location(U), OrLocal);
775 // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return false;
}
780 AliasAnalysis::ModRefBehavior
781 ObjCARCAliasAnalysis::getModRefBehavior(ImmutableCallSite CS) {
782 // We have nothing to do. Just chain to the next AliasAnalysis.
783 return AliasAnalysis::getModRefBehavior(CS);
786 AliasAnalysis::ModRefBehavior
787 ObjCARCAliasAnalysis::getModRefBehavior(const Function *F) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefBehavior(F);
791 switch (GetFunctionClass(F)) {
  case IC_NoopCast:
    return DoesNotAccessMemory;
  default:
    break;
  }

  return AliasAnalysis::getModRefBehavior(F);
}
801 AliasAnalysis::ModRefResult
802 ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS, const Location &Loc) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefInfo(CS, Loc);
806 switch (GetBasicInstructionClass(CS.getInstruction())) {
  case IC_Retain:
  case IC_RetainRV:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_NoopCast:
812 case IC_AutoreleasepoolPush:
813 case IC_FusedRetainAutorelease:
814 case IC_FusedRetainAutoreleaseRV:
815 // These functions don't access any memory visible to the compiler.
816 // Note that this doesn't include objc_retainBlock, because it updates
    // pointers when it copies block data.
    return NoModRef;
  default:
    break;
  }

  return AliasAnalysis::getModRefInfo(CS, Loc);
}
826 AliasAnalysis::ModRefResult
827 ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS1,
828 ImmutableCallSite CS2) {
829 // TODO: Theoretically we could check for dependencies between objc_* calls
830 // and OnlyAccessesArgumentPointees calls or other well-behaved calls.
831 return AliasAnalysis::getModRefInfo(CS1, CS2);
//===----------------------------------------------------------------------===//
// ARC expansion.
//===----------------------------------------------------------------------===//
838 #include "llvm/Support/InstIterator.h"
839 #include "llvm/Transforms/Scalar.h"
842 /// ObjCARCExpand - Early ARC transformations.
843 class ObjCARCExpand : public FunctionPass {
844 virtual void getAnalysisUsage(AnalysisUsage &AU) const;
845 virtual bool doInitialization(Module &M);
846 virtual bool runOnFunction(Function &F);
    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;
  public:
    static char ID;
    ObjCARCExpand() : FunctionPass(ID) {
854 initializeObjCARCExpandPass(*PassRegistry::getPassRegistry());
859 char ObjCARCExpand::ID = 0;
860 INITIALIZE_PASS(ObjCARCExpand,
861 "objc-arc-expand", "ObjC ARC expansion", false, false)
863 Pass *llvm::createObjCARCExpandPass() {
864 return new ObjCARCExpand();
867 void ObjCARCExpand::getAnalysisUsage(AnalysisUsage &AU) const {
868 AU.setPreservesCFG();
871 bool ObjCARCExpand::doInitialization(Module &M) {
  Run = ModuleHasARC(M);
  return false;
}
876 bool ObjCARCExpand::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;
884 bool Changed = false;
886 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ++I) {
887 Instruction *Inst = &*I;
889 switch (GetBasicInstructionClass(Inst)) {
    case IC_Retain:
    case IC_RetainRV:
    case IC_Autorelease:
    case IC_AutoreleaseRV:
894 case IC_FusedRetainAutorelease:
895 case IC_FusedRetainAutoreleaseRV:
896 // These calls return their argument verbatim, as a low-level
897 // optimization. However, this makes high-level optimizations
898 // harder. Undo any uses of this optimization that the front-end
899 // emitted here. We'll redo them in the contract pass.
      Changed = true;
      Inst->replaceAllUsesWith(cast<CallInst>(Inst)->getArgOperand(0));
      break;
    default:
      break;
    }
  }

  return Changed;
}
911 //===----------------------------------------------------------------------===//
912 // ARC autorelease pool elimination.
913 //===----------------------------------------------------------------------===//
915 #include "llvm/Constants.h"
916 #include "llvm/ADT/STLExtras.h"
919 /// ObjCARCAPElim - Autorelease pool elimination.
920 class ObjCARCAPElim : public ModulePass {
921 virtual void getAnalysisUsage(AnalysisUsage &AU) const;
922 virtual bool runOnModule(Module &M);
924 static bool MayAutorelease(ImmutableCallSite CS, unsigned Depth = 0);
925 static bool OptimizeBB(BasicBlock *BB);
  public:
    static char ID;
    ObjCARCAPElim() : ModulePass(ID) {
930 initializeObjCARCAPElimPass(*PassRegistry::getPassRegistry());
935 char ObjCARCAPElim::ID = 0;
936 INITIALIZE_PASS(ObjCARCAPElim,
938 "ObjC ARC autorelease pool elimination",
941 Pass *llvm::createObjCARCAPElimPass() {
942 return new ObjCARCAPElim();
945 void ObjCARCAPElim::getAnalysisUsage(AnalysisUsage &AU) const {
946 AU.setPreservesCFG();
949 /// MayAutorelease - Interprocedurally determine if calls made by the
950 /// given call site can possibly produce autoreleases.
951 bool ObjCARCAPElim::MayAutorelease(ImmutableCallSite CS, unsigned Depth) {
952 if (const Function *Callee = CS.getCalledFunction()) {
    if (Callee->isDeclaration() || Callee->mayBeOverridden())
      return true;
955 for (Function::const_iterator I = Callee->begin(), E = Callee->end();
         I != E; ++I) {
      const BasicBlock *BB = I;
958 for (BasicBlock::const_iterator J = BB->begin(), F = BB->end();
           J != F; ++J)
        if (ImmutableCallSite JCS = ImmutableCallSite(J))
          // This recursion depth limit is arbitrary. It's just large
          // enough to cover known interesting testcases.
          if (Depth < 3 &&
              !JCS.onlyReadsMemory() &&
              MayAutorelease(JCS, Depth + 1))
            return true;
    }
    return false;
  }

  return true;
}
974 bool ObjCARCAPElim::OptimizeBB(BasicBlock *BB) {
975 bool Changed = false;
977 Instruction *Push = 0;
978 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ) {
979 Instruction *Inst = I++;
980 switch (GetBasicInstructionClass(Inst)) {
    case IC_AutoreleasepoolPush:
      Push = Inst;
      break;
984 case IC_AutoreleasepoolPop:
      // If this pop matches a push and nothing in between can autorelease,
      // zap the pair.
987 if (Push && cast<CallInst>(Inst)->getArgOperand(0) == Push) {
        Changed = true;
        Inst->eraseFromParent();
990 Push->eraseFromParent();
      }
      Push = 0;
      break;
    case IC_CallOrUser:
      if (MayAutorelease(ImmutableCallSite(Inst)))
        Push = 0;
      break;
    default:
      break;
    }
  }

  return Changed;
}
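// For example (hypothetical IR), OptimizeBB deletes the matched pair in:
//
//   %pool = call i8* @objc_autoreleasePoolPush()
//   call void @objc_autoreleasePoolPop(i8* %pool)
//
// provided MayAutorelease returns false for every call between them.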
1006 bool ObjCARCAPElim::runOnModule(Module &M) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!ModuleHasARC(M))
    return false;
1014 // Find the llvm.global_ctors variable, as the first step in
1015 // identifying the global constructors. In theory, unnecessary autorelease
1016 // pools could occur anywhere, but in practice it's pretty rare. Global
1017 // ctors are a place where autorelease pools get inserted automatically,
1018 // so it's pretty common for them to be unnecessary, and it's pretty
1019 // profitable to eliminate them.
  GlobalVariable *GV = M.getGlobalVariable("llvm.global_ctors");
  if (!GV)
    return false;
1024 assert(GV->hasDefinitiveInitializer() &&
1025 "llvm.global_ctors is uncooperative!");
1027 bool Changed = false;
1029 // Dig the constructor functions out of GV's initializer.
1030 ConstantArray *Init = cast<ConstantArray>(GV->getInitializer());
  for (User::op_iterator OI = Init->op_begin(), OE = Init->op_end();
       OI != OE; ++OI) {
    Value *Op = *OI;
1034 // llvm.global_ctors is an array of pairs where the second members
1035 // are constructor functions.
1036 Function *F = dyn_cast<Function>(cast<ConstantStruct>(Op)->getOperand(1));
1037 // If the user used a constructor function with the wrong signature and
    // it got bitcasted or whatever, look the other way.
    if (!F)
      continue;
1041 // Only look at function definitions.
    if (F->isDeclaration())
      continue;
1044 // Only look at functions with one basic block.
    if (llvm::next(F->begin()) != F->end())
      continue;
1047 // Ok, a single-block constructor function definition. Try to optimize it.
    Changed |= OptimizeBB(F->begin());
  }

  return Changed;
}
1054 //===----------------------------------------------------------------------===//
1055 // ARC optimization.
1056 //===----------------------------------------------------------------------===//
// TODO: On code like this:
//
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
// stuff_that_cannot_release()
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
//
// The second retain and autorelease can be deleted.
1070 // TODO: It should be possible to delete
1071 // objc_autoreleasePoolPush and objc_autoreleasePoolPop
1072 // pairs if nothing is actually autoreleased between them. Also, autorelease
1073 // calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
1074 // after inlining) can be turned into plain release calls.
// TODO: Critical-edge splitting. If the optimal insertion point is
1077 // a critical edge, the current algorithm has to fail, because it doesn't
1078 // know how to split edges. It should be possible to make the optimizer
// think in terms of edges, rather than blocks, and then split critical
// edges on demand.

// TODO: OptimizeSequences could be generalized to be Interprocedural.
1084 // TODO: Recognize that a bunch of other objc runtime calls have
1085 // non-escaping arguments and non-releasing arguments, and may be
1086 // non-autoreleasing.
1088 // TODO: Sink autorelease calls as far as possible. Unfortunately we
1089 // usually can't sink them past other calls, which would be the main
1090 // case where it would be useful.
1092 // TODO: The pointer returned from objc_loadWeakRetained is retained.
1094 // TODO: Delete release+retain pairs (rare).
1096 #include "llvm/LLVMContext.h"
1097 #include "llvm/Support/CFG.h"
1098 #include "llvm/ADT/Statistic.h"
1099 #include "llvm/ADT/SmallPtrSet.h"
1101 STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
1102 STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
1103 STATISTIC(NumAutoreleases,"Number of autoreleases converted to releases");
1104 STATISTIC(NumRets, "Number of return value forwarding "
1105 "retain+autoreleaes eliminated");
1106 STATISTIC(NumRRs, "Number of retain+release paths eliminated");
1107 STATISTIC(NumPeeps, "Number of calls peephole-optimized");
1110 /// ProvenanceAnalysis - This is similar to BasicAliasAnalysis, and it
1111 /// uses many of the same techniques, except it uses special ObjC-specific
1112 /// reasoning about pointer relationships.
1113 class ProvenanceAnalysis {
    AliasAnalysis *AA;

    typedef std::pair<const Value *, const Value *> ValuePairTy;
1117 typedef DenseMap<ValuePairTy, bool> CachedResultsTy;
1118 CachedResultsTy CachedResults;
1120 bool relatedCheck(const Value *A, const Value *B);
1121 bool relatedSelect(const SelectInst *A, const Value *B);
1122 bool relatedPHI(const PHINode *A, const Value *B);
1124 void operator=(const ProvenanceAnalysis &) LLVM_DELETED_FUNCTION;
1125 ProvenanceAnalysis(const ProvenanceAnalysis &) LLVM_DELETED_FUNCTION;
  public:
    ProvenanceAnalysis() {}
1130 void setAA(AliasAnalysis *aa) { AA = aa; }
1132 AliasAnalysis *getAA() const { return AA; }
1134 bool related(const Value *A, const Value *B);
    void clear() {
      CachedResults.clear();
    }
  };
1142 bool ProvenanceAnalysis::relatedSelect(const SelectInst *A, const Value *B) {
1143 // If the values are Selects with the same condition, we can do a more precise
1144 // check: just check for relations between the values on corresponding arms.
1145 if (const SelectInst *SB = dyn_cast<SelectInst>(B))
1146 if (A->getCondition() == SB->getCondition())
1147 return related(A->getTrueValue(), SB->getTrueValue()) ||
1148 related(A->getFalseValue(), SB->getFalseValue());
1150 // Check both arms of the Select node individually.
1151 return related(A->getTrueValue(), B) ||
1152 related(A->getFalseValue(), B);
1155 bool ProvenanceAnalysis::relatedPHI(const PHINode *A, const Value *B) {
1156 // If the values are PHIs in the same block, we can do a more precise as well
1157 // as efficient check: just check for relations between the values on
1158 // corresponding edges.
1159 if (const PHINode *PNB = dyn_cast<PHINode>(B))
1160 if (PNB->getParent() == A->getParent()) {
1161 for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i)
1162 if (related(A->getIncomingValue(i),
                    PNB->getIncomingValueForBlock(A->getIncomingBlock(i))))
          return true;
      return false;
    }
1168 // Check each unique source of the PHI node against B.
1169 SmallPtrSet<const Value *, 4> UniqueSrc;
1170 for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
1171 const Value *PV1 = A->getIncomingValue(i);
    if (UniqueSrc.insert(PV1) && related(PV1, B))
      return true;
  }
  // All of the arms checked out.
  return false;
}
1180 /// isStoredObjCPointer - Test if the value of P, or any value covered by its
1181 /// provenance, is ever stored within the function (not counting callees).
1182 static bool isStoredObjCPointer(const Value *P) {
1183 SmallPtrSet<const Value *, 8> Visited;
1184 SmallVector<const Value *, 8> Worklist;
1185 Worklist.push_back(P);
  Visited.insert(P);
  do {
    P = Worklist.pop_back_val();
1189 for (Value::const_use_iterator UI = P->use_begin(), UE = P->use_end();
         UI != UE; ++UI) {
      const User *Ur = *UI;
1192 if (isa<StoreInst>(Ur)) {
1193 if (UI.getOperandNo() == 0)
          // The pointer is stored.
          return true;
        // The pointee is stored through.
        continue;
      }
1199 if (isa<CallInst>(Ur))
        // The pointer is passed as an argument, ignore this.
        continue;
1202 if (isa<PtrToIntInst>(P))
        // Assume the worst.
        return true;
1205 if (Visited.insert(Ur))
1206 Worklist.push_back(Ur);
1208 } while (!Worklist.empty());
  // Everything checked out.
  return false;
}
1214 bool ProvenanceAnalysis::relatedCheck(const Value *A, const Value *B) {
1215 // Skip past provenance pass-throughs.
1216 A = GetUnderlyingObjCPtr(A);
  B = GetUnderlyingObjCPtr(B);

  // Quick check.
  if (A == B)
    return true;
1223 // Ask regular AliasAnalysis, for a first approximation.
1224 switch (AA->alias(A, B)) {
  case AliasAnalysis::NoAlias:
    return false;
  case AliasAnalysis::MustAlias:
  case AliasAnalysis::PartialAlias:
    return true;
  case AliasAnalysis::MayAlias:
    break;
  }
1234 bool AIsIdentified = IsObjCIdentifiedObject(A);
1235 bool BIsIdentified = IsObjCIdentifiedObject(B);
1237 // An ObjC-Identified object can't alias a load if it is never locally stored.
1238 if (AIsIdentified) {
1239 // Check for an obvious escape.
1240 if (isa<LoadInst>(B))
1241 return isStoredObjCPointer(A);
1242 if (BIsIdentified) {
1243 // Check for an obvious escape.
1244 if (isa<LoadInst>(A))
1245 return isStoredObjCPointer(B);
      // Both pointers are identified and escapes aren't an evident problem.
      return false;
    }
1249 } else if (BIsIdentified) {
1250 // Check for an obvious escape.
1251 if (isa<LoadInst>(A))
1252 return isStoredObjCPointer(B);
1255 // Special handling for PHI and Select.
1256 if (const PHINode *PN = dyn_cast<PHINode>(A))
1257 return relatedPHI(PN, B);
1258 if (const PHINode *PN = dyn_cast<PHINode>(B))
1259 return relatedPHI(PN, A);
1260 if (const SelectInst *S = dyn_cast<SelectInst>(A))
1261 return relatedSelect(S, B);
1262 if (const SelectInst *S = dyn_cast<SelectInst>(B))
  if (const SelectInst *S = dyn_cast<SelectInst>(B))
    return relatedSelect(S, A);

  // Conservative.
  return true;
}
1269 bool ProvenanceAnalysis::related(const Value *A, const Value *B) {
1270 // Begin by inserting a conservative value into the map. If the insertion
1271 // fails, we have the answer already. If it succeeds, leave it there until we
1272 // compute the real answer to guard against recursive queries.
1273 if (A > B) std::swap(A, B);
1274 std::pair<CachedResultsTy::iterator, bool> Pair =
1275 CachedResults.insert(std::make_pair(ValuePairTy(A, B), true));
  if (!Pair.second)
    return Pair.first->second;
1279 bool Result = relatedCheck(A, B);
  CachedResults[ValuePairTy(A, B)] = Result;
  return Result;
}
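// For example, a related() query over two mutually-referential PHIs
// terminates: the conservative 'true' inserted above is what any recursive
// sub-query for the same pair observes, instead of recursing forever.
// (Illustrative scenario, not from the original commentary.)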
1285 // Sequence - A sequence of states that a pointer may go through in which an
1286 // objc_retain and objc_release are actually needed.
  enum Sequence {
    S_None,
    S_Retain,         ///< objc_retain(x)
1290 S_CanRelease, ///< foo(x) -- x could possibly see a ref count decrement
1291 S_Use, ///< any use of x
1292 S_Stop, ///< like S_Release, but code motion is stopped
1293 S_Release, ///< objc_release(x)
    S_MovableRelease  ///< objc_release(x), !clang.imprecise_release
  };
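// An illustrative (not exhaustive) walk-through: going top-down, a pointer
// typically moves S_None -> S_Retain at an objc_retain, to S_CanRelease at an
// instruction that could decrement its reference count, and to S_Use at a
// use; going bottom-up, it moves S_None -> S_Release (or S_MovableRelease,
// given !clang.imprecise_release) at an objc_release and then to S_Use.
// The traversals below define the exact transitions.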
1298 static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // The easy cases.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;
1305 if (A > B) std::swap(A, B);
  if (TopDown) {
    // Choose the side which is further along in the sequence.
1308 if ((A == S_Retain || A == S_CanRelease) &&
        (B == S_CanRelease || B == S_Use))
      return B;
  } else {
1312 // Choose the side which is further along in the sequence.
1313 if ((A == S_Use || A == S_CanRelease) &&
        (B == S_Use || B == S_Release || B == S_Stop || B == S_MovableRelease))
      return A;
1316 // If both sides are releases, choose the more conservative one.
    if (A == S_Stop && (B == S_Release || B == S_MovableRelease))
      return A;
    if (A == S_Release && B == S_MovableRelease)
      return A;
  }

  return S_None;
}
1327 /// RRInfo - Unidirectional information about either a
1328 /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// KnownSafe - After an objc_retain, the reference count of the referenced
1332 /// object is known to be positive. Similarly, before an objc_release, the
1333 /// reference count of the referenced object is known to be positive. If
1334 /// there are retain-release pairs in code regions where the retain count
1335 /// is known to be positive, they can be eliminated, regardless of any side
1336 /// effects between them.
1338 /// Also, a retain+release pair nested within another retain+release
1339 /// pair all on the known same pointer value can be eliminated, regardless
1340 /// of any intervening side effects.
    /// KnownSafe is true when either of these conditions is satisfied.
    bool KnownSafe;
1345 /// IsRetainBlock - True if the Calls are objc_retainBlock calls (as
    /// opposed to objc_retain calls).
    bool IsRetainBlock;
    /// IsTailCallRelease - True if the objc_release calls are all marked
1350 /// with the "tail" keyword.
1351 bool IsTailCallRelease;
1353 /// ReleaseMetadata - If the Calls are objc_release calls and they all have
1354 /// a clang.imprecise_release tag, this is the metadata tag.
1355 MDNode *ReleaseMetadata;
1357 /// Calls - For a top-down sequence, the set of objc_retains or
1358 /// objc_retainBlocks. For bottom-up, the set of objc_releases.
1359 SmallPtrSet<Instruction *, 2> Calls;
1361 /// ReverseInsertPts - The set of optimal insert positions for
1362 /// moving calls in the opposite sequence.
1363 SmallPtrSet<Instruction *, 2> ReverseInsertPts;
    RRInfo() :
      KnownSafe(false), IsRetainBlock(false),
1367 IsTailCallRelease(false),
      ReleaseMetadata(0) {}

    void clear();
  };
1374 void RRInfo::clear() {
  KnownSafe = false;
  IsRetainBlock = false;
1377 IsTailCallRelease = false;
1378 ReleaseMetadata = 0;
  Calls.clear();
  ReverseInsertPts.clear();
}
1384 /// PtrState - This class summarizes several per-pointer runtime properties
  /// which are propagated through the flow graph.
  class PtrState {
    /// KnownPositiveRefCount - True if the reference count is known to
    /// be incremented.
    bool KnownPositiveRefCount;
    /// Partial - True if we've seen an opportunity for partial RR elimination,
    /// such as pushing calls into a CFG triangle or into one side of a
    /// diamond.
    bool Partial;
    /// Seq - The current position in the sequence.
    Sequence Seq : 8;

  public:
1400 /// RRI - Unidirectional information about the current sequence.
    /// TODO: Encapsulate this better.
    RRInfo RRI;
    PtrState() : KnownPositiveRefCount(false), Partial(false),
                 Seq(S_None) {}
1407 void SetKnownPositiveRefCount() {
1408 KnownPositiveRefCount = true;
1411 void ClearRefCount() {
1412 KnownPositiveRefCount = false;
1415 bool IsKnownIncremented() const {
1416 return KnownPositiveRefCount;
    void SetSeq(Sequence NewSeq) {
      Seq = NewSeq;
    }
    Sequence GetSeq() const {
      return Seq;
    }
1427 void ClearSequenceProgress() {
1428 ResetSequenceProgress(S_None);
    void ResetSequenceProgress(Sequence NewSeq) {
      Seq = NewSeq;
      Partial = false;
      RRI.clear();
    }
    void Merge(const PtrState &Other, bool TopDown);
  };
void
PtrState::Merge(const PtrState &Other, bool TopDown) {
1443 Seq = MergeSeqs(Seq, Other.Seq, TopDown);
1444 KnownPositiveRefCount = KnownPositiveRefCount && Other.KnownPositiveRefCount;
1446 // We can't merge a plain objc_retain with an objc_retainBlock.
  if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
    Seq = S_None;
1450 // If we're not in a sequence (anymore), drop all associated state.
  if (Seq == S_None) {
    Partial = false;
    RRI.clear();
1454 } else if (Partial || Other.Partial) {
1455 // If we're doing a merge on a path that's previously seen a partial
1456 // merge, conservatively drop the sequence, to avoid doing partial
    // RR elimination. If the branch predicates for the two merges differ,
1458 // mixing them is unsafe.
1459 ClearSequenceProgress();
  } else {
    // Conservatively merge the ReleaseMetadata information.
1462 if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
1463 RRI.ReleaseMetadata = 0;
1465 RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
1466 RRI.IsTailCallRelease = RRI.IsTailCallRelease &&
1467 Other.RRI.IsTailCallRelease;
1468 RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());
1470 // Merge the insert point sets. If there are any differences,
1471 // that makes this a partial merge.
1472 Partial = RRI.ReverseInsertPts.size() != Other.RRI.ReverseInsertPts.size();
1473 for (SmallPtrSet<Instruction *, 2>::const_iterator
1474 I = Other.RRI.ReverseInsertPts.begin(),
1475 E = Other.RRI.ReverseInsertPts.end(); I != E; ++I)
1476 Partial |= RRI.ReverseInsertPts.insert(*I);
  /// BBState - Per-BasicBlock state.
  class BBState {
1483 /// TopDownPathCount - The number of unique control paths from the entry
1484 /// which can reach this block.
1485 unsigned TopDownPathCount;
1487 /// BottomUpPathCount - The number of unique control paths to exits
1488 /// from this block.
1489 unsigned BottomUpPathCount;
1491 /// MapTy - A type for PerPtrTopDown and PerPtrBottomUp.
1492 typedef MapVector<const Value *, PtrState> MapTy;
1494 /// PerPtrTopDown - The top-down traversal uses this to record information
1495 /// known about a pointer at the bottom of each block.
1496 MapTy PerPtrTopDown;
1498 /// PerPtrBottomUp - The bottom-up traversal uses this to record information
1499 /// known about a pointer at the top of each block.
1500 MapTy PerPtrBottomUp;
1502 /// Preds, Succs - Effective successors and predecessors of the current
1503 /// block (this ignores ignorable edges and ignored backedges).
1504 SmallVector<BasicBlock *, 2> Preds;
1505 SmallVector<BasicBlock *, 2> Succs;
  public:
    BBState() : TopDownPathCount(0), BottomUpPathCount(0) {}
1510 typedef MapTy::iterator ptr_iterator;
1511 typedef MapTy::const_iterator ptr_const_iterator;
1513 ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
1514 ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
1515 ptr_const_iterator top_down_ptr_begin() const {
1516 return PerPtrTopDown.begin();
1518 ptr_const_iterator top_down_ptr_end() const {
1519 return PerPtrTopDown.end();
1522 ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
1523 ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
1524 ptr_const_iterator bottom_up_ptr_begin() const {
1525 return PerPtrBottomUp.begin();
1527 ptr_const_iterator bottom_up_ptr_end() const {
1528 return PerPtrBottomUp.end();
1531 /// SetAsEntry - Mark this block as being an entry block, which has one
1532 /// path from the entry by definition.
1533 void SetAsEntry() { TopDownPathCount = 1; }
1535 /// SetAsExit - Mark this block as being an exit block, which has one
1536 /// path to an exit by definition.
1537 void SetAsExit() { BottomUpPathCount = 1; }
1539 PtrState &getPtrTopDownState(const Value *Arg) {
1540 return PerPtrTopDown[Arg];
1543 PtrState &getPtrBottomUpState(const Value *Arg) {
1544 return PerPtrBottomUp[Arg];
1547 void clearBottomUpPointers() {
1548 PerPtrBottomUp.clear();
1551 void clearTopDownPointers() {
1552 PerPtrTopDown.clear();
1555 void InitFromPred(const BBState &Other);
1556 void InitFromSucc(const BBState &Other);
1557 void MergePred(const BBState &Other);
1558 void MergeSucc(const BBState &Other);
1560 /// GetAllPathCount - Return the number of possible unique paths from an
1561 /// entry to an exit which pass through this block. This is only valid
1562 /// after both the top-down and bottom-up traversals are complete.
1563 unsigned GetAllPathCount() const {
1564 assert(TopDownPathCount != 0);
1565 assert(BottomUpPathCount != 0);
      return TopDownPathCount * BottomUpPathCount;
    }
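    // For example, in a diamond CFG (entry branching to two blocks that
    // rejoin before a single exit), each arm has TopDownPathCount == 1 and
    // BottomUpPathCount == 1, so GetAllPathCount() == 1 there, while the
    // join block has TopDownPathCount == 2, giving GetAllPathCount() == 2.
    // (Illustrative; the counts come from MergePred and MergeSucc below.)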
1569 // Specialized CFG utilities.
1570 typedef SmallVectorImpl<BasicBlock *>::const_iterator edge_iterator;
1571 edge_iterator pred_begin() { return Preds.begin(); }
1572 edge_iterator pred_end() { return Preds.end(); }
1573 edge_iterator succ_begin() { return Succs.begin(); }
1574 edge_iterator succ_end() { return Succs.end(); }
1576 void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
1577 void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }
1579 bool isExit() const { return Succs.empty(); }
1583 void BBState::InitFromPred(const BBState &Other) {
1584 PerPtrTopDown = Other.PerPtrTopDown;
1585 TopDownPathCount = Other.TopDownPathCount;
1588 void BBState::InitFromSucc(const BBState &Other) {
1589 PerPtrBottomUp = Other.PerPtrBottomUp;
1590 BottomUpPathCount = Other.BottomUpPathCount;
1593 /// MergePred - The top-down traversal uses this to merge information about
1594 /// predecessors to form the initial state for a new block.
1595 void BBState::MergePred(const BBState &Other) {
1596 // Other.TopDownPathCount can be 0, in which case it is either dead or a
1597 // loop backedge. Loop backedges are special.
1598 TopDownPathCount += Other.TopDownPathCount;
1600 // Check for overflow. If we have overflow, fall back to conservative behavior.
1601 if (TopDownPathCount < Other.TopDownPathCount) {
    clearTopDownPointers();
    return;
  }
1606 // For each entry in the other set, if our set has an entry with the same key,
  // merge the entries. Otherwise, copy the entry and merge it with an empty
  // entry.
1609 for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
1610 ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
1611 std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/true);
  }
1616 // For each entry in our set, if the other set doesn't have an entry with the
1617 // same key, force it to merge with an empty entry.
1618 for (ptr_iterator MI = top_down_ptr_begin(),
1619 ME = top_down_ptr_end(); MI != ME; ++MI)
1620 if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
1621 MI->second.Merge(PtrState(), /*TopDown=*/true);
1624 /// MergeSucc - The bottom-up traversal uses this to merge information about
1625 /// successors to form the initial state for a new block.
1626 void BBState::MergeSucc(const BBState &Other) {
1627 // Other.BottomUpPathCount can be 0, in which case it is either dead or a
1628 // loop backedge. Loop backedges are special.
1629 BottomUpPathCount += Other.BottomUpPathCount;
1631 // Check for overflow. If we have overflow, fall back to conservative behavior.
1632 if (BottomUpPathCount < Other.BottomUpPathCount) {
    clearBottomUpPointers();
    return;
  }
1637 // For each entry in the other set, if our set has an entry with the
1638 // same key, merge the entries. Otherwise, copy the entry and merge
1639 // it with an empty entry.
1640 for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
1641 ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
1642 std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/false);
  }
1647 // For each entry in our set, if the other set doesn't have an entry
1648 // with the same key, force it to merge with an empty entry.
1649 for (ptr_iterator MI = bottom_up_ptr_begin(),
1650 ME = bottom_up_ptr_end(); MI != ME; ++MI)
1651 if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
1652 MI->second.Merge(PtrState(), /*TopDown=*/false);
1656 /// ObjCARCOpt - The main ARC optimization pass.
1657 class ObjCARCOpt : public FunctionPass {
    bool Changed;
    ProvenanceAnalysis PA;
    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;
1664 /// RetainRVCallee, etc. - Declarations for ObjC runtime
1665 /// functions, for use in creating calls to them. These are initialized
1666 /// lazily to avoid cluttering up the Module with unused declarations.
1667 Constant *RetainRVCallee, *AutoreleaseRVCallee, *ReleaseCallee,
1668 *RetainCallee, *RetainBlockCallee, *AutoreleaseCallee;
    /// UsedInThisFunction - Flags which determine whether each of the
    /// interesting runtime functions is in fact used in the current function.
1672 unsigned UsedInThisFunction;
    /// ImpreciseReleaseMDKind - The Metadata Kind for clang.imprecise_release
    /// metadata.
1676 unsigned ImpreciseReleaseMDKind;
    /// CopyOnEscapeMDKind - The Metadata Kind for clang.arc.copy_on_escape
    /// metadata.
1680 unsigned CopyOnEscapeMDKind;
1682 /// NoObjCARCExceptionsMDKind - The Metadata Kind for
1683 /// clang.arc.no_objc_arc_exceptions metadata.
1684 unsigned NoObjCARCExceptionsMDKind;
1686 Constant *getRetainRVCallee(Module *M);
1687 Constant *getAutoreleaseRVCallee(Module *M);
1688 Constant *getReleaseCallee(Module *M);
1689 Constant *getRetainCallee(Module *M);
1690 Constant *getRetainBlockCallee(Module *M);
1691 Constant *getAutoreleaseCallee(Module *M);
1693 bool IsRetainBlockOptimizable(const Instruction *Inst);
1695 void OptimizeRetainCall(Function &F, Instruction *Retain);
1696 bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
1697 void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV);
1698 void OptimizeIndividualCalls(Function &F);
1700 void CheckForCFGHazards(const BasicBlock *BB,
1701 DenseMap<const BasicBlock *, BBState> &BBStates,
1702 BBState &MyStates) const;
1703 bool VisitInstructionBottomUp(Instruction *Inst,
                                  BasicBlock *BB,
                                  MapVector<Value *, RRInfo> &Retains,
                                  BBState &MyStates);
1707 bool VisitBottomUp(BasicBlock *BB,
1708 DenseMap<const BasicBlock *, BBState> &BBStates,
1709 MapVector<Value *, RRInfo> &Retains);
1710 bool VisitInstructionTopDown(Instruction *Inst,
                                 DenseMap<Value *, RRInfo> &Releases,
                                 BBState &MyStates);
1713 bool VisitTopDown(BasicBlock *BB,
1714 DenseMap<const BasicBlock *, BBState> &BBStates,
1715 DenseMap<Value *, RRInfo> &Releases);
1716 bool Visit(Function &F,
1717 DenseMap<const BasicBlock *, BBState> &BBStates,
1718 MapVector<Value *, RRInfo> &Retains,
1719 DenseMap<Value *, RRInfo> &Releases);
1721 void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
1722 MapVector<Value *, RRInfo> &Retains,
1723 DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts,
                   Module *M);
1727 bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
1728 MapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases,
                              Module *M);
1732 void OptimizeWeakCalls(Function &F);
1734 bool OptimizeSequences(Function &F);
1736 void OptimizeReturns(Function &F);
1738 virtual void getAnalysisUsage(AnalysisUsage &AU) const;
1739 virtual bool doInitialization(Module &M);
1740 virtual bool runOnFunction(Function &F);
1741 virtual void releaseMemory();
1745 ObjCARCOpt() : FunctionPass(ID) {
1746 initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
1751 char ObjCARCOpt::ID = 0;
1752 INITIALIZE_PASS_BEGIN(ObjCARCOpt,
1753 "objc-arc", "ObjC ARC optimization", false, false)
1754 INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
1755 INITIALIZE_PASS_END(ObjCARCOpt,
1756 "objc-arc", "ObjC ARC optimization", false, false)
1758 Pass *llvm::createObjCARCOptPass() {
1759 return new ObjCARCOpt();
1762 void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
1763 AU.addRequired<ObjCARCAliasAnalysis>();
1764 AU.addRequired<AliasAnalysis>();
1765 // ARC optimization doesn't currently split critical edges.
1766 AU.setPreservesCFG();
1769 bool ObjCARCOpt::IsRetainBlockOptimizable(const Instruction *Inst) {
1770 // Without the magic metadata tag, we have to assume this might be an
1771 // objc_retainBlock call inserted to convert a block pointer to an id,
1772 // in which case it really is needed.
  if (!Inst->getMetadata(CopyOnEscapeMDKind))
    return false;
1776 // If the pointer "escapes" (not including being used in a call),
1777 // the copy may be needed.
  if (DoesObjCBlockEscape(Inst))
    return false;

  // Otherwise, it's not needed.
  return true;
}
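
/// The getters below lazily create module-level declarations for the ObjC
/// runtime entry points used by this pass. For illustration, the declarations
/// they produce are equivalent to:
///   declare i8* @objc_retainAutoreleasedReturnValue(i8*) nounwind
///   declare i8* @objc_autoreleaseReturnValue(i8*) nounwind
///   declare void @objc_release(i8*) nounwind
///   declare i8* @objc_retain(i8*) nounwind
///   declare i8* @objc_retainBlock(i8*)  ; deliberately not nounwind
///   declare i8* @objc_autorelease(i8*) nounwind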
1785 Constant *ObjCARCOpt::getRetainRVCallee(Module *M) {
1786 if (!RetainRVCallee) {
1787 LLVMContext &C = M->getContext();
1788 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
1789 Type *Params[] = { I8X };
1790 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
1791 Attributes::Builder B;
1792 B.addAttribute(Attributes::NoUnwind);
1793 AttrListPtr Attributes = AttrListPtr().addAttr(M->getContext(), ~0u,
1794 Attributes::get(B));
    RetainRVCallee =
      M->getOrInsertFunction("objc_retainAutoreleasedReturnValue", FTy,
                             Attributes);
1799 return RetainRVCallee;
1802 Constant *ObjCARCOpt::getAutoreleaseRVCallee(Module *M) {
1803 if (!AutoreleaseRVCallee) {
1804 LLVMContext &C = M->getContext();
1805 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
1806 Type *Params[] = { I8X };
1807 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
1808 Attributes::Builder B;
1809 B.addAttribute(Attributes::NoUnwind);
1810 AttrListPtr Attributes = AttrListPtr().addAttr(M->getContext(), ~0u,
1811 Attributes::get(B));
1812 AutoreleaseRVCallee =
      M->getOrInsertFunction("objc_autoreleaseReturnValue", FTy,
                             Attributes);
1816 return AutoreleaseRVCallee;
1819 Constant *ObjCARCOpt::getReleaseCallee(Module *M) {
1820 if (!ReleaseCallee) {
1821 LLVMContext &C = M->getContext();
1822 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
1823 Attributes::Builder B;
1824 B.addAttribute(Attributes::NoUnwind);
1825 AttrListPtr Attributes = AttrListPtr().addAttr(M->getContext(), ~0u,
1826 Attributes::get(B));
    ReleaseCallee =
      M->getOrInsertFunction(
        "objc_release",
        FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
        Attributes);
1833 return ReleaseCallee;
1836 Constant *ObjCARCOpt::getRetainCallee(Module *M) {
1837 if (!RetainCallee) {
1838 LLVMContext &C = M->getContext();
1839 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
1840 Attributes::Builder B;
1841 B.addAttribute(Attributes::NoUnwind);
1842 AttrListPtr Attributes = AttrListPtr().addAttr(M->getContext(), ~0u,
1843 Attributes::get(B));
    RetainCallee =
      M->getOrInsertFunction(
        "objc_retain",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
1850 return RetainCallee;
1853 Constant *ObjCARCOpt::getRetainBlockCallee(Module *M) {
1854 if (!RetainBlockCallee) {
1855 LLVMContext &C = M->getContext();
1856 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
1857 // objc_retainBlock is not nounwind because it calls user copy constructors
1858 // which could theoretically throw.
    RetainBlockCallee =
      M->getOrInsertFunction(
        "objc_retainBlock",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        AttrListPtr());
1865 return RetainBlockCallee;
1868 Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) {
1869 if (!AutoreleaseCallee) {
1870 LLVMContext &C = M->getContext();
1871 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
1872 Attributes::Builder B;
1873 B.addAttribute(Attributes::NoUnwind);
1874 AttrListPtr Attributes = AttrListPtr().addAttr(M->getContext(), ~0u,
1875 Attributes::get(B));
    AutoreleaseCallee =
      M->getOrInsertFunction(
        "objc_autorelease",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
1882 return AutoreleaseCallee;
/// IsPotentialUse - Test whether the given value is possibly a
1886 /// reference-counted pointer, including tests which utilize AliasAnalysis.
1887 static bool IsPotentialUse(const Value *Op, AliasAnalysis &AA) {
1888 // First make the rudimentary check.
1889 if (!IsPotentialUse(Op))
1892 // Objects in constant memory are not reference-counted.
1893 if (AA.pointsToConstantMemory(Op))
1896 // Pointers in constant memory are not pointing to reference-counted objects.
1897 if (const LoadInst *LI = dyn_cast<LoadInst>(Op))
1898 if (AA.pointsToConstantMemory(LI->getPointerOperand()))
1901 // Otherwise assume the worst.
/// CanAlterRefCount - Test whether the given instruction can result in a
/// reference count modification (positive or negative) for the pointer's
/// object.
CanAlterRefCount(const Instruction *Inst, const Value *Ptr,
                 ProvenanceAnalysis &PA, InstructionClass Class) {
  switch (Class) {
1912 case IC_Autorelease:
1913 case IC_AutoreleaseRV:
    // These operations never directly modify a reference count.
    return false;
  default: break;
  }
1920 ImmutableCallSite CS = static_cast<const Value *>(Inst);
1921 assert(CS && "Only calls can alter reference counts!");
1923 // See if AliasAnalysis can help us with the call.
1924 AliasAnalysis::ModRefBehavior MRB = PA.getAA()->getModRefBehavior(CS);
  if (AliasAnalysis::onlyReadsMemory(MRB))
    return false;
1927 if (AliasAnalysis::onlyAccessesArgPointees(MRB)) {
    for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
         I != E; ++I) {
      const Value *Op = *I;
      if (IsPotentialUse(Op, *PA.getAA()) && PA.related(Ptr, Op))
        return true;
    }
    return false;
  }
  // Assume the worst.
  return true;
}
1941 /// CanUse - Test whether the given instruction can "use" the given pointer's
1942 /// object in a way that requires the reference count to be positive.
1944 CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
1945 InstructionClass Class) {
1946 // IC_Call operations (as opposed to IC_CallOrUser) never "use" objc pointers.
  if (Class == IC_Call)
    return false;
  // Consider various instructions which may have pointer arguments which are
  // not "uses" for reference-counting purposes.
1952 if (const ICmpInst *ICI = dyn_cast<ICmpInst>(Inst)) {
1953 // Comparing a pointer with null, or any other constant, isn't really a use,
1954 // because we don't care what the pointer points to, or about the values
1955 // of any other dynamic reference-counted pointers.
    if (!IsPotentialUse(ICI->getOperand(1), *PA.getAA()))
      return false;
1958 } else if (ImmutableCallSite CS = static_cast<const Value *>(Inst)) {
1959 // For calls, just check the arguments (and not the callee operand).
1960 for (ImmutableCallSite::arg_iterator OI = CS.arg_begin(),
1961 OE = CS.arg_end(); OI != OE; ++OI) {
1962 const Value *Op = *OI;
      if (IsPotentialUse(Op, *PA.getAA()) && PA.related(Ptr, Op))
        return true;
    }
    return false;
1967 } else if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
1968 // Special-case stores, because we don't care about the stored value, just
1969 // the store address.
1970 const Value *Op = GetUnderlyingObjCPtr(SI->getPointerOperand());
    // If we can't tell what the underlying object was, assume there is a
    // dependence.
1973 return IsPotentialUse(Op, *PA.getAA()) && PA.related(Op, Ptr);
1976 // Check each operand for a match.
  for (User::const_op_iterator OI = Inst->op_begin(), OE = Inst->op_end();
       OI != OE; ++OI) {
    const Value *Op = *OI;
    if (IsPotentialUse(Op, *PA.getAA()) && PA.related(Ptr, Op))
      return true;
  }

  return false;
}
1986 /// CanInterruptRV - Test whether the given instruction can autorelease
1987 /// any pointer or cause an autoreleasepool pop.
CanInterruptRV(InstructionClass Class) {
  switch (Class) {
  case IC_AutoreleasepoolPop:
  case IC_CallOrUser:
  case IC_Call:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    return true;
  default:
    return false;
  }
}
/// DependenceKind - There are several kinds of dependence-like concepts in
/// use here.
2007 enum DependenceKind {
  NeedsPositiveRetainCount, ///< Blocked by anything that can use the pointer.
  AutoreleasePoolBoundary,  ///< Blocked by autoreleasepool push and pop.
  CanChangeRetainCount,     ///< Blocked by anything that can alter a count.
2011 RetainAutoreleaseDep, ///< Blocks objc_retainAutorelease.
2012 RetainAutoreleaseRVDep, ///< Blocks objc_retainAutoreleaseReturnValue.
2013 RetainRVDep ///< Blocks objc_retainAutoreleasedReturnValue.
2017 /// Depends - Test if there can be dependencies on Inst through Arg. This
2018 /// function only tests dependencies relevant for removing pairs of calls.
2020 Depends(DependenceKind Flavor, Instruction *Inst, const Value *Arg,
2021 ProvenanceAnalysis &PA) {
  // If we've reached the definition of Arg, stop.
  if (Inst == Arg)
    return true;

  switch (Flavor) {
2027 case NeedsPositiveRetainCount: {
2028 InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
    case IC_AutoreleasepoolPush:
    case IC_None:
      return false;
    default:
      return CanUse(Inst, Arg, PA, Class);
    }
  }
2039 case AutoreleasePoolBoundary: {
2040 InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
    case IC_AutoreleasepoolPush:
      // These mark the end and begin of an autorelease pool scope.
      return true;
    default:
      // Nothing else does this.
      return false;
    }
  }
2052 case CanChangeRetainCount: {
2053 InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
      // Conservatively assume this can decrement any count.
      return true;
    case IC_AutoreleasepoolPush:
      return false;
    default:
      return CanAlterRefCount(Inst, Arg, PA, Class);
    }
  }
2066 case RetainAutoreleaseDep:
2067 switch (GetBasicInstructionClass(Inst)) {
2068 case IC_AutoreleasepoolPop:
2069 case IC_AutoreleasepoolPush:
      // Don't merge an objc_autorelease with an objc_retain inside a different
      // autoreleasepool scope.
      return true;
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Nothing else matters for objc_retainAutorelease formation.
      return false;
    }
2082 case RetainAutoreleaseRVDep: {
2083 InstructionClass Class = GetBasicInstructionClass(Inst);
    switch (Class) {
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Anything that can autorelease interrupts
      // retainAutoreleaseReturnValue formation.
      return CanInterruptRV(Class);
    }
  }
  case RetainRVDep:
    return CanInterruptRV(GetBasicInstructionClass(Inst));
  }
2100 llvm_unreachable("Invalid dependence flavor");
2103 /// FindDependencies - Walk up the CFG from StartPos (which is in StartBB) and
2104 /// find local and non-local dependencies on Arg.
2105 /// TODO: Cache results?
FindDependencies(DependenceKind Flavor,
                 const Value *Arg,
                 BasicBlock *StartBB, Instruction *StartInst,
2110 SmallPtrSet<Instruction *, 4> &DependingInstructions,
2111 SmallPtrSet<const BasicBlock *, 4> &Visited,
2112 ProvenanceAnalysis &PA) {
2113 BasicBlock::iterator StartPos = StartInst;
2115 SmallVector<std::pair<BasicBlock *, BasicBlock::iterator>, 4> Worklist;
2116 Worklist.push_back(std::make_pair(StartBB, StartPos));
  do {
    std::pair<BasicBlock *, BasicBlock::iterator> Pair =
      Worklist.pop_back_val();
2120 BasicBlock *LocalStartBB = Pair.first;
2121 BasicBlock::iterator LocalStartPos = Pair.second;
2122 BasicBlock::iterator StartBBBegin = LocalStartBB->begin();
    for (;;) {
      if (LocalStartPos == StartBBBegin) {
        pred_iterator PI(LocalStartBB), PE(LocalStartBB, false);
        if (PI == PE)
          // If we've reached the function entry, produce a null dependence.
          DependingInstructions.insert(0);
        else
          // Add the predecessors to the worklist.
          do {
            BasicBlock *PredBB = *PI;
            if (Visited.insert(PredBB))
              Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
          } while (++PI != PE);
        break;
      }

      Instruction *Inst = --LocalStartPos;
      if (Depends(Flavor, Inst, Arg, PA)) {
        DependingInstructions.insert(Inst);
        break;
      }
    }
2145 } while (!Worklist.empty());
2147 // Determine whether the original StartBB post-dominates all of the blocks we
  // visited. If not, insert a sentinel indicating that most optimizations are
  // not safe.
2150 for (SmallPtrSet<const BasicBlock *, 4>::const_iterator I = Visited.begin(),
2151 E = Visited.end(); I != E; ++I) {
2152 const BasicBlock *BB = *I;
2155 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2156 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
2157 const BasicBlock *Succ = *SI;
2158 if (Succ != StartBB && !Visited.count(Succ)) {
2159 DependingInstructions.insert(reinterpret_cast<Instruction *>(-1));
2166 static bool isNullOrUndef(const Value *V) {
2167 return isa<ConstantPointerNull>(V) || isa<UndefValue>(V);
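
/// isNoopInstruction - Return true for value-preserving pointer operations:
/// bitcasts, and getelementptrs with all-zero indices, both of which yield
/// the same pointer value they were given.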
2170 static bool isNoopInstruction(const Instruction *I) {
2171 return isa<BitCastInst>(I) ||
2172 (isa<GetElementPtrInst>(I) &&
2173 cast<GetElementPtrInst>(I)->hasAllZeroIndices());
2176 /// OptimizeRetainCall - Turn objc_retain into
2177 /// objc_retainAutoreleasedReturnValue if the operand is a return value.
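///
/// For example (illustrative IR):
///   %call = call i8* @something(...)
///   %0 = call i8* @objc_retain(i8* %call)
/// becomes
///   %call = call i8* @something(...)
///   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)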
2179 ObjCARCOpt::OptimizeRetainCall(Function &F, Instruction *Retain) {
2180 ImmutableCallSite CS(GetObjCArg(Retain));
2181 const Instruction *Call = CS.getInstruction();
2183 if (Call->getParent() != Retain->getParent()) return;
2185 // Check that the call is next to the retain.
  BasicBlock::const_iterator I = Call;
  ++I;
  while (isNoopInstruction(I)) ++I;
  if (&*I != Retain)
    return;
  // Turn it into an objc_retainAutoreleasedReturnValue.
2195 cast<CallInst>(Retain)->setCalledFunction(getRetainRVCallee(F.getParent()));
2198 /// OptimizeRetainRVCall - Turn objc_retainAutoreleasedReturnValue into
2199 /// objc_retain if the operand is not a return value. Or, if it can be paired
2200 /// with an objc_autoreleaseReturnValue, delete the pair and return true.
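///
/// For example (illustrative IR), a pair such as:
///   %0 = call i8* @objc_autoreleaseReturnValue(i8* %p)
///   %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %p)
/// cancels out, and both calls are deleted.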
2202 ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
2203 // Check for the argument being from an immediately preceding call or invoke.
2204 const Value *Arg = GetObjCArg(RetainRV);
2205 ImmutableCallSite CS(Arg);
2206 if (const Instruction *Call = CS.getInstruction()) {
2207 if (Call->getParent() == RetainRV->getParent()) {
      BasicBlock::const_iterator I = Call;
      ++I;
      while (isNoopInstruction(I)) ++I;
      if (&*I == RetainRV)
        return false;
2213 } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
2214 BasicBlock *RetainRVParent = RetainRV->getParent();
2215 if (II->getNormalDest() == RetainRVParent) {
2216 BasicBlock::const_iterator I = RetainRVParent->begin();
2217 while (isNoopInstruction(I)) ++I;
        if (&*I == RetainRV)
          return false;
2224 // Check for being preceded by an objc_autoreleaseReturnValue on the same
2225 // pointer. In this case, we can delete the pair.
2226 BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
2228 do --I; while (I != Begin && isNoopInstruction(I));
2229 if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
2230 GetObjCArg(I) == Arg) {
2233 EraseInstruction(I);
    EraseInstruction(RetainRV);
    return true;
  }
  // Turn it into a plain objc_retain.
2242 cast<CallInst>(RetainRV)->setCalledFunction(getRetainCallee(F.getParent()));
2246 /// OptimizeAutoreleaseRVCall - Turn objc_autoreleaseReturnValue into
2247 /// objc_autorelease if the result is not used as a return value.
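///
/// For example (illustrative IR), when %0 is not returned:
///   %0 = call i8* @objc_autoreleaseReturnValue(i8* %p)
/// becomes
///   %0 = call i8* @objc_autorelease(i8* %p)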
2249 ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV) {
2250 // Check for a return of the pointer value.
2251 const Value *Ptr = GetObjCArg(AutoreleaseRV);
2252 SmallVector<const Value *, 2> Users;
2253 Users.push_back(Ptr);
  do {
    Ptr = Users.pop_back_val();
2256 for (Value::const_use_iterator UI = Ptr->use_begin(), UE = Ptr->use_end();
2258 const User *I = *UI;
      if (isa<ReturnInst>(I) || GetBasicInstructionClass(I) == IC_RetainRV)
        return;
      if (isa<BitCastInst>(I))
        Users.push_back(I);
2264 } while (!Users.empty());
2268 cast<CallInst>(AutoreleaseRV)->
2269 setCalledFunction(getAutoreleaseCallee(F.getParent()));
2272 /// OptimizeIndividualCalls - Visit each call, one at a time, and make
2273 /// simplifications without doing any additional analysis.
2274 void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
2275 // Reset all the flags in preparation for recomputing them.
2276 UsedInThisFunction = 0;
2278 // Visit all objc_* calls in F.
2279 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2280 Instruction *Inst = &*I++;
2281 InstructionClass Class = GetBasicInstructionClass(Inst);
2286 // Delete no-op casts. These function calls have special semantics, but
2287 // the semantics are entirely implemented via lowering in the front-end,
2288 // so by the time they reach the optimizer, they are just no-op calls
2289 // which return their argument.
    // There are gray areas here, as the ability to cast reference-counted
    // pointers to raw void* and back allows code to break ARC assumptions;
    // however, these cases are currently considered to be unimportant.
2297 EraseInstruction(Inst);
2300 // If the pointer-to-weak-pointer is null, it's undefined behavior.
    case IC_StoreWeak:
    case IC_LoadWeak:
    case IC_LoadWeakRetained:
    case IC_InitWeak:
    case IC_DestroyWeak: {
2306 CallInst *CI = cast<CallInst>(Inst);
2307 if (isNullOrUndef(CI->getArgOperand(0))) {
2309 Type *Ty = CI->getArgOperand(0)->getType();
2310 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);
2313 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
2314 CI->eraseFromParent();
        continue;
      }
      break;
    }
    case IC_CopyWeak:
    case IC_MoveWeak: {
      CallInst *CI = cast<CallInst>(Inst);
2322 if (isNullOrUndef(CI->getArgOperand(0)) ||
2323 isNullOrUndef(CI->getArgOperand(1))) {
2325 Type *Ty = CI->getArgOperand(0)->getType();
2326 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);
2329 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
2330 CI->eraseFromParent();
        continue;
      }
      break;
    }
    case IC_Retain:
      OptimizeRetainCall(F, Inst);
      break;
    case IC_RetainRV:
      if (OptimizeRetainRVCall(F, Inst))
        continue;
      break;
2342 case IC_AutoreleaseRV:
      OptimizeAutoreleaseRVCall(F, Inst);
      break;
    }
2347 // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
2348 if (IsAutorelease(Class) && Inst->use_empty()) {
2349 CallInst *Call = cast<CallInst>(Inst);
2350 const Value *Arg = Call->getArgOperand(0);
2351 Arg = FindSingleUseIdentifiedObject(Arg);
2356 // Create the declaration lazily.
2357 LLVMContext &C = Inst->getContext();
          CallInst *NewCall =
            CallInst::Create(getReleaseCallee(F.getParent()),
                             Call->getArgOperand(0), "", Call);
2361 NewCall->setMetadata(ImpreciseReleaseMDKind,
2362 MDNode::get(C, ArrayRef<Value *>()));
2363 EraseInstruction(Call);
    // For functions which can never be passed stack arguments, add
    // a tail keyword.
2371 if (IsAlwaysTail(Class)) {
2373 cast<CallInst>(Inst)->setTailCall();
2376 // Set nounwind as needed.
2377 if (IsNoThrow(Class)) {
2379 cast<CallInst>(Inst)->setDoesNotThrow();
2382 if (!IsNoopOnNull(Class)) {
      UsedInThisFunction |= 1 << Class;
      continue;
    }
2387 const Value *Arg = GetObjCArg(Inst);
2389 // ARC calls with null are no-ops. Delete them.
2390 if (isNullOrUndef(Arg)) {
      EraseInstruction(Inst);
      continue;
    }
2397 // Keep track of which of retain, release, autorelease, and retain_block
2398 // are actually present in this function.
2399 UsedInThisFunction |= 1 << Class;
2401 // If Arg is a PHI, and one or more incoming values to the
2402 // PHI are null, and the call is control-equivalent to the PHI, and there
2403 // are no relevant side effects between the PHI and the call, the call
2404 // could be pushed up to just those paths with non-null incoming values.
2405 // For now, don't bother splitting critical edges for this.
2406 SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
2407 Worklist.push_back(std::make_pair(Inst, Arg));
    do {
      std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
      Inst = Pair.first;
      Arg = Pair.second;
2413 const PHINode *PN = dyn_cast<PHINode>(Arg);
      // Determine if the PHI has any null operands, or any incoming
      // critical edges.
2418 bool HasNull = false;
2419 bool HasCriticalEdges = false;
2420 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2422 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2423 if (isNullOrUndef(Incoming))
2425 else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
2426 .getNumSuccessors() != 1) {
2427 HasCriticalEdges = true;
2431 // If we have null operands and no critical edges, optimize.
2432 if (!HasCriticalEdges && HasNull) {
2433 SmallPtrSet<Instruction *, 4> DependingInstructions;
2434 SmallPtrSet<const BasicBlock *, 4> Visited;
2436 // Check that there is nothing that cares about the reference
2437 // count between the call and the phi.
2440 case IC_RetainBlock:
2441 // These can always be moved up.
          // These can't be moved across things that care about the retain
          // count.
2446 FindDependencies(NeedsPositiveRetainCount, Arg,
2447 Inst->getParent(), Inst,
2448 DependingInstructions, Visited, PA);
2450 case IC_Autorelease:
2451 // These can't be moved across autorelease pool scope boundaries.
2452 FindDependencies(AutoreleasePoolBoundary, Arg,
2453 Inst->getParent(), Inst,
2454 DependingInstructions, Visited, PA);
2457 case IC_AutoreleaseRV:
2458 // Don't move these; the RV optimization depends on the autoreleaseRV
2459 // being tail called, and the retainRV being immediately after a call
2460 // (which might still happen if we get lucky with codegen layout, but
2461 // it's not worth taking the chance).
2464 llvm_unreachable("Invalid dependence flavor");
2467 if (DependingInstructions.size() == 1 &&
2468 *DependingInstructions.begin() == PN) {
2471 // Clone the call into each predecessor that has a non-null value.
2472 CallInst *CInst = cast<CallInst>(Inst);
2473 Type *ParamTy = CInst->getArgOperand(0)->getType();
2474 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2476 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2477 if (!isNullOrUndef(Incoming)) {
2478 CallInst *Clone = cast<CallInst>(CInst->clone());
2479 Value *Op = PN->getIncomingValue(i);
2480 Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
2481 if (Op->getType() != ParamTy)
2482 Op = new BitCastInst(Op, ParamTy, "", InsertPos);
2483 Clone->setArgOperand(0, Op);
2484 Clone->insertBefore(InsertPos);
2485 Worklist.push_back(std::make_pair(Clone, Incoming));
2488 // Erase the original call.
2489 EraseInstruction(CInst);
2493 } while (!Worklist.empty());
2497 /// CheckForCFGHazards - Check for critical edges, loop boundaries, irreducible
2498 /// control flow, or other CFG structures where moving code across the edge
2499 /// would result in it being executed more.
2501 ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
2502 DenseMap<const BasicBlock *, BBState> &BBStates,
2503 BBState &MyStates) const {
  // If any top-down local-use or possible-decrement has a successor which is
  // earlier in the sequence, forget it.
2506 for (BBState::ptr_iterator I = MyStates.top_down_ptr_begin(),
2507 E = MyStates.top_down_ptr_end(); I != E; ++I)
2508 switch (I->second.GetSeq()) {
2511 const Value *Arg = I->first;
2512 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2513 bool SomeSuccHasSame = false;
2514 bool AllSuccsHaveSame = true;
2515 PtrState &S = I->second;
2516 succ_const_iterator SI(TI), SE(TI, false);
2518 // If the terminator is an invoke marked with the
2519 // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
2520 // ignored, for ARC purposes.
      if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
        --SE;
2524 for (; SI != SE; ++SI) {
2525 Sequence SuccSSeq = S_None;
2526 bool SuccSRRIKnownSafe = false;
2527 // If VisitBottomUp has pointer information for this successor, take
2528 // what we know about it.
        DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(*SI);
2531 assert(BBI != BBStates.end());
2532 const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
2533 SuccSSeq = SuccS.GetSeq();
2534 SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
2537 case S_CanRelease: {
2538 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
2539 S.ClearSequenceProgress();
2545 SomeSuccHasSame = true;
2549 case S_MovableRelease:
2550 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
2551 AllSuccsHaveSame = false;
2554 llvm_unreachable("bottom-up pointer in retain state!");
2557 // If the state at the other end of any of the successor edges
2558 // matches the current state, require all edges to match. This
2559 // guards against loops in the middle of a sequence.
2560 if (SomeSuccHasSame && !AllSuccsHaveSame)
2561 S.ClearSequenceProgress();
2564 case S_CanRelease: {
2565 const Value *Arg = I->first;
2566 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2567 bool SomeSuccHasSame = false;
2568 bool AllSuccsHaveSame = true;
2569 PtrState &S = I->second;
2570 succ_const_iterator SI(TI), SE(TI, false);
2572 // If the terminator is an invoke marked with the
2573 // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
2574 // ignored, for ARC purposes.
      if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
        --SE;
2578 for (; SI != SE; ++SI) {
2579 Sequence SuccSSeq = S_None;
2580 bool SuccSRRIKnownSafe = false;
2581 // If VisitBottomUp has pointer information for this successor, take
2582 // what we know about it.
        DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(*SI);
2585 assert(BBI != BBStates.end());
2586 const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
2587 SuccSSeq = SuccS.GetSeq();
2588 SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
2591 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
2592 S.ClearSequenceProgress();
2598 SomeSuccHasSame = true;
2602 case S_MovableRelease:
2604 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
2605 AllSuccsHaveSame = false;
2608 llvm_unreachable("bottom-up pointer in retain state!");
2611 // If the state at the other end of any of the successor edges
2612 // matches the current state, require all edges to match. This
2613 // guards against loops in the middle of a sequence.
2614 if (SomeSuccHasSame && !AllSuccsHaveSame)
2615 S.ClearSequenceProgress();
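
/// VisitInstructionBottomUp - Analyze a single instruction in the bottom-up
/// direction, updating the per-pointer sequence state in MyStates and, when a
/// complete candidate is found, recording its RRInfo in Retains.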
2622 ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
2624 MapVector<Value *, RRInfo> &Retains,
2625 BBState &MyStates) {
2626 bool NestingDetected = false;
2627 InstructionClass Class = GetInstructionClass(Inst);
2628 const Value *Arg = 0;
2632 Arg = GetObjCArg(Inst);
2634 PtrState &S = MyStates.getPtrBottomUpState(Arg);
      // Check for two releases in a row on the same pointer. If we see one,
      // make a note, and we'll circle back to revisit it after we've
      // hopefully eliminated the second release, which may allow us to
      // eliminate the first release too.
2640 // Theoretically we could implement removal of nested retain+release
2641 // pairs by making PtrState hold a stack of states, but this is
2642 // simple and avoids adding overhead for the non-nested case.
2643 if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease)
2644 NestingDetected = true;
2646 MDNode *ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
2647 S.ResetSequenceProgress(ReleaseMetadata ? S_MovableRelease : S_Release);
2648 S.RRI.ReleaseMetadata = ReleaseMetadata;
2649 S.RRI.KnownSafe = S.IsKnownIncremented();
2650 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2651 S.RRI.Calls.insert(Inst);
2653 S.SetKnownPositiveRefCount();
2656 case IC_RetainBlock:
2657 // An objc_retainBlock call with just a use may need to be kept,
2658 // because it may be copying a block from the stack to the heap.
2659 if (!IsRetainBlockOptimizable(Inst))
2664 Arg = GetObjCArg(Inst);
2666 PtrState &S = MyStates.getPtrBottomUpState(Arg);
2667 S.SetKnownPositiveRefCount();
2669 switch (S.GetSeq()) {
2672 case S_MovableRelease:
2674 S.RRI.ReverseInsertPts.clear();
2677 // Don't do retain+release tracking for IC_RetainRV, because it's
2678 // better to let it remain as the first instruction after a call.
2679 if (Class != IC_RetainRV) {
2680 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
2681 Retains[Inst] = S.RRI;
2683 S.ClearSequenceProgress();
2688 llvm_unreachable("bottom-up pointer in retain state!");
2690 return NestingDetected;
2692 case IC_AutoreleasepoolPop:
2693 // Conservatively, clear MyStates for all known pointers.
2694 MyStates.clearBottomUpPointers();
2695 return NestingDetected;
2696 case IC_AutoreleasepoolPush:
2698 // These are irrelevant.
2699 return NestingDetected;
2704 // Consider any other possible effects of this instruction on each
2705 // pointer being tracked.
2706 for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
2707 ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
2711 PtrState &S = MI->second;
2712 Sequence Seq = S.GetSeq();
2714 // Check for possible releases.
2715 if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
2719 S.SetSeq(S_CanRelease);
2723 case S_MovableRelease:
2728 llvm_unreachable("bottom-up pointer in retain state!");
2732 // Check for possible direct uses.
2735 case S_MovableRelease:
2736 if (CanUse(Inst, Ptr, PA, Class)) {
2737 assert(S.RRI.ReverseInsertPts.empty());
2738 // If this is an invoke instruction, we're scanning it as part of
2739 // one of its successor blocks, since we can't insert code after it
2740 // in its own block, and we don't want to split critical edges.
2741 if (isa<InvokeInst>(Inst))
2742 S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
2744 S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
2746 } else if (Seq == S_Release &&
2747 (Class == IC_User || Class == IC_CallOrUser)) {
2748 // Non-movable releases depend on any possible objc pointer use.
2750 assert(S.RRI.ReverseInsertPts.empty());
2751 // As above; handle invoke specially.
2752 if (isa<InvokeInst>(Inst))
2753 S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
2755 S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
2759 if (CanUse(Inst, Ptr, PA, Class))
2767 llvm_unreachable("bottom-up pointer in retain state!");
2771 return NestingDetected;
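
/// VisitBottomUp - Visit a basic block bottom-up: merge the states of the
/// block's successors, then visit each instruction in reverse order,
/// including any invoke instructions in predecessor blocks, which are
/// scanned here because code can't be inserted after an invoke in its own
/// block.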
2775 ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
2776 DenseMap<const BasicBlock *, BBState> &BBStates,
2777 MapVector<Value *, RRInfo> &Retains) {
2778 bool NestingDetected = false;
2779 BBState &MyStates = BBStates[BB];
2781 // Merge the states from each successor to compute the initial state
2782 // for the current block.
2783 BBState::edge_iterator SI(MyStates.succ_begin()),
2784 SE(MyStates.succ_end());
2786 const BasicBlock *Succ = *SI;
2787 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
2788 assert(I != BBStates.end());
2789 MyStates.InitFromSucc(I->second);
2791 for (; SI != SE; ++SI) {
      Succ = *SI;
      I = BBStates.find(Succ);
2794 assert(I != BBStates.end());
2795 MyStates.MergeSucc(I->second);
2799 // Visit all the instructions, bottom-up.
2800 for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
2801 Instruction *Inst = llvm::prior(I);
2803 // Invoke instructions are visited as part of their successors (below).
2804 if (isa<InvokeInst>(Inst))
2807 NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
2810 // If there's a predecessor with an invoke, visit the invoke as if it were
2811 // part of this block, since we can't insert code after an invoke in its own
2812 // block, and we don't want to split critical edges.
2813 for (BBState::edge_iterator PI(MyStates.pred_begin()),
2814 PE(MyStates.pred_end()); PI != PE; ++PI) {
2815 BasicBlock *Pred = *PI;
2816 if (InvokeInst *II = dyn_cast<InvokeInst>(&Pred->back()))
2817 NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
2820 return NestingDetected;
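
/// VisitInstructionTopDown - Analyze a single instruction in the top-down
/// direction, updating the per-pointer sequence state in MyStates and, when a
/// complete candidate is found, recording its RRInfo in Releases.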
2824 ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
2825 DenseMap<Value *, RRInfo> &Releases,
2826 BBState &MyStates) {
2827 bool NestingDetected = false;
2828 InstructionClass Class = GetInstructionClass(Inst);
2829 const Value *Arg = 0;
2832 case IC_RetainBlock:
2833 // An objc_retainBlock call with just a use may need to be kept,
2834 // because it may be copying a block from the stack to the heap.
2835 if (!IsRetainBlockOptimizable(Inst))
2840 Arg = GetObjCArg(Inst);
2842 PtrState &S = MyStates.getPtrTopDownState(Arg);
2844 // Don't do retain+release tracking for IC_RetainRV, because it's
2845 // better to let it remain as the first instruction after a call.
2846 if (Class != IC_RetainRV) {
      // Check for two retains in a row on the same pointer. If we see one,
      // make a note, and we'll circle back to revisit it after we've
      // hopefully eliminated the second retain, which may allow us to
      // eliminate the first retain too.
2851 // Theoretically we could implement removal of nested retain+release
2852 // pairs by making PtrState hold a stack of states, but this is
2853 // simple and avoids adding overhead for the non-nested case.
2854 if (S.GetSeq() == S_Retain)
2855 NestingDetected = true;
2857 S.ResetSequenceProgress(S_Retain);
2858 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
2859 S.RRI.KnownSafe = S.IsKnownIncremented();
2860 S.RRI.Calls.insert(Inst);
2863 S.SetKnownPositiveRefCount();
    // A retain can be a potential use; proceed to the generic checking
    // code below.
2870 Arg = GetObjCArg(Inst);
2872 PtrState &S = MyStates.getPtrTopDownState(Arg);
2875 switch (S.GetSeq()) {
2878 S.RRI.ReverseInsertPts.clear();
2881 S.RRI.ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
2882 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2883 Releases[Inst] = S.RRI;
2884 S.ClearSequenceProgress();
2890 case S_MovableRelease:
2891 llvm_unreachable("top-down pointer in release state!");
2895 case IC_AutoreleasepoolPop:
2896 // Conservatively, clear MyStates for all known pointers.
2897 MyStates.clearTopDownPointers();
2898 return NestingDetected;
2899 case IC_AutoreleasepoolPush:
2901 // These are irrelevant.
2902 return NestingDetected;
2907 // Consider any other possible effects of this instruction on each
2908 // pointer being tracked.
2909 for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(),
2910 ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
2914 PtrState &S = MI->second;
2915 Sequence Seq = S.GetSeq();
2917 // Check for possible releases.
2918 if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
2922 S.SetSeq(S_CanRelease);
2923 assert(S.RRI.ReverseInsertPts.empty());
2924 S.RRI.ReverseInsertPts.insert(Inst);
        // One call can't cause a transition from S_Retain to S_CanRelease
        // and S_CanRelease to S_Use. If we've made the first transition,
        // we're done.
2936 case S_MovableRelease:
2937 llvm_unreachable("top-down pointer in release state!");
2941 // Check for possible direct uses.
2944 if (CanUse(Inst, Ptr, PA, Class))
2953 case S_MovableRelease:
2954 llvm_unreachable("top-down pointer in release state!");
2958 return NestingDetected;
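
/// VisitTopDown - Visit a basic block top-down: merge the states of the
/// block's predecessors, visit each instruction in order, and then check
/// the block for CFG hazards.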
2962 ObjCARCOpt::VisitTopDown(BasicBlock *BB,
2963 DenseMap<const BasicBlock *, BBState> &BBStates,
2964 DenseMap<Value *, RRInfo> &Releases) {
2965 bool NestingDetected = false;
2966 BBState &MyStates = BBStates[BB];
2968 // Merge the states from each predecessor to compute the initial state
2969 // for the current block.
2970 BBState::edge_iterator PI(MyStates.pred_begin()),
2971 PE(MyStates.pred_end());
2973 const BasicBlock *Pred = *PI;
2974 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
2975 assert(I != BBStates.end());
2976 MyStates.InitFromPred(I->second);
2978 for (; PI != PE; ++PI) {
      Pred = *PI;
      I = BBStates.find(Pred);
2981 assert(I != BBStates.end());
2982 MyStates.MergePred(I->second);
2986 // Visit all the instructions, top-down.
2987 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
2988 Instruction *Inst = I;
2989 NestingDetected |= VisitInstructionTopDown(Inst, Releases, MyStates);
2992 CheckForCFGHazards(BB, BBStates, MyStates);
2993 return NestingDetected;
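
/// ComputePostOrders - Compute a CFG postorder and a reverse-CFG postorder in
/// a single pass, recording each block's (non-ignored) predecessor and
/// successor edges in BBStates along the way. Blocks with no non-ignored
/// successors are treated as exits for the reverse-CFG walk.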
2997 ComputePostOrders(Function &F,
2998 SmallVectorImpl<BasicBlock *> &PostOrder,
2999 SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
3000 unsigned NoObjCARCExceptionsMDKind,
3001 DenseMap<const BasicBlock *, BBState> &BBStates) {
3002 /// Visited - The visited set, for doing DFS walks.
3003 SmallPtrSet<BasicBlock *, 16> Visited;
3005 // Do DFS, computing the PostOrder.
3006 SmallPtrSet<BasicBlock *, 16> OnStack;
3007 SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;
3009 // Functions always have exactly one entry block, and we don't have
3010 // any other block that we treat like an entry block.
3011 BasicBlock *EntryBB = &F.getEntryBlock();
3012 BBState &MyStates = BBStates[EntryBB];
3013 MyStates.SetAsEntry();
3014 TerminatorInst *EntryTI = cast<TerminatorInst>(&EntryBB->back());
3015 SuccStack.push_back(std::make_pair(EntryBB, succ_iterator(EntryTI)));
3016 Visited.insert(EntryBB);
3017 OnStack.insert(EntryBB);
  do {
    BasicBlock *CurrBB = SuccStack.back().first;
3021 TerminatorInst *TI = cast<TerminatorInst>(&CurrBB->back());
3022 succ_iterator SE(TI, false);
3024 // If the terminator is an invoke marked with the
3025 // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
3026 // ignored, for ARC purposes.
3027 if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
3030 while (SuccStack.back().second != SE) {
3031 BasicBlock *SuccBB = *SuccStack.back().second++;
3032 if (Visited.insert(SuccBB)) {
3033 TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
3034 SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
3035 BBStates[CurrBB].addSucc(SuccBB);
3036 BBState &SuccStates = BBStates[SuccBB];
3037 SuccStates.addPred(CurrBB);
3038 OnStack.insert(SuccBB);
3042 if (!OnStack.count(SuccBB)) {
3043 BBStates[CurrBB].addSucc(SuccBB);
3044 BBStates[SuccBB].addPred(CurrBB);
3047 OnStack.erase(CurrBB);
3048 PostOrder.push_back(CurrBB);
3049 SuccStack.pop_back();
3050 } while (!SuccStack.empty());
3054 // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
  // Functions may have many exits, and there may also be blocks which we
  // treat as exits due to ignored edges.
3057 SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
3058 for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
3059 BasicBlock *ExitBB = I;
3060 BBState &MyStates = BBStates[ExitBB];
3061 if (!MyStates.isExit())
3064 MyStates.SetAsExit();
3066 PredStack.push_back(std::make_pair(ExitBB, MyStates.pred_begin()));
3067 Visited.insert(ExitBB);
3068 while (!PredStack.empty()) {
3069 reverse_dfs_next_succ:
3070 BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
3071 while (PredStack.back().second != PE) {
3072 BasicBlock *BB = *PredStack.back().second++;
3073 if (Visited.insert(BB)) {
3074 PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
3075 goto reverse_dfs_next_succ;
3078 ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
3083 // Visit - Visit the function both top-down and bottom-up.
3085 ObjCARCOpt::Visit(Function &F,
3086 DenseMap<const BasicBlock *, BBState> &BBStates,
3087 MapVector<Value *, RRInfo> &Retains,
3088 DenseMap<Value *, RRInfo> &Releases) {
3090 // Use reverse-postorder traversals, because we magically know that loops
3091 // will be well behaved, i.e. they won't repeatedly call retain on a single
3092 // pointer without doing a release. We can't use the ReversePostOrderTraversal
3093 // class here because we want the reverse-CFG postorder to consider each
3094 // function exit point, and we want to ignore selected cycle edges.
3095 SmallVector<BasicBlock *, 16> PostOrder;
3096 SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
3097 ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
3098 NoObjCARCExceptionsMDKind,
3101 // Use reverse-postorder on the reverse CFG for bottom-up.
3102 bool BottomUpNestingDetected = false;
3103 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
3104 ReverseCFGPostOrder.rbegin(), E = ReverseCFGPostOrder.rend();
3106 BottomUpNestingDetected |= VisitBottomUp(*I, BBStates, Retains);
3108 // Use reverse-postorder for top-down.
3109 bool TopDownNestingDetected = false;
3110 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
3111 PostOrder.rbegin(), E = PostOrder.rend();
3113 TopDownNestingDetected |= VisitTopDown(*I, BBStates, Releases);
3115 return TopDownNestingDetected && BottomUpNestingDetected;
3118 /// MoveCalls - Move the calls in RetainsToMove and ReleasesToMove.
3119 void ObjCARCOpt::MoveCalls(Value *Arg,
3120 RRInfo &RetainsToMove,
3121 RRInfo &ReleasesToMove,
3122 MapVector<Value *, RRInfo> &Retains,
3123 DenseMap<Value *, RRInfo> &Releases,
3124 SmallVectorImpl<Instruction *> &DeadInsts,
3126 Type *ArgTy = Arg->getType();
3127 Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
3129 // Insert the new retain and release calls.
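  // Note the pairing below: the new retain calls are created at the insertion
  // points collected for the releases being moved, and the new release calls
  // at the points collected for the retains, which is what moves the two
  // halves of each retain+release pair toward each other.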
3130 for (SmallPtrSet<Instruction *, 2>::const_iterator
3131 PI = ReleasesToMove.ReverseInsertPts.begin(),
3132 PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
3133 Instruction *InsertPt = *PI;
3134 Value *MyArg = ArgTy == ParamTy ? Arg :
3135 new BitCastInst(Arg, ParamTy, "", InsertPt);
    CallInst *Call =
      CallInst::Create(RetainsToMove.IsRetainBlock ?
                       getRetainBlockCallee(M) : getRetainCallee(M),
                       MyArg, "", InsertPt);
3140 Call->setDoesNotThrow();
3141 if (RetainsToMove.IsRetainBlock)
3142 Call->setMetadata(CopyOnEscapeMDKind,
3143 MDNode::get(M->getContext(), ArrayRef<Value *>()));
3145 Call->setTailCall();
3147 for (SmallPtrSet<Instruction *, 2>::const_iterator
3148 PI = RetainsToMove.ReverseInsertPts.begin(),
3149 PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
3150 Instruction *InsertPt = *PI;
3151 Value *MyArg = ArgTy == ParamTy ? Arg :
3152 new BitCastInst(Arg, ParamTy, "", InsertPt);
    CallInst *Call = CallInst::Create(getReleaseCallee(M), MyArg,
                                      "", InsertPt);
3155 // Attach a clang.imprecise_release metadata tag, if appropriate.
3156 if (MDNode *M = ReleasesToMove.ReleaseMetadata)
3157 Call->setMetadata(ImpreciseReleaseMDKind, M);
3158 Call->setDoesNotThrow();
3159 if (ReleasesToMove.IsTailCallRelease)
3160 Call->setTailCall();
3163 // Delete the original retain and release calls.
3164 for (SmallPtrSet<Instruction *, 2>::const_iterator
3165 AI = RetainsToMove.Calls.begin(),
3166 AE = RetainsToMove.Calls.end(); AI != AE; ++AI) {
3167 Instruction *OrigRetain = *AI;
3168 Retains.blot(OrigRetain);
3169 DeadInsts.push_back(OrigRetain);
3171 for (SmallPtrSet<Instruction *, 2>::const_iterator
3172 AI = ReleasesToMove.Calls.begin(),
3173 AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) {
3174 Instruction *OrigRelease = *AI;
3175 Releases.erase(OrigRelease);
3176 DeadInsts.push_back(OrigRelease);
3180 /// PerformCodePlacement - Identify pairings between the retains and releases,
3181 /// and delete and/or move them.
3183 ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
3185 MapVector<Value *, RRInfo> &Retains,
3186 DenseMap<Value *, RRInfo> &Releases,
3188 bool AnyPairsCompletelyEliminated = false;
3189 RRInfo RetainsToMove;
3190 RRInfo ReleasesToMove;
3191 SmallVector<Instruction *, 4> NewRetains;
3192 SmallVector<Instruction *, 4> NewReleases;
3193 SmallVector<Instruction *, 8> DeadInsts;
3195 // Visit each retain.
3196 for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
3197 E = Retains.end(); I != E; ++I) {
3198 Value *V = I->first;
3199 if (!V) continue; // blotted
3201 Instruction *Retain = cast<Instruction>(V);
3202 Value *Arg = GetObjCArg(Retain);
3204 // If the object being released is in static or stack storage, we know it's
3205 // not being managed by ObjC reference counting, so we can delete pairs
3206 // regardless of what possible decrements or uses lie between them.
3207 bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
3209 // A constant pointer can't be pointing to an object on the heap. It may
3210 // be reference-counted, but it won't be deleted.
3211 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
3212 if (const GlobalVariable *GV =
3213 dyn_cast<GlobalVariable>(
3214 StripPointerCastsAndObjCCalls(LI->getPointerOperand())))
3215 if (GV->isConstant())
3218 // If a pair happens in a region where it is known that the reference count
3219 // is already incremented, we can similarly ignore possible decrements.
3220 bool KnownSafeTD = true, KnownSafeBU = true;
3222 // Connect the dots between the top-down-collected RetainsToMove and
3223 // bottom-up-collected ReleasesToMove to form sets of related calls.
3224 // This is an iterative process so that we connect multiple releases
3225 // to multiple retains if needed.
3226 unsigned OldDelta = 0;
3227 unsigned NewDelta = 0;
3228 unsigned OldCount = 0;
3229 unsigned NewCount = 0;
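    // Roughly: OldDelta and NewDelta track the net retain-count effect, across
    // all CFG paths, of the existing calls and of the proposed new placements
    // respectively, and both must balance out for code motion to be safe;
    // OldCount and NewCount are path-weighted call tallies used to detect
    // complete elimination and to update statistics.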
3230 bool FirstRelease = true;
3231 bool FirstRetain = true;
3232 NewRetains.push_back(Retain);
3234 for (SmallVectorImpl<Instruction *>::const_iterator
3235 NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
3236 Instruction *NewRetain = *NI;
3237 MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
3238 assert(It != Retains.end());
3239 const RRInfo &NewRetainRRI = It->second;
3240 KnownSafeTD &= NewRetainRRI.KnownSafe;
3241 for (SmallPtrSet<Instruction *, 2>::const_iterator
3242 LI = NewRetainRRI.Calls.begin(),
3243 LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
3244 Instruction *NewRetainRelease = *LI;
3245 DenseMap<Value *, RRInfo>::const_iterator Jt =
3246 Releases.find(NewRetainRelease);
3247 if (Jt == Releases.end())
3249 const RRInfo &NewRetainReleaseRRI = Jt->second;
3250 assert(NewRetainReleaseRRI.Calls.count(NewRetain));
3251 if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
          OldDelta -=
            BBStates[NewRetainRelease->getParent()].GetAllPathCount();
3255 // Merge the ReleaseMetadata and IsTailCallRelease values.
3257 ReleasesToMove.ReleaseMetadata =
3258 NewRetainReleaseRRI.ReleaseMetadata;
3259 ReleasesToMove.IsTailCallRelease =
3260 NewRetainReleaseRRI.IsTailCallRelease;
3261 FirstRelease = false;
3263 if (ReleasesToMove.ReleaseMetadata !=
3264 NewRetainReleaseRRI.ReleaseMetadata)
3265 ReleasesToMove.ReleaseMetadata = 0;
3266 if (ReleasesToMove.IsTailCallRelease !=
3267 NewRetainReleaseRRI.IsTailCallRelease)
3268 ReleasesToMove.IsTailCallRelease = false;
          // Collect the optimal insertion points.
          if (!KnownSafe)
3273 for (SmallPtrSet<Instruction *, 2>::const_iterator
3274 RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
3275 RE = NewRetainReleaseRRI.ReverseInsertPts.end();
3277 Instruction *RIP = *RI;
3278 if (ReleasesToMove.ReverseInsertPts.insert(RIP))
3279 NewDelta -= BBStates[RIP->getParent()].GetAllPathCount();
3281 NewReleases.push_back(NewRetainRelease);
3286 if (NewReleases.empty()) break;
3288 // Back the other way.
3289 for (SmallVectorImpl<Instruction *>::const_iterator
3290 NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
3291 Instruction *NewRelease = *NI;
3292 DenseMap<Value *, RRInfo>::const_iterator It =
3293 Releases.find(NewRelease);
3294 assert(It != Releases.end());
3295 const RRInfo &NewReleaseRRI = It->second;
3296 KnownSafeBU &= NewReleaseRRI.KnownSafe;
3297 for (SmallPtrSet<Instruction *, 2>::const_iterator
3298 LI = NewReleaseRRI.Calls.begin(),
3299 LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
3300 Instruction *NewReleaseRetain = *LI;
3301 MapVector<Value *, RRInfo>::const_iterator Jt =
3302 Retains.find(NewReleaseRetain);
3303 if (Jt == Retains.end())
3305 const RRInfo &NewReleaseRetainRRI = Jt->second;
3306 assert(NewReleaseRetainRRI.Calls.count(NewRelease));
3307 if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
3308 unsigned PathCount =
3309 BBStates[NewReleaseRetain->getParent()].GetAllPathCount();
3310 OldDelta += PathCount;
3311 OldCount += PathCount;
3313 // Merge the IsRetainBlock values.
3315 RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
3316 FirstRetain = false;
          } else if (RetainsToMove.IsRetainBlock !=
                     NewReleaseRetainRRI.IsRetainBlock)
3319 // It's not possible to merge the sequences if one uses
3320 // objc_retain and the other uses objc_retainBlock.
          // Collect the optimal insertion points.
          if (!KnownSafe)
3325 for (SmallPtrSet<Instruction *, 2>::const_iterator
3326 RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
3327 RE = NewReleaseRetainRRI.ReverseInsertPts.end();
3329 Instruction *RIP = *RI;
3330 if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
3331 PathCount = BBStates[RIP->getParent()].GetAllPathCount();
3332 NewDelta += PathCount;
3333 NewCount += PathCount;
3336 NewRetains.push_back(NewReleaseRetain);
3340 NewReleases.clear();
3341 if (NewRetains.empty()) break;
3344 // If the pointer is known incremented or nested, we can safely delete the
3345 // pair regardless of what's between them.
3346 if (KnownSafeTD || KnownSafeBU) {
3347 RetainsToMove.ReverseInsertPts.clear();
3348 ReleasesToMove.ReverseInsertPts.clear();
3351 // Determine whether the new insertion points we computed preserve the
3352 // balance of retain and release calls through the program.
3353 // TODO: If the fully aggressive solution isn't valid, try to find a
3354 // less aggressive solution which is.
3359 // Determine whether the original call points are balanced in the retain and
    // release calls through the program. If not, conservatively don't touch
    // anything.
3362 // TODO: It's theoretically possible to do code motion in this case, as
3363 // long as the existing imbalances are maintained.
3367 // Ok, everything checks out and we're all set. Let's move some code!
3369 assert(OldCount != 0 && "Unreachable code?");
3370 AnyPairsCompletelyEliminated = NewCount == 0;
3371 NumRRs += OldCount - NewCount;
3372 MoveCalls(Arg, RetainsToMove, ReleasesToMove,
3373 Retains, Releases, DeadInsts, M);
    NewReleases.clear();
    NewRetains.clear();
3378 RetainsToMove.clear();
3379 ReleasesToMove.clear();
3382 // Now that we're done moving everything, we can delete the newly dead
3383 // instructions, as we no longer need them as insert points.
3384 while (!DeadInsts.empty())
3385 EraseInstruction(DeadInsts.pop_back_val());
3387 return AnyPairsCompletelyEliminated;
3390 /// OptimizeWeakCalls - Weak pointer optimizations.
3391 void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
3392 // First, do memdep-style RLE and S2L optimizations. We can't use memdep
  // itself because it uses AliasAnalysis and we need to do provenance
  // queries instead.
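  //
  // For example (illustrative IR), when the two weak locations must-alias:
  //   call i8* @objc_storeWeak(i8** %p, i8* %v)
  //   %x = call i8* @objc_loadWeak(i8** %p)
  // the load is redundant, and uses of %x can be replaced with %v.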
3395 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3396 Instruction *Inst = &*I++;
3397 InstructionClass Class = GetBasicInstructionClass(Inst);
3398 if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained)
3401 // Delete objc_loadWeak calls with no users.
3402 if (Class == IC_LoadWeak && Inst->use_empty()) {
3403 Inst->eraseFromParent();
3407 // TODO: For now, just look for an earlier available version of this value
3408 // within the same block. Theoretically, we could do memdep-style non-local
3409 // analysis too, but that would want caching. A better approach would be to
3410 // use the technique that EarlyCSE uses.
3411 inst_iterator Current = llvm::prior(I);
3412 BasicBlock *CurrentBB = Current.getBasicBlockIterator();
3413 for (BasicBlock::iterator B = CurrentBB->begin(),
                              J = Current.getInstructionIterator();
         J != B; --J) {
3416 Instruction *EarlierInst = &*llvm::prior(J);
3417 InstructionClass EarlierClass = GetInstructionClass(EarlierInst);
3418 switch (EarlierClass) {
3420 case IC_LoadWeakRetained: {
        // If this is loading from the same pointer, replace this load's value
        // with that one.
3423 CallInst *Call = cast<CallInst>(Inst);
3424 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
3425 Value *Arg = Call->getArgOperand(0);
3426 Value *EarlierArg = EarlierCall->getArgOperand(0);
3427 switch (PA.getAA()->alias(Arg, EarlierArg)) {
3428 case AliasAnalysis::MustAlias:
3430 // If the load has a builtin retain, insert a plain retain for it.
3431 if (Class == IC_LoadWeakRetained) {
            CallInst *CI =
              CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
                               "", Call);
            CI->setTailCall();
          }
3437 // Zap the fully redundant load.
3438 Call->replaceAllUsesWith(EarlierCall);
3439 Call->eraseFromParent();
3441 case AliasAnalysis::MayAlias:
3442 case AliasAnalysis::PartialAlias:
3444 case AliasAnalysis::NoAlias:
        // If this is storing to the same pointer and has the same size etc.,
        // replace this load's value with the stored value.
3453 CallInst *Call = cast<CallInst>(Inst);
3454 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
3455 Value *Arg = Call->getArgOperand(0);
3456 Value *EarlierArg = EarlierCall->getArgOperand(0);
3457 switch (PA.getAA()->alias(Arg, EarlierArg)) {
3458 case AliasAnalysis::MustAlias:
3460 // If the load has a builtin retain, insert a plain retain for it.
3461 if (Class == IC_LoadWeakRetained) {
            CallInst *CI =
              CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
                               "", Call);
            CI->setTailCall();
          }
3467 // Zap the fully redundant load.
3468 Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
3469 Call->eraseFromParent();
3471 case AliasAnalysis::MayAlias:
3472 case AliasAnalysis::PartialAlias:
3474 case AliasAnalysis::NoAlias:
      // TODO: Grab the copied value.
3483 case IC_AutoreleasepoolPush:
3486 // Weak pointers are only modified through the weak entry points
3487 // (and arbitrary calls, which could call the weak entry points).
3490 // Anything else could modify the weak pointer.
3497 // Then, for each destroyWeak with an alloca operand, check to see if
3498 // the alloca and all its users can be zapped.
3499 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3500 Instruction *Inst = &*I++;
3501 InstructionClass Class = GetBasicInstructionClass(Inst);
3502 if (Class != IC_DestroyWeak)
3505 CallInst *Call = cast<CallInst>(Inst);
3506 Value *Arg = Call->getArgOperand(0);
3507 if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
3508 for (Value::use_iterator UI = Alloca->use_begin(),
3509 UE = Alloca->use_end(); UI != UE; ++UI) {
3510 const Instruction *UserInst = cast<Instruction>(*UI);
3511 switch (GetBasicInstructionClass(UserInst)) {
3514 case IC_DestroyWeak:
3521 for (Value::use_iterator UI = Alloca->use_begin(),
3522 UE = Alloca->use_end(); UI != UE; ) {
3523 CallInst *UserInst = cast<CallInst>(*UI++);
3524 switch (GetBasicInstructionClass(UserInst)) {
3527 // These functions return their second argument.
3528 UserInst->replaceAllUsesWith(UserInst->getArgOperand(1));
3530 case IC_DestroyWeak:
3534 llvm_unreachable("alloca really is used!");
3536 UserInst->eraseFromParent();
3538 Alloca->eraseFromParent();
3544 /// OptimizeSequences - Identify program paths which execute sequences of
3545 /// retains and releases which can be eliminated.
3546 bool ObjCARCOpt::OptimizeSequences(Function &F) {
3547 /// Releases, Retains - These are used to store the results of the main flow
3548 /// analysis. These use Value* as the key instead of Instruction* so that the
3549 /// map stays valid when we get around to rewriting code and calls get
3550 /// replaced by arguments.
3551 DenseMap<Value *, RRInfo> Releases;
3552 MapVector<Value *, RRInfo> Retains;
  /// BBStates - This is used during the traversal of the function to track the
  /// states for each identified object at each block.
3556 DenseMap<const BasicBlock *, BBState> BBStates;
3558 // Analyze the CFG of the function, and all instructions.
3559 bool NestingDetected = Visit(F, BBStates, Retains, Releases);
  return PerformCodePlacement(BBStates, Retains, Releases, F.getParent()) &&
         NestingDetected;
}
3566 /// OptimizeReturns - Look for this pattern:
3568 /// %call = call i8* @something(...)
3569 /// %2 = call i8* @objc_retain(i8* %call)
/// %3 = call i8* @objc_autorelease(i8* %2)
/// ret i8* %3
///
/// And delete the retain and autorelease.
3575 /// Otherwise if it's just this:
/// %3 = call i8* @objc_autorelease(i8* %2)
/// ret i8* %3
///
/// convert the autorelease to autoreleaseRV.
3581 void ObjCARCOpt::OptimizeReturns(Function &F) {
  if (!F.getReturnType()->isPointerTy())
    return;
3585 SmallPtrSet<Instruction *, 4> DependingInstructions;
3586 SmallPtrSet<const BasicBlock *, 4> Visited;
3587 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
3588 BasicBlock *BB = FI;
    ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
    if (!Ret)
      continue;
3592 const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0));
3593 FindDependencies(NeedsPositiveRetainCount, Arg,
3594 BB, Ret, DependingInstructions, Visited, PA);
3595 if (DependingInstructions.size() != 1)
3599 CallInst *Autorelease =
3600 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3603 InstructionClass AutoreleaseClass = GetBasicInstructionClass(Autorelease);
3604 if (!IsAutorelease(AutoreleaseClass))
3606 if (GetObjCArg(Autorelease) != Arg)
3609 DependingInstructions.clear();
3612 // Check that there is nothing that can affect the reference
3613 // count between the autorelease and the retain.
3614 FindDependencies(CanChangeRetainCount, Arg,
3615 BB, Autorelease, DependingInstructions, Visited, PA);
3616 if (DependingInstructions.size() != 1)
      CallInst *Retain =
        dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3623 // Check that we found a retain with the same argument.
3625 !IsRetain(GetBasicInstructionClass(Retain)) ||
3626 GetObjCArg(Retain) != Arg)
3629 DependingInstructions.clear();
3632 // Convert the autorelease to an autoreleaseRV, since it's
3633 // returning the value.
3634 if (AutoreleaseClass == IC_Autorelease) {
3635 Autorelease->setCalledFunction(getAutoreleaseRVCallee(F.getParent()));
3636 AutoreleaseClass = IC_AutoreleaseRV;
3639 // Check that there is nothing that can affect the reference
3640 // count between the retain and the call.
3641 // Note that Retain need not be in BB.
3642 FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
3643 DependingInstructions, Visited, PA);
3644 if (DependingInstructions.size() != 1)
3649 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3651 // Check that the pointer is the return value of the call.
3652 if (!Call || Arg != Call)
3655 // Check that the call is a regular call.
3656 InstructionClass Class = GetBasicInstructionClass(Call);
3657 if (Class != IC_CallOrUser && Class != IC_Call)
3660 // If so, we can zap the retain and autorelease.
3663 EraseInstruction(Retain);
3664 EraseInstruction(Autorelease);
3670 DependingInstructions.clear();
bool ObjCARCOpt::doInitialization(Module &M) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  // Identify the imprecise release metadata kind.
  ImpreciseReleaseMDKind =
    M.getContext().getMDKindID("clang.imprecise_release");
  CopyOnEscapeMDKind =
    M.getContext().getMDKindID("clang.arc.copy_on_escape");
  NoObjCARCExceptionsMDKind =
    M.getContext().getMDKindID("clang.arc.no_objc_arc_exceptions");

  // Intuitively, objc_retain and others are nocapture, however in practice
  // they are not, because they return their argument value. And objc_release
  // calls finalizers which can have arbitrary side effects.

  // These are initialized lazily.
  RetainRVCallee = 0;
  AutoreleaseRVCallee = 0;
  ReleaseCallee = 0;
  RetainCallee = 0;
  RetainBlockCallee = 0;
  AutoreleaseCallee = 0;

  return false;
}

bool ObjCARCOpt::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;

  PA.setAA(&getAnalysis<AliasAnalysis>());

  // This pass performs several distinct transformations. As a compile-time aid
  // when compiling code that isn't ObjC, skip these if the relevant ObjC
  // library functions aren't declared.

  // Preliminary optimizations. This also computes UsedInThisFunction.
  OptimizeIndividualCalls(F);

  // Optimizations for weak pointers.
  if (UsedInThisFunction & ((1 << IC_LoadWeak) |
                            (1 << IC_LoadWeakRetained) |
                            (1 << IC_StoreWeak) |
                            (1 << IC_InitWeak) |
                            (1 << IC_CopyWeak) |
                            (1 << IC_MoveWeak) |
                            (1 << IC_DestroyWeak)))
    OptimizeWeakCalls(F);

  // Optimizations for retain+release pairs.
  if (UsedInThisFunction & ((1 << IC_Retain) |
                            (1 << IC_RetainRV) |
                            (1 << IC_RetainBlock)))
    if (UsedInThisFunction & (1 << IC_Release))
      // Run OptimizeSequences until it either stops making changes or
      // no retain+release pair nesting is detected.
      while (OptimizeSequences(F)) {}

  // Optimizations if objc_autorelease is used.
  if (UsedInThisFunction & ((1 << IC_Autorelease) |
                            (1 << IC_AutoreleaseRV)))
    OptimizeReturns(F);

  return Changed;
}

void ObjCARCOpt::releaseMemory() {
  PA.clear();
}

//===----------------------------------------------------------------------===//
// ObjC ARC contraction.
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "llvm/Operator.h"
#include "llvm/InlineAsm.h"
#include "llvm/Analysis/Dominators.h"

STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

namespace {
  /// ObjCARCContract - Late ARC optimizations. These change the IR in a way
  /// that makes it difficult to be analyzed by ObjCARCOpt, so it's run late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// StoreStrongCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them. These are initialized
    /// lazily to avoid cluttering up the Module with unused declarations.
    Constant *StoreStrongCallee,
             *RetainAutoreleaseCallee, *RetainAutoreleaseRVCallee;

    /// RetainRVMarker - The inline asm string to insert between calls and
    /// RetainRV calls to make the optimization work on targets which need it.
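    /// For example (an illustrative assumption; the actual string is
    /// supplied by the frontend via module metadata), on ARM the marker
    /// might look like:
    ///
    ///    mov r7, r7  @ marker for objc_retainAutoreleaseReturnValue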
    const MDString *RetainRVMarker;

    /// StoreStrongCalls - The set of inserted objc_storeStrong calls. If
    /// at the end of walking the function we have found no alloca
    /// instructions, these calls can be marked "tail".
    SmallPtrSet<CallInst *, 8> StoreStrongCalls;

    Constant *getStoreStrongCallee(Module *M);
    Constant *getRetainAutoreleaseCallee(Module *M);
    Constant *getRetainAutoreleaseRVCallee(Module *M);

    bool ContractAutorelease(Function &F, Instruction *Autorelease,
                             InstructionClass Class,
                             SmallPtrSet<Instruction *, 4>
                               &DependingInstructions,
                             SmallPtrSet<const BasicBlock *, 4>
                               &Visited);

    void ContractRelease(Instruction *Release,
                         inst_iterator &Iter);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract,
                      "objc-arc-contract", "ObjC ARC contraction", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_END(ObjCARCContract,
                    "objc-arc-contract", "ObjC ARC contraction", false, false)

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContract();
}

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addRequired<DominatorTree>();
  AU.setPreservesCFG();
}

Constant *ObjCARCContract::getStoreStrongCallee(Module *M) {
  if (!StoreStrongCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    Type *I8XX = PointerType::getUnqual(I8X);
    Type *Params[] = { I8XX, I8X };

    Attributes::Builder BNoUnwind;
    BNoUnwind.addAttribute(Attributes::NoUnwind);
    Attributes::Builder BNoCapture;
    BNoCapture.addAttribute(Attributes::NoCapture);
    AttrListPtr Attributes = AttrListPtr()
      .addAttr(M->getContext(), ~0u, Attributes::get(BNoUnwind))
      .addAttr(M->getContext(), 1, Attributes::get(BNoCapture));

    StoreStrongCallee =
      M->getOrInsertFunction(
        "objc_storeStrong",
        FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
        Attributes);
  }
  return StoreStrongCallee;
}

Constant *ObjCARCContract::getRetainAutoreleaseCallee(Module *M) {
  if (!RetainAutoreleaseCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    Type *Params[] = { I8X };
    FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
    Attributes::Builder B;
    B.addAttribute(Attributes::NoUnwind);
    AttrListPtr Attributes = AttrListPtr().addAttr(M->getContext(), ~0u,
                                                   Attributes::get(B));
    RetainAutoreleaseCallee =
      M->getOrInsertFunction("objc_retainAutorelease", FTy, Attributes);
  }
  return RetainAutoreleaseCallee;
}

Constant *ObjCARCContract::getRetainAutoreleaseRVCallee(Module *M) {
  if (!RetainAutoreleaseRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    Type *Params[] = { I8X };
    FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
    Attributes::Builder B;
    B.addAttribute(Attributes::NoUnwind);
    AttrListPtr Attributes = AttrListPtr().addAttr(M->getContext(), ~0u,
                                                   Attributes::get(B));
    RetainAutoreleaseRVCallee =
      M->getOrInsertFunction("objc_retainAutoreleaseReturnValue", FTy,
                             Attributes);
  }
  return RetainAutoreleaseRVCallee;
}

/// ContractAutorelease - Merge an autorelease with a retain into a fused call.
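///
/// For example (an illustrative sketch, not from the original source):
///
///    %2 = call i8* @objc_retain(i8* %p)
///    %3 = call i8* @objc_autorelease(i8* %2)
///
/// becomes a single fused call:
///
///    %2 = call i8* @objc_retainAutorelease(i8* %p)
///
/// (or objc_retainAutoreleaseReturnValue when the autorelease is an
/// autoreleaseRV).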
bool
ObjCARCContract::ContractAutorelease(Function &F, Instruction *Autorelease,
                                     InstructionClass Class,
                                     SmallPtrSet<Instruction *, 4>
                                       &DependingInstructions,
                                     SmallPtrSet<const BasicBlock *, 4>
                                       &Visited) {
  const Value *Arg = GetObjCArg(Autorelease);

  // Check that there are no instructions between the retain and the
  // autorelease (such as an autorelease_pop) which may change the count.
  CallInst *Retain = 0;
  if (Class == IC_AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain ||
      GetBasicInstructionClass(Retain) != IC_Retain ||
      GetObjCArg(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  if (Class == IC_AutoreleaseRV)
    Retain->setCalledFunction(getRetainAutoreleaseRVCallee(F.getParent()));
  else
    Retain->setCalledFunction(getRetainAutoreleaseCallee(F.getParent()));

  EraseInstruction(Autorelease);
  return true;
}

/// ContractRelease - Attempt to merge an objc_release with a store, load, and
/// objc_retain to form an objc_storeStrong. This can be a little tricky
/// because the instructions don't always appear in order, and there may be
/// unrelated intervening instructions.
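///
/// For example (an illustrative sketch, not from the original source):
///
///    %old = load i8** %p
///    %1 = call i8* @objc_retain(i8* %new)
///    store i8* %new, i8** %p
///    call void @objc_release(i8* %old)
///
/// becomes
///
///    call void @objc_storeStrong(i8** %p, i8* %new)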
void ObjCARCContract::ContractRelease(Instruction *Release,
                                      inst_iterator &Iter) {
  LoadInst *Load = dyn_cast<LoadInst>(GetObjCArg(Release));
  if (!Load || !Load->isSimple()) return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB) return;

  // Walk down to find the store and the release, which may be in either order.
  BasicBlock::iterator I = Load, End = BB->end();
  ++I;
  AliasAnalysis::Location Loc = AA->getLocation(Load);
  StoreInst *Store = 0;
  bool SawRelease = false;
  for (; !Store || !SawRelease; ++I) {
    // Avoid potential invalid CFG.
    if (I == End)
      return;
    Instruction *Inst = I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    InstructionClass Class = GetBasicInstructionClass(Inst);

    // Unrelated retains are harmless.
    if (IsRetain(Class))
      continue;

    if (Store) {
      // The store is the point where we're going to put the objc_storeStrong,
      // so make sure there are no uses after it.
      if (CanUse(Inst, Load, PA, Class))
        return;
    } else if (AA->getModRefInfo(Inst, Loc) & AliasAnalysis::Mod) {
      // We are moving the load down to the store, so check for anything
      // else which writes to the memory between the load and the store.
      Store = dyn_cast<StoreInst>(Inst);
      if (!Store || !Store->isSimple()) return;
      if (Store->getPointerOperand() != Loc.Ptr) return;
    }
  }

  Value *New = StripPointerCastsAndObjCCalls(Store->getValueOperand());

  // Walk up to find the retain.
  I = Store;
  BasicBlock::iterator Begin = BB->begin();
  while (I != Begin && GetBasicInstructionClass(I) != IC_Retain)
    --I;
  Instruction *Retain = I;
  if (GetBasicInstructionClass(Retain) != IC_Retain) return;
  if (GetObjCArg(Retain) != New) return;

  Changed = true;
  ++NumStoreStrongs;

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  CallInst *StoreStrong =
    CallInst::Create(getStoreStrongCallee(BB->getParent()->getParent()),
                     Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

bool ObjCARCContract::doInitialization(Module &M) {
  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  // These are initialized lazily.
  StoreStrongCallee = 0;
  RetainAutoreleaseCallee = 0;
  RetainAutoreleaseRVCallee = 0;

  // Initialize RetainRVMarker.
  RetainRVMarker = 0;
  if (NamedMDNode *NMD =
        M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
    if (NMD->getNumOperands() == 1) {
      const MDNode *N = NMD->getOperand(0);
      if (N->getNumOperands() == 1)
        if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
          RetainRVMarker = S;
    }

  return false;
}

bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AliasAnalysis>();
  DT = &getAnalysis<DominatorTree>();

  PA.setAA(&getAnalysis<AliasAnalysis>());

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments, and
  // also if it calls any function which "returns twice" (such as setjmp),
  // since such a function may need to return to an earlier stack state.
  bool TailOkForStoreStrongs = !F.isVarArg() &&
                               !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
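  //
  // For example (an illustrative sketch, not from the original source),
  // given:
  //
  //    %1 = call i8* @objc_retain(i8* %p)
  //    call void @use_pointer(i8* %p)
  //
  // when the retain dominates the use, the use is rewritten in terms of the
  // call's return value:
  //
  //    %1 = call i8* @objc_retain(i8* %p)
  //    call void @use_pointer(i8* %1)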
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;

    // Only these library routines return their argument. In particular,
    // objc_retainBlock does not necessarily return its argument.
    InstructionClass Class = GetBasicInstructionClass(Inst);
    switch (Class) {
    case IC_Retain:
    case IC_FusedRetainAutorelease:
    case IC_FusedRetainAutoreleaseRV:
      break;
    case IC_Autorelease:
    case IC_AutoreleaseRV:
      if (ContractAutorelease(F, Inst, Class, DependingInstructions, Visited))
        continue;
      break;
    case IC_RetainRV: {
      // If we're compiling for a target which needs a special inline-asm
      // marker to do the retainAutoreleasedReturnValue optimization,
      // insert it now.
      if (!RetainRVMarker)
        break;
      BasicBlock::iterator BBI = Inst;
      BasicBlock *InstParent = Inst->getParent();

      // Step up to see if the call immediately precedes the RetainRV call.
      // If it's an invoke, we have to cross a block boundary. And we have
      // to carefully dodge no-op instructions.
      do {
        if (&*BBI == InstParent->begin()) {
          BasicBlock *Pred = InstParent->getSinglePredecessor();
          if (!Pred)
            goto decline_rv_optimization;
          BBI = Pred->getTerminator();
          break;
        }
        --BBI;
      } while (isNoopInstruction(BBI));

      if (&*BBI == GetObjCArg(Inst)) {
        Changed = true;
        InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RetainRVMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);
        CallInst::Create(IA, "", Inst);
      }
    decline_rv_optimization:
      break;
    }
    case IC_InitWeak: {
      // objc_initWeak(p, null) => *p = null
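      //
      // For example (an illustrative sketch, not from the original source):
      //
      //    %0 = call i8* @objc_initWeak(i8** %p, i8* null)
      //
      // becomes
      //
      //    store i8* null, i8** %p
      //
      // with all uses of %0 replaced by null.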
      CallInst *CI = cast<CallInst>(Inst);
      if (isNullOrUndef(CI->getArgOperand(1))) {
        Value *Null =
          ConstantPointerNull::get(cast<PointerType>(CI->getType()));
        Changed = true;
        new StoreInst(Null, CI->getArgOperand(0), CI);
        CI->replaceAllUsesWith(Null);
        CI->eraseFromParent();
      }
      break;
    }
    case IC_Release:
      ContractRelease(Inst, I);
      continue;
    case IC_User:
      // Be conservative if the function has any alloca instructions.
      // Technically we only care about escaping alloca instructions,
      // but this is sufficient to handle some interesting cases.
      if (isa<AllocaInst>(Inst))
        TailOkForStoreStrongs = false;
      continue;
    default:
      continue;
    }

    // Don't use GetObjCArg because we don't want to look through bitcasts
    // and such; to do the replacement, the argument must have type i8*.
    const Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    while (true) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        break;
      // Look through the uses of the pointer.
      for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        Use &U = UI.getUse();
        unsigned OperandNo = UI.getOperandNo();
        ++UI; // Increment UI now, because we may unlink its element.

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetObjCArg.
        if (DT->isReachableFromEntry(U) && DT->dominates(Inst, U)) {
          Changed = true;
          Instruction *Replacement = Inst;
          Type *UseTy = U.get()->getType();
          if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
            // For PHI nodes, insert the bitcast in the predecessor block.
            unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
            BasicBlock *BB = PHI->getIncomingBlock(ValNo);
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            &BB->back());
            // While we're here, rewrite all edges for this PHI, rather
            // than just one use at a time, to minimize the number of
            // bitcasts we emit.
            for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
              if (PHI->getIncomingBlock(i) == BB) {
                // Keep the UI iterator valid.
                if (&PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) ==
                    &UI.getUse())
                  ++UI;
                PHI->setIncomingValue(i, Replacement);
              }
          } else {
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            cast<Instruction>(U.getUser()));
            U.set(Replacement);
          }
        }
      }

      // If Arg is a no-op casted pointer, strip one level of casts and iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->mayBeOverridden())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else
        break;
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (SmallPtrSet<CallInst *, 8>::iterator I = StoreStrongCalls.begin(),
         E = StoreStrongCalls.end(); I != E; ++I)
      (*I)->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}