//===- ObjCARC.cpp - ObjC ARC Optimization --------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ObjC ARC optimizations. ARC stands for
// Automatic Reference Counting and is a system for managing reference counts
// for objects in Objective C.
//
// The optimizations performed include elimination of redundant, partially
// redundant, and inconsequential reference count operations, elimination of
// redundant weak pointer operations, pattern-matching and replacement of
// low-level operations into higher-level operations, and numerous minor
// simplifications.
//
// This file also defines a simple ARC-aware AliasAnalysis.
//
// WARNING: This file knows about certain library functions. It recognizes them
// by name, and hardwires knowledge of their semantics.
//
// WARNING: This file knows about how certain Objective-C library functions are
// used. Naive LLVM IR transformations which would otherwise be
// behavior-preserving may break these assumptions.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "objc-arc"
#include "llvm/Function.h"
#include "llvm/Intrinsics.h"
#include "llvm/GlobalVariable.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Module.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
using namespace llvm;

// A handy option to enable/disable all optimizations in this file.
static cl::opt<bool> EnableARCOpts("enable-objc-arc-opts", cl::init(true));

//===----------------------------------------------------------------------===//
// Misc. Utilities
//===----------------------------------------------------------------------===//

namespace {
  /// MapVector - An associative container with fast insertion-order
  /// (deterministic) iteration over its elements. Plus the special
  /// blot operation.
  template<class KeyT, class ValueT>
  class MapVector {
    /// Map - Map keys to indices in Vector.
    typedef DenseMap<KeyT, size_t> MapTy;
    MapTy Map;

    /// Vector - Keys and values.
    typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
    VectorTy Vector;

  public:
    typedef typename VectorTy::iterator iterator;
    typedef typename VectorTy::const_iterator const_iterator;
    iterator begin() { return Vector.begin(); }
    iterator end() { return Vector.end(); }
    const_iterator begin() const { return Vector.begin(); }
    const_iterator end() const { return Vector.end(); }

#ifdef XDEBUG
    ~MapVector() {
      assert(Vector.size() >= Map.size()); // May differ due to blotting.
      for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
           I != E; ++I) {
        assert(I->second < Vector.size());
        assert(Vector[I->second].first == I->first);
      }
      for (typename VectorTy::const_iterator I = Vector.begin(),
           E = Vector.end(); I != E; ++I)
        assert(!I->first ||
               (Map.count(I->first) &&
                Map[I->first] == size_t(I - Vector.begin())));
    }
#endif

    ValueT &operator[](KeyT Arg) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(Arg, size_t(0)));
      if (Pair.second) {
        Pair.first->second = Vector.size();
        Vector.push_back(std::make_pair(Arg, ValueT()));
        return Vector.back().second;
      }
      return Vector[Pair.first->second].second;
    }

    std::pair<iterator, bool>
    insert(const std::pair<KeyT, ValueT> &InsertPair) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(InsertPair.first, size_t(0)));
      if (Pair.second) {
        Pair.first->second = Vector.size();
        Vector.push_back(InsertPair);
        return std::make_pair(llvm::prior(Vector.end()), true);
      }
      return std::make_pair(Vector.begin() + Pair.first->second, false);
    }

    const_iterator find(KeyT Key) const {
      typename MapTy::const_iterator It = Map.find(Key);
      if (It == Map.end()) return Vector.end();
      return Vector.begin() + It->second;
    }

    /// blot - This is similar to erase, but instead of removing the element
    /// from the vector, it just zeros out the key in the vector. This leaves
    /// iterators intact, but clients must be prepared for zeroed-out keys when
    /// iterating.
    void blot(KeyT Key) {
      typename MapTy::iterator It = Map.find(Key);
      if (It == Map.end()) return;
      Vector[It->second].first = KeyT();
      Map.erase(It);
    }

    void clear() {
      Map.clear();
      Vector.clear();
    }
  };
}

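// Illustrative usage of MapVector's blot operation (a sketch, not part of the
// original pass; use() is a placeholder). Erasing during iteration would
// invalidate vector iterators, so clients blot instead and skip zeroed keys:
//
// \code
//   MapVector<const Value *, unsigned> MV;
//   MV[Ptr] = 42;   // Appended to the vector; indexed via the map.
//   MV.blot(Ptr);   // Key zeroed in the vector; iterators remain valid.
//   for (MapVector<const Value *, unsigned>::iterator I = MV.begin(),
//        E = MV.end(); I != E; ++I)
//     if (I->first)  // Skip blotted (zeroed-out) entries.
//       use(I->first, I->second);
// \endcode
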
//===----------------------------------------------------------------------===//
// ARC Utilities.
//===----------------------------------------------------------------------===//

namespace {
  /// InstructionClass - A simple classification for instructions.
  enum InstructionClass {
    IC_Retain,              ///< objc_retain
    IC_RetainRV,            ///< objc_retainAutoreleasedReturnValue
    IC_RetainBlock,         ///< objc_retainBlock
    IC_Release,             ///< objc_release
    IC_Autorelease,         ///< objc_autorelease
    IC_AutoreleaseRV,       ///< objc_autoreleaseReturnValue
    IC_AutoreleasepoolPush, ///< objc_autoreleasePoolPush
    IC_AutoreleasepoolPop,  ///< objc_autoreleasePoolPop
    IC_NoopCast,            ///< objc_retainedObject, etc.
    IC_FusedRetainAutorelease, ///< objc_retainAutorelease
    IC_FusedRetainAutoreleaseRV, ///< objc_retainAutoreleaseReturnValue
    IC_LoadWeakRetained,    ///< objc_loadWeakRetained (primitive)
    IC_StoreWeak,           ///< objc_storeWeak (primitive)
    IC_InitWeak,            ///< objc_initWeak (derived)
    IC_LoadWeak,            ///< objc_loadWeak (derived)
    IC_MoveWeak,            ///< objc_moveWeak (derived)
    IC_CopyWeak,            ///< objc_copyWeak (derived)
    IC_DestroyWeak,         ///< objc_destroyWeak (derived)
    IC_CallOrUser,          ///< could call objc_release and/or "use" pointers
    IC_Call,                ///< could call objc_release
    IC_User,                ///< could "use" a pointer
    IC_None                 ///< anything else
  };
}

/// IsPotentialUse - Test whether the given value is possibly a
/// reference-counted pointer.
static bool IsPotentialUse(const Value *Op) {
  // Pointers to static or stack storage are not reference-counted pointers.
  if (isa<Constant>(Op) || isa<AllocaInst>(Op))
    return false;
  // Special arguments are not reference-counted.
  if (const Argument *Arg = dyn_cast<Argument>(Op))
    if (Arg->hasByValAttr() ||
        Arg->hasNestAttr() ||
        Arg->hasStructRetAttr())
      return false;
  // Only consider values with pointer types, and not function pointers.
  PointerType *Ty = dyn_cast<PointerType>(Op->getType());
  if (!Ty || isa<FunctionType>(Ty->getElementType()))
    return false;
  // Conservatively assume anything else is a potential use.
  return true;
}

/// GetCallSiteClass - Helper for GetInstructionClass. Determines what kind
/// of construct CS is.
static InstructionClass GetCallSiteClass(ImmutableCallSite CS) {
  for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
    if (IsPotentialUse(*I))
      return CS.onlyReadsMemory() ? IC_User : IC_CallOrUser;

  return CS.onlyReadsMemory() ? IC_None : IC_Call;
}

/// GetFunctionClass - Determine if F is one of the special known Functions.
/// If it isn't, return IC_CallOrUser.
static InstructionClass GetFunctionClass(const Function *F) {
  Function::const_arg_iterator AI = F->arg_begin(), AE = F->arg_end();

  // No arguments.
  if (AI == AE)
    return StringSwitch<InstructionClass>(F->getName())
      .Case("objc_autoreleasePoolPush", IC_AutoreleasepoolPush)
      .Default(IC_CallOrUser);

  // One argument.
  const Argument *A0 = AI++;
  if (AI == AE)
    // Argument is a pointer.
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType())) {
      Type *ETy = PTy->getElementType();
      // Argument is i8*.
      if (ETy->isIntegerTy(8))
        return StringSwitch<InstructionClass>(F->getName())
          .Case("objc_retain",                IC_Retain)
          .Case("objc_retainAutoreleasedReturnValue", IC_RetainRV)
          .Case("objc_retainBlock",           IC_RetainBlock)
          .Case("objc_release",               IC_Release)
          .Case("objc_autorelease",           IC_Autorelease)
          .Case("objc_autoreleaseReturnValue", IC_AutoreleaseRV)
          .Case("objc_autoreleasePoolPop",    IC_AutoreleasepoolPop)
          .Case("objc_retainedObject",        IC_NoopCast)
          .Case("objc_unretainedObject",      IC_NoopCast)
          .Case("objc_unretainedPointer",     IC_NoopCast)
          .Case("objc_retain_autorelease",    IC_FusedRetainAutorelease)
          .Case("objc_retainAutorelease",     IC_FusedRetainAutorelease)
          .Case("objc_retainAutoreleaseReturnValue", IC_FusedRetainAutoreleaseRV)
          .Default(IC_CallOrUser);

      // Argument is i8**.
      if (PointerType *Pte = dyn_cast<PointerType>(ETy))
        if (Pte->getElementType()->isIntegerTy(8))
          return StringSwitch<InstructionClass>(F->getName())
            .Case("objc_loadWeakRetained",    IC_LoadWeakRetained)
            .Case("objc_loadWeak",            IC_LoadWeak)
            .Case("objc_destroyWeak",         IC_DestroyWeak)
            .Default(IC_CallOrUser);
    }

  // Two arguments, first is i8**.
  const Argument *A1 = AI++;
  if (AI == AE)
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType()))
      if (PointerType *Pte = dyn_cast<PointerType>(PTy->getElementType()))
        if (Pte->getElementType()->isIntegerTy(8))
          if (PointerType *PTy1 = dyn_cast<PointerType>(A1->getType())) {
            Type *ETy1 = PTy1->getElementType();
            // Second argument is i8*.
            if (ETy1->isIntegerTy(8))
              return StringSwitch<InstructionClass>(F->getName())
                .Case("objc_storeWeak",       IC_StoreWeak)
                .Case("objc_initWeak",        IC_InitWeak)
                .Default(IC_CallOrUser);
            // Second argument is i8**.
            if (PointerType *Pte1 = dyn_cast<PointerType>(ETy1))
              if (Pte1->getElementType()->isIntegerTy(8))
                return StringSwitch<InstructionClass>(F->getName())
                  .Case("objc_moveWeak",      IC_MoveWeak)
                  .Case("objc_copyWeak",      IC_CopyWeak)
                  .Default(IC_CallOrUser);
          }

  // Anything else.
  return IC_CallOrUser;
}

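// For example (illustrative, assuming a Module M that declares the usual ARC
// entry points such as "declare i8* @objc_retain(i8*)"):
//
// \code
//   GetFunctionClass(M->getFunction("objc_retain"))    // IC_Retain
//   GetFunctionClass(M->getFunction("objc_storeWeak")) // IC_StoreWeak
//   GetFunctionClass(M->getFunction("printf"))         // IC_CallOrUser
// \endcode
//
// Classification is by name and signature together; a function named
// objc_retain with an unexpected prototype falls back to IC_CallOrUser.
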
/// GetInstructionClass - Determine what kind of construct V is.
static InstructionClass GetInstructionClass(const Value *V) {
  if (const Instruction *I = dyn_cast<Instruction>(V)) {
    // Any instruction other than bitcast and gep with a pointer operand have a
    // use of an objc pointer. Bitcasts, GEPs, Selects, PHIs transfer a pointer
    // to a subsequent use, rather than using it themselves, in this sense.
    // As a short cut, several other opcodes are known to have no pointer
    // operands of interest. And ret is never followed by a release, so it's
    // not interesting to examine.
    switch (I->getOpcode()) {
    case Instruction::Call: {
      const CallInst *CI = cast<CallInst>(I);
      // Check for calls to special functions.
      if (const Function *F = CI->getCalledFunction()) {
        InstructionClass Class = GetFunctionClass(F);
        if (Class != IC_CallOrUser)
          return Class;

        // None of the intrinsic functions do objc_release. For intrinsics, the
        // only question is whether or not they may be users.
        switch (F->getIntrinsicID()) {
        case 0: break;
        case Intrinsic::bswap: case Intrinsic::ctpop:
        case Intrinsic::ctlz: case Intrinsic::cttz:
        case Intrinsic::returnaddress: case Intrinsic::frameaddress:
        case Intrinsic::stacksave: case Intrinsic::stackrestore:
        case Intrinsic::vastart: case Intrinsic::vacopy: case Intrinsic::vaend:
        // Don't let dbg info affect our results.
        case Intrinsic::dbg_declare: case Intrinsic::dbg_value:
          // Short cut: Some intrinsics obviously don't use ObjC pointers.
          return IC_None;
        }

        // If any of the known arguments is a potential use, classify the
        // call by its call site; otherwise the call is inert for ARC.
        for (Function::const_arg_iterator AI = F->arg_begin(),
             AE = F->arg_end(); AI != AE; ++AI)
          if (IsPotentialUse(AI))
            return GetCallSiteClass(CI);
        return IC_None;
      }

      return GetCallSiteClass(CI);
    }
    case Instruction::Invoke:
      return GetCallSiteClass(cast<InvokeInst>(I));
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::Select: case Instruction::PHI:
    case Instruction::Ret: case Instruction::Br:
    case Instruction::Switch: case Instruction::IndirectBr:
    case Instruction::Alloca: case Instruction::VAArg:
    case Instruction::Add: case Instruction::FAdd:
    case Instruction::Sub: case Instruction::FSub:
    case Instruction::Mul: case Instruction::FMul:
    case Instruction::SDiv: case Instruction::UDiv: case Instruction::FDiv:
    case Instruction::SRem: case Instruction::URem: case Instruction::FRem:
    case Instruction::Shl: case Instruction::LShr: case Instruction::AShr:
    case Instruction::And: case Instruction::Or: case Instruction::Xor:
    case Instruction::SExt: case Instruction::ZExt: case Instruction::Trunc:
    case Instruction::IntToPtr: case Instruction::FCmp:
    case Instruction::FPTrunc: case Instruction::FPExt:
    case Instruction::FPToUI: case Instruction::FPToSI:
    case Instruction::UIToFP: case Instruction::SIToFP:
    case Instruction::InsertElement: case Instruction::ExtractElement:
    case Instruction::ShuffleVector:
    case Instruction::ExtractValue:
      break;
    case Instruction::ICmp:
      // Comparing a pointer with null, or any other constant, isn't an
      // interesting use, because we don't care what the pointer points to, or
      // about the values of any other dynamic reference-counted pointers.
      if (IsPotentialUse(I->getOperand(1)))
        return IC_User;
      break;
    default:
      // For anything else, check all the operands.
      for (User::const_op_iterator OI = I->op_begin(), OE = I->op_end();
           OI != OE; ++OI)
        if (IsPotentialUse(*OI))
          return IC_User;
    }
  }

  // Otherwise, it's totally inert for ARC purposes.
  return IC_None;
}

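// For example (illustrative IR): "%c = icmp eq i8* %x, null" is classified as
// IC_None, because null is not a potential use, while "%c = icmp eq i8* %x, %y"
// is IC_User, because the comparison may observe %y's identity.
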
/// GetBasicInstructionClass - Determine what kind of construct V is. This is
/// similar to GetInstructionClass except that it only detects objc runtime
/// calls. This allows it to be faster.
static InstructionClass GetBasicInstructionClass(const Value *V) {
  if (const CallInst *CI = dyn_cast<CallInst>(V)) {
    if (const Function *F = CI->getCalledFunction())
      return GetFunctionClass(F);
    // Otherwise, be conservative.
    return IC_CallOrUser;
  }

  // Otherwise, be conservative.
  return IC_User;
}

/// IsRetain - Test if the given class is objc_retain or
/// equivalent.
static bool IsRetain(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV;
}

/// IsAutorelease - Test if the given class is objc_autorelease or
/// equivalent.
static bool IsAutorelease(InstructionClass Class) {
  return Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}

/// IsForwarding - Test if the given class represents instructions which return
/// their argument verbatim.
static bool IsForwarding(InstructionClass Class) {
  // objc_retainBlock technically doesn't always return its argument
  // verbatim, but it doesn't matter for our purposes here.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock ||
         Class == IC_NoopCast;
}

/// IsNoopOnNull - Test if the given class represents instructions which do
/// nothing if passed a null pointer.
static bool IsNoopOnNull(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock;
}

/// IsAlwaysTail - Test if the given class represents instructions which are
/// always safe to mark with the "tail" keyword.
static bool IsAlwaysTail(InstructionClass Class) {
  // IC_RetainBlock may be given a stack argument.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}

/// IsNoThrow - Test if the given class represents instructions which are always
/// safe to mark with the nounwind attribute.
static bool IsNoThrow(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_RetainBlock ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_AutoreleasepoolPush ||
         Class == IC_AutoreleasepoolPop;
}

/// EraseInstruction - Erase the given instruction. ObjC calls return their
/// argument verbatim, so if it's such a call and the return value has users,
/// replace them with the argument value.
static void EraseInstruction(Instruction *CI) {
  Value *OldArg = cast<CallInst>(CI)->getArgOperand(0);

  bool Unused = CI->use_empty();

  if (!Unused) {
    // Replace the return value with the argument.
    assert(IsForwarding(GetBasicInstructionClass(CI)) &&
           "Can't delete non-forwarding instruction with users!");
    CI->replaceAllUsesWith(OldArg);
  }

  CI->eraseFromParent();

  if (Unused)
    RecursivelyDeleteTriviallyDeadInstructions(OldArg);
}

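// Illustrative sketch: given "%y = call i8* @objc_retain(i8* %x)" where %y
// still has users, EraseInstruction rewrites those users to refer to %x
// directly before deleting the call, which is safe because objc_retain
// returns its argument verbatim.
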
/// GetUnderlyingObjCPtr - This is a wrapper around getUnderlyingObject which
/// also knows how to look through objc_retain and objc_autorelease calls, which
/// we know to return their argument verbatim.
static const Value *GetUnderlyingObjCPtr(const Value *V) {
  for (;;) {
    V = GetUnderlyingObject(V);
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}

/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static const Value *StripPointerCastsAndObjCCalls(const Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}

/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static Value *StripPointerCastsAndObjCCalls(Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}

/// GetObjCArg - Assuming the given instruction is one of the special calls such
/// as objc_retain or objc_release, return the argument value, stripped of no-op
/// casts and forwarding calls.
static Value *GetObjCArg(Value *Inst) {
  return StripPointerCastsAndObjCCalls(cast<CallInst>(Inst)->getArgOperand(0));
}

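// For example (illustrative IR), given:
//
// \code
//   %0 = bitcast i8** %p to i8*
//   %1 = call i8* @objc_retain(i8* %0)
//   call void @objc_release(i8* %1)
// \endcode
//
// GetObjCArg on the objc_release call looks through the forwarding
// objc_retain and the no-op bitcast, and returns %p.
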
/// IsObjCIdentifiedObject - This is similar to AliasAnalysis'
/// isObjCIdentifiedObject, except that it uses special knowledge of
/// ObjC conventions...
static bool IsObjCIdentifiedObject(const Value *V) {
  // Assume that call results and arguments have their own "provenance".
  // Constants (including GlobalVariables) and Allocas are never
  // reference-counted.
  if (isa<CallInst>(V) || isa<InvokeInst>(V) ||
      isa<Argument>(V) || isa<Constant>(V) ||
      isa<AllocaInst>(V))
    return true;

  if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
    const Value *Pointer =
      StripPointerCastsAndObjCCalls(LI->getPointerOperand());
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Pointer)) {
      StringRef Name = GV->getName();
      // These special variables are known to hold values which are not
      // reference-counted pointers.
      if (Name.startswith("\01L_OBJC_SELECTOR_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_SUP_REFS_$_") ||
          Name.startswith("\01L_OBJC_METH_VAR_NAME_") ||
          Name.startswith("\01l_objc_msgSend_fixup_"))
        return true;
    }
  }

  return false;
}

/// FindSingleUseIdentifiedObject - This is similar to
/// StripPointerCastsAndObjCCalls but it stops as soon as it finds a value
/// with multiple uses.
static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
  if (Arg->hasOneUse()) {
    if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
      return FindSingleUseIdentifiedObject(BC->getOperand(0));
    if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
      if (GEP->hasAllZeroIndices())
        return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
    if (IsForwarding(GetBasicInstructionClass(Arg)))
      return FindSingleUseIdentifiedObject(
               cast<CallInst>(Arg)->getArgOperand(0));
    if (!IsObjCIdentifiedObject(Arg))
      return 0;
    return Arg;
  }

  // If we found an identifiable object whose uses are all trivial, we can
  // still consider this to be a single-use value.
  if (IsObjCIdentifiedObject(Arg)) {
    for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
         UI != UE; ++UI) {
      const User *U = *UI;
      if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
        return 0;
    }

    return Arg;
  }

  return 0;
}

/// ModuleHasARC - Test if the given module looks interesting to run ARC
/// optimization on.
static bool ModuleHasARC(const Module &M) {
  return
    M.getNamedValue("objc_retain") ||
    M.getNamedValue("objc_release") ||
    M.getNamedValue("objc_autorelease") ||
    M.getNamedValue("objc_retainAutoreleasedReturnValue") ||
    M.getNamedValue("objc_retainBlock") ||
    M.getNamedValue("objc_autoreleaseReturnValue") ||
    M.getNamedValue("objc_autoreleasePoolPush") ||
    M.getNamedValue("objc_loadWeakRetained") ||
    M.getNamedValue("objc_loadWeak") ||
    M.getNamedValue("objc_destroyWeak") ||
    M.getNamedValue("objc_storeWeak") ||
    M.getNamedValue("objc_initWeak") ||
    M.getNamedValue("objc_moveWeak") ||
    M.getNamedValue("objc_copyWeak") ||
    M.getNamedValue("objc_retainedObject") ||
    M.getNamedValue("objc_unretainedObject") ||
    M.getNamedValue("objc_unretainedPointer");
}

//===----------------------------------------------------------------------===//
// ARC AliasAnalysis.
//===----------------------------------------------------------------------===//

#include "llvm/Pass.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Passes.h"

namespace {
  /// ObjCARCAliasAnalysis - This is a simple alias analysis
  /// implementation that uses knowledge of ARC constructs to answer queries.
  ///
  /// TODO: This class could be generalized to know about other ObjC-specific
  /// tricks. Such as knowing that ivars in the non-fragile ABI are non-aliasing
  /// even though their offsets are dynamic.
  class ObjCARCAliasAnalysis : public ImmutablePass,
                               public AliasAnalysis {
  public:
    static char ID; // Class identification, replacement for typeinfo
    ObjCARCAliasAnalysis() : ImmutablePass(ID) {
      initializeObjCARCAliasAnalysisPass(*PassRegistry::getPassRegistry());
    }

  private:
    virtual void initializePass() {
      InitializeAliasAnalysis(this);
    }

    /// getAdjustedAnalysisPointer - This method is used when a pass implements
    /// an analysis interface through multiple inheritance. If needed, it
    /// should override this to adjust the this pointer as needed for the
    /// specified pass info.
    virtual void *getAdjustedAnalysisPointer(const void *PI) {
      if (PI == &AliasAnalysis::ID)
        return (AliasAnalysis*)this;
      return this;
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual AliasResult alias(const Location &LocA, const Location &LocB);
    virtual bool pointsToConstantMemory(const Location &Loc, bool OrLocal);
    virtual ModRefBehavior getModRefBehavior(ImmutableCallSite CS);
    virtual ModRefBehavior getModRefBehavior(const Function *F);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS,
                                       const Location &Loc);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS1,
                                       ImmutableCallSite CS2);
  };
}  // End of anonymous namespace

// Register this pass...
char ObjCARCAliasAnalysis::ID = 0;
INITIALIZE_AG_PASS(ObjCARCAliasAnalysis, AliasAnalysis, "objc-arc-aa",
                   "ObjC-ARC-Based Alias Analysis", false, true, false)

ImmutablePass *llvm::createObjCARCAliasAnalysisPass() {
  return new ObjCARCAliasAnalysis();
}

void
ObjCARCAliasAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AliasAnalysis::getAnalysisUsage(AU);
}

AliasAnalysis::AliasResult
ObjCARCAliasAnalysis::alias(const Location &LocA, const Location &LocB) {
  if (!EnableARCOpts)
    return AliasAnalysis::alias(LocA, LocB);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making a
  // precise alias query.
  const Value *SA = StripPointerCastsAndObjCCalls(LocA.Ptr);
  const Value *SB = StripPointerCastsAndObjCCalls(LocB.Ptr);
  AliasResult Result =
    AliasAnalysis::alias(Location(SA, LocA.Size, LocA.TBAATag),
                         Location(SB, LocB.Size, LocB.TBAATag));
  if (Result != MayAlias)
    return Result;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *UA = GetUnderlyingObjCPtr(SA);
  const Value *UB = GetUnderlyingObjCPtr(SB);
  if (UA != SA || UB != SB) {
    Result = AliasAnalysis::alias(Location(UA), Location(UB));
    // We can't use MustAlias or PartialAlias results here because
    // GetUnderlyingObjCPtr may return an offsetted pointer value.
    if (Result == NoAlias)
      return NoAlias;
  }

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return MayAlias;
}

bool
ObjCARCAliasAnalysis::pointsToConstantMemory(const Location &Loc,
                                             bool OrLocal) {
  if (!EnableARCOpts)
    return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making
  // a precise alias query.
  const Value *S = StripPointerCastsAndObjCCalls(Loc.Ptr);
  if (AliasAnalysis::pointsToConstantMemory(Location(S, Loc.Size, Loc.TBAATag),
                                            OrLocal))
    return true;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *U = GetUnderlyingObjCPtr(S);
  if (U != S)
    return AliasAnalysis::pointsToConstantMemory(Location(U), OrLocal);

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return false;
}

AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(ImmutableCallSite CS) {
  // We have nothing to do. Just chain to the next AliasAnalysis.
  return AliasAnalysis::getModRefBehavior(CS);
}

AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(const Function *F) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefBehavior(F);

  switch (GetFunctionClass(F)) {
  case IC_NoopCast:
    return DoesNotAccessMemory;
  default:
    break;
  }

  return AliasAnalysis::getModRefBehavior(F);
}

AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS, const Location &Loc) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefInfo(CS, Loc);

  switch (GetBasicInstructionClass(CS.getInstruction())) {
  case IC_Retain:
  case IC_RetainRV:
  case IC_RetainBlock:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_NoopCast:
  case IC_AutoreleasepoolPush:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    // These functions don't access any memory visible to the compiler.
    return NoModRef;
  default:
    break;
  }

  return AliasAnalysis::getModRefInfo(CS, Loc);
}

AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS1,
                                    ImmutableCallSite CS2) {
  // TODO: Theoretically we could check for dependencies between objc_* calls
  // and OnlyAccessesArgumentPointees calls or other well-behaved calls.
  return AliasAnalysis::getModRefInfo(CS1, CS2);
}

//===----------------------------------------------------------------------===//
// ARC expansion.
//===----------------------------------------------------------------------===//

#include "llvm/Support/InstIterator.h"
#include "llvm/Transforms/Scalar.h"

namespace {
  /// ObjCARCExpand - Early ARC transformations.
  class ObjCARCExpand : public FunctionPass {
    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

  public:
    static char ID;
    ObjCARCExpand() : FunctionPass(ID) {
      initializeObjCARCExpandPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCExpand::ID = 0;
INITIALIZE_PASS(ObjCARCExpand,
                "objc-arc-expand", "ObjC ARC expansion", false, false)

Pass *llvm::createObjCARCExpandPass() {
  return new ObjCARCExpand();
}

void ObjCARCExpand::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
}

bool ObjCARCExpand::doInitialization(Module &M) {
  Run = ModuleHasARC(M);
  return false;
}

bool ObjCARCExpand::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  bool Changed = false;

  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ++I) {
    Instruction *Inst = &*I;

    switch (GetBasicInstructionClass(Inst)) {
    case IC_Retain:
    case IC_RetainRV:
    case IC_Autorelease:
    case IC_AutoreleaseRV:
    case IC_FusedRetainAutorelease:
    case IC_FusedRetainAutoreleaseRV:
      // These calls return their argument verbatim, as a low-level
      // optimization. However, this makes high-level optimizations
      // harder. Undo any uses of this optimization that the front-end
      // emitted here. We'll redo them in a later pass.
      Changed = true;
      Inst->replaceAllUsesWith(cast<CallInst>(Inst)->getArgOperand(0));
      break;
    default:
      break;
    }
  }

  return Changed;
}

//===----------------------------------------------------------------------===//
// ARC optimization.
//===----------------------------------------------------------------------===//

// TODO: On code like this:
//
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
// stuff_that_cannot_release()
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
//
// The second retain and autorelease can be deleted.

// TODO: It should be possible to delete
// objc_autoreleasePoolPush and objc_autoreleasePoolPop
// pairs if nothing is actually autoreleased between them. Also, autorelease
// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
// after inlining) can be turned into plain release calls.

// TODO: Critical-edge splitting. If the optimal insertion point is
// a critical edge, the current algorithm has to fail, because it doesn't
// know how to split edges. It should be possible to make the optimizer
// think in terms of edges, rather than blocks, and then split critical
// edges on demand.

// TODO: OptimizeSequences could be generalized to be interprocedural.

// TODO: Recognize that a bunch of other objc runtime calls have
// non-escaping arguments and non-releasing arguments, and may be
// non-autoreleasing.

// TODO: Sink autorelease calls as far as possible. Unfortunately we
// usually can't sink them past other calls, which would be the main
// case where it would be useful.

/// TODO: The pointer returned from objc_loadWeakRetained is retained.

#include "llvm/GlobalAlias.h"
#include "llvm/Constants.h"
#include "llvm/LLVMContext.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/CFG.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/Statistic.h"

STATISTIC(NumNoops,        "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets,         "Number of return value forwarding "
                           "retain+autoreleases eliminated");
STATISTIC(NumRRs,          "Number of retain+release paths eliminated");
STATISTIC(NumPeeps,        "Number of calls peephole-optimized");

namespace {
  /// ProvenanceAnalysis - This is similar to BasicAliasAnalysis, and it
  /// uses many of the same techniques, except it uses special ObjC-specific
  /// reasoning about pointer relationships.
  class ProvenanceAnalysis {
    AliasAnalysis *AA;

    typedef std::pair<const Value *, const Value *> ValuePairTy;
    typedef DenseMap<ValuePairTy, bool> CachedResultsTy;
    CachedResultsTy CachedResults;

    bool relatedCheck(const Value *A, const Value *B);
    bool relatedSelect(const SelectInst *A, const Value *B);
    bool relatedPHI(const PHINode *A, const Value *B);

    // Do not implement.
    void operator=(const ProvenanceAnalysis &);
    ProvenanceAnalysis(const ProvenanceAnalysis &);

  public:
    ProvenanceAnalysis() {}

    void setAA(AliasAnalysis *aa) { AA = aa; }

    AliasAnalysis *getAA() const { return AA; }

    bool related(const Value *A, const Value *B);

    void clear() {
      CachedResults.clear();
    }
  };
}

bool ProvenanceAnalysis::relatedSelect(const SelectInst *A, const Value *B) {
  // If the values are Selects with the same condition, we can do a more precise
  // check: just check for relations between the values on corresponding arms.
  if (const SelectInst *SB = dyn_cast<SelectInst>(B))
    if (A->getCondition() == SB->getCondition()) {
      if (related(A->getTrueValue(), SB->getTrueValue()))
        return true;
      if (related(A->getFalseValue(), SB->getFalseValue()))
        return true;
      return false;
    }

  // Check both arms of the Select node individually.
  if (related(A->getTrueValue(), B))
    return true;
  if (related(A->getFalseValue(), B))
    return true;

  // The arms both checked out.
  return false;
}

bool ProvenanceAnalysis::relatedPHI(const PHINode *A, const Value *B) {
  // If the values are PHIs in the same block, we can do a more precise as well
  // as efficient check: just check for relations between the values on
  // corresponding edges.
  if (const PHINode *PNB = dyn_cast<PHINode>(B))
    if (PNB->getParent() == A->getParent()) {
      for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i)
        if (related(A->getIncomingValue(i),
                    PNB->getIncomingValueForBlock(A->getIncomingBlock(i))))
          return true;
      return false;
    }

  // Check each unique source of the PHI node against B.
  SmallPtrSet<const Value *, 4> UniqueSrc;
  for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
    const Value *PV1 = A->getIncomingValue(i);
    if (UniqueSrc.insert(PV1) && related(PV1, B))
      return true;
  }

  // All of the arms checked out.
  return false;
}

/// isStoredObjCPointer - Test if the value of P, or any value covered by its
/// provenance, is ever stored within the function (not counting callees).
static bool isStoredObjCPointer(const Value *P) {
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Worklist;
  Worklist.push_back(P);
  Visited.insert(P);
  do {
    P = Worklist.pop_back_val();
    for (Value::const_use_iterator UI = P->use_begin(), UE = P->use_end();
         UI != UE; ++UI) {
      const User *Ur = *UI;
      if (isa<StoreInst>(Ur)) {
        if (UI.getOperandNo() == 0)
          // The pointer is stored.
          return true;
        // The pointer is being stored through; it is not itself stored.
        continue;
      }
      if (isa<CallInst>(Ur))
        // The pointer is passed as an argument, ignore this.
        continue;
      if (isa<PtrToIntInst>(P))
        // Assume the worst.
        return true;
      if (Visited.insert(Ur))
        Worklist.push_back(Ur);
    }
  } while (!Worklist.empty());

  // Everything checked out.
  return false;
}

bool ProvenanceAnalysis::relatedCheck(const Value *A, const Value *B) {
  // Skip past provenance pass-throughs.
  A = GetUnderlyingObjCPtr(A);
  B = GetUnderlyingObjCPtr(B);

  // Quick check.
  if (A == B)
    return true;

  // Ask regular AliasAnalysis, for a first approximation.
  switch (AA->alias(A, B)) {
  case AliasAnalysis::NoAlias:
    return false;
  case AliasAnalysis::MustAlias:
  case AliasAnalysis::PartialAlias:
    return true;
  case AliasAnalysis::MayAlias:
    break;
  }

  bool AIsIdentified = IsObjCIdentifiedObject(A);
  bool BIsIdentified = IsObjCIdentifiedObject(B);

  // An ObjC-Identified object can't alias a load if it is never locally stored.
  if (AIsIdentified) {
    if (BIsIdentified) {
      // If both pointers have provenance, they can be directly compared.
      if (A != B)
        return false;
    } else {
      if (isa<LoadInst>(B))
        return isStoredObjCPointer(A);
    }
  } else {
    if (BIsIdentified && isa<LoadInst>(A))
      return isStoredObjCPointer(B);
  }

  // Special handling for PHI and Select.
  if (const PHINode *PN = dyn_cast<PHINode>(A))
    return relatedPHI(PN, B);
  if (const PHINode *PN = dyn_cast<PHINode>(B))
    return relatedPHI(PN, A);
  if (const SelectInst *S = dyn_cast<SelectInst>(A))
    return relatedSelect(S, B);
  if (const SelectInst *S = dyn_cast<SelectInst>(B))
    return relatedSelect(S, A);

  // Conservative.
  return true;
}

bool ProvenanceAnalysis::related(const Value *A, const Value *B) {
  // Begin by inserting a conservative value into the map. If the insertion
  // fails, we have the answer already. If it succeeds, leave it there until we
  // compute the real answer to guard against recursive queries.
  if (A > B) std::swap(A, B);
  std::pair<CachedResultsTy::iterator, bool> Pair =
    CachedResults.insert(std::make_pair(ValuePairTy(A, B), true));
  if (!Pair.second)
    return Pair.first->second;

  bool Result = relatedCheck(A, B);
  CachedResults[ValuePairTy(A, B)] = Result;
  return Result;
}

namespace {
  // Sequence - A sequence of states that a pointer may go through in which an
  // objc_retain and objc_release are actually needed.
  enum Sequence {
    S_None,
    S_Retain,         ///< objc_retain(x)
    S_CanRelease,     ///< foo(x) -- x could possibly see a ref count decrement
    S_Use,            ///< any use of x
    S_Stop,           ///< like S_Release, but code motion is stopped
    S_Release,        ///< objc_release(x)
    S_MovableRelease  ///< objc_release(x), !clang.imprecise_release
  };
}

static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // The easy cases.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;

  if (A > B) std::swap(A, B);
  if (TopDown) {
    // Choose the side which is further along in the sequence.
    if ((A == S_Retain || A == S_CanRelease) &&
        (B == S_CanRelease || B == S_Use))
      return B;
  } else {
    // Choose the side which is further along in the sequence.
    if ((A == S_Use || A == S_CanRelease) &&
        (B == S_Use || B == S_Release || B == S_Stop || B == S_MovableRelease))
      return A;
    // If both sides are releases, choose the more conservative one.
    if (A == S_Stop && (B == S_Release || B == S_MovableRelease))
      return A;
    if (A == S_Release && B == S_MovableRelease)
      return A;
  }

  return S_None;
}

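// Worked example: merging S_Retain with S_Use in the top-down direction
// yields S_Use (the state further along the sequence), while merging S_Stop
// with S_MovableRelease in the bottom-up direction yields S_Stop (the more
// conservative release). Any disagreement not handled above conservatively
// collapses to S_None.
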
namespace {
  /// RRInfo - Unidirectional information about either a
  /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// KnownIncremented - After an objc_retain, the reference count of the
    /// referenced object is known to be positive. Similarly, before an
    /// objc_release, the reference count of the referenced object is known to
    /// be positive. If there are retain-release pairs in code regions where the
    /// retain count is known to be positive, they can be eliminated, regardless
    /// of any side effects between them.
    bool KnownIncremented;

    /// IsRetainBlock - True if the Calls are objc_retainBlock calls (as
    /// opposed to objc_retain calls).
    bool IsRetainBlock;

    /// IsTailCallRelease - True if the objc_release calls are all marked
    /// with the "tail" keyword.
    bool IsTailCallRelease;

    /// ReleaseMetadata - If the Calls are objc_release calls and they all have
    /// a clang.imprecise_release tag, this is the metadata tag.
    MDNode *ReleaseMetadata;

    /// Calls - For a top-down sequence, the set of objc_retains or
    /// objc_retainBlocks. For bottom-up, the set of objc_releases.
    SmallPtrSet<Instruction *, 2> Calls;

    /// ReverseInsertPts - The set of optimal insert positions for
    /// moving calls in the opposite sequence.
    SmallPtrSet<Instruction *, 2> ReverseInsertPts;

    RRInfo() :
      KnownIncremented(false), IsRetainBlock(false), IsTailCallRelease(false),
      ReleaseMetadata(0) {}

    void clear();
  };
}

void RRInfo::clear() {
  KnownIncremented = false;
  IsRetainBlock = false;
  IsTailCallRelease = false;
  ReleaseMetadata = 0;
  Calls.clear();
  ReverseInsertPts.clear();
}

namespace {
  /// PtrState - This class summarizes several per-pointer runtime properties
  /// which are propagated through the flow graph.
  struct PtrState {
    /// RefCount - The known minimum number of reference count increments.
    unsigned RefCount;

    /// Seq - The current position in the sequence.
    Sequence Seq;

    /// RRI - Unidirectional information about the current sequence.
    /// TODO: Encapsulate this better.
    RRInfo RRI;

    PtrState() : RefCount(0), Seq(S_None) {}

    void SetAtLeastOneRefCount() {
      if (RefCount == 0) RefCount = 1;
    }

    void IncrementRefCount() {
      if (RefCount != UINT_MAX) ++RefCount;
    }

    void DecrementRefCount() {
      if (RefCount != 0) --RefCount;
    }

    bool IsKnownIncremented() const {
      return RefCount > 0;
    }

    void SetSeq(Sequence NewSeq) {
      Seq = NewSeq;
    }

    void SetSeqToRelease(MDNode *M) {
      if (Seq == S_None || Seq == S_Use) {
        Seq = M ? S_MovableRelease : S_Release;
        RRI.ReleaseMetadata = M;
      } else if (Seq != S_MovableRelease || RRI.ReleaseMetadata != M) {
        Seq = S_Release;
        RRI.ReleaseMetadata = 0;
      }
    }

    Sequence GetSeq() const {
      return Seq;
    }

    void ClearSequenceProgress() {
      Seq = S_None;
      RRI.clear();
    }

    void Merge(const PtrState &Other, bool TopDown);
  };
}

void
PtrState::Merge(const PtrState &Other, bool TopDown) {
  Seq = MergeSeqs(Seq, Other.Seq, TopDown);
  RefCount = std::min(RefCount, Other.RefCount);

  // We can't merge a plain objc_retain with an objc_retainBlock.
  if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
    Seq = S_None;

  if (Seq == S_None) {
    RRI.clear();
  } else {
    // Conservatively merge the ReleaseMetadata information.
    if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
      RRI.ReleaseMetadata = 0;

    RRI.KnownIncremented = RRI.KnownIncremented && Other.RRI.KnownIncremented;
    RRI.IsTailCallRelease = RRI.IsTailCallRelease && Other.RRI.IsTailCallRelease;
    RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());
    RRI.ReverseInsertPts.insert(Other.RRI.ReverseInsertPts.begin(),
                                Other.RRI.ReverseInsertPts.end());
  }
}

namespace {
  /// BBState - Per-BasicBlock state.
  class BBState {
    /// TopDownPathCount - The number of unique control paths from the entry
    /// which can reach this block.
    unsigned TopDownPathCount;

    /// BottomUpPathCount - The number of unique control paths to exits
    /// from this block.
    unsigned BottomUpPathCount;

    /// MapTy - A type for PerPtrTopDown and PerPtrBottomUp.
    typedef MapVector<const Value *, PtrState> MapTy;

    /// PerPtrTopDown - The top-down traversal uses this to record information
    /// known about a pointer at the bottom of each block.
    MapTy PerPtrTopDown;

    /// PerPtrBottomUp - The bottom-up traversal uses this to record information
    /// known about a pointer at the top of each block.
    MapTy PerPtrBottomUp;

  public:
    BBState() : TopDownPathCount(0), BottomUpPathCount(0) {}

    typedef MapTy::iterator ptr_iterator;
    typedef MapTy::const_iterator ptr_const_iterator;

    ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    ptr_const_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    ptr_const_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }

    ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
    ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    ptr_const_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    ptr_const_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }

    /// SetAsEntry - Mark this block as being an entry block, which has one
    /// path from the entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// SetAsExit - Mark this block as being an exit block, which has one
    /// path to an exit by definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    PtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    PtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// GetAllPathCount - Return the number of possible unique paths from an
    /// entry to an exit which pass through this block. This is only valid
    /// after both the top-down and bottom-up traversals are complete.
    unsigned GetAllPathCount() const {
      return TopDownPathCount * BottomUpPathCount;
    }

    /// IsVisitedTopDown - Test whether the block for this BBState has been
    /// visited by the top-down portion of the algorithm.
    bool isVisitedTopDown() const {
      return TopDownPathCount != 0;
    }
  };
}

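// Worked example: in a diamond CFG -- an entry block branching to blocks A
// and B, which both jump to a common merge block -- the merge block has
// TopDownPathCount == 2 and BottomUpPathCount == 1, so GetAllPathCount()
// returns 2: both entry-to-exit paths pass through it.
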
void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

/// MergePred - The top-down traversal uses this to merge information about
/// predecessors to form the initial state for a new block.
void BBState::MergePred(const BBState &Other) {
  // Other.TopDownPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  TopDownPathCount += Other.TopDownPathCount;

  // For each entry in the other set, if our set has an entry with the same key,
  // merge the entries. Otherwise, copy the entry and merge it with an empty
  // entry.
  for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
       ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set, if the other set doesn't have an entry with the
  // same key, force it to merge with an empty entry.
  for (ptr_iterator MI = top_down_ptr_begin(),
       ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(PtrState(), /*TopDown=*/true);
}

/// MergeSucc - The bottom-up traversal uses this to merge information about
/// successors to form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  BottomUpPathCount += Other.BottomUpPathCount;

  // For each entry in the other set, if our set has an entry with the
  // same key, merge the entries. Otherwise, copy the entry and merge
  // it with an empty entry.
  for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
       ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set, if the other set doesn't have an entry
  // with the same key, force it to merge with an empty entry.
  for (ptr_iterator MI = bottom_up_ptr_begin(),
       ME = bottom_up_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(PtrState(), /*TopDown=*/false);
}

namespace {
  /// ObjCARCOpt - The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    bool Changed;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// RetainRVCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them. These are initialized
    /// lazily to avoid cluttering up the Module with unused declarations.
    Constant *RetainRVCallee, *AutoreleaseRVCallee, *ReleaseCallee,
             *RetainCallee, *RetainBlockCallee, *AutoreleaseCallee;

    /// UsedInThisFunction - Flags which determine whether each of the
    /// interesting runtime functions is in fact used in the current function.
    unsigned UsedInThisFunction;

    /// ImpreciseReleaseMDKind - The Metadata Kind for clang.imprecise_release
    /// metadata.
    unsigned ImpreciseReleaseMDKind;

    Constant *getRetainRVCallee(Module *M);
    Constant *getAutoreleaseRVCallee(Module *M);
    Constant *getReleaseCallee(Module *M);
    Constant *getRetainCallee(Module *M);
    Constant *getRetainBlockCallee(Module *M);
    Constant *getAutoreleaseCallee(Module *M);

    void OptimizeRetainCall(Function &F, Instruction *Retain);
    bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
    void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV);
    void OptimizeIndividualCalls(Function &F);

    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    bool VisitBottomUp(BasicBlock *BB,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       MapVector<Value *, RRInfo> &Retains);
    bool VisitTopDown(BasicBlock *BB,
                      DenseMap<const BasicBlock *, BBState> &BBStates,
                      DenseMap<Value *, RRInfo> &Releases);
    bool Visit(Function &F,
               DenseMap<const BasicBlock *, BBState> &BBStates,
               MapVector<Value *, RRInfo> &Retains,
               DenseMap<Value *, RRInfo> &Releases);

    void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                   MapVector<Value *, RRInfo> &Retains,
                   DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts,
                   Module *M);

    bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
                              MapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases,
                              Module *M);

    void OptimizeWeakCalls(Function &F);

    bool OptimizeSequences(Function &F);

    void OptimizeReturns(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);
    virtual void releaseMemory();

  public:
    static char ID;
    ObjCARCOpt() : FunctionPass(ID) {
      initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCOpt::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)

Pass *llvm::createObjCARCOptPass() {
  return new ObjCARCOpt();
}

void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<ObjCARCAliasAnalysis>();
  AU.addRequired<AliasAnalysis>();
  // ARC optimization doesn't currently split critical edges.
  AU.setPreservesCFG();
}

Constant *ObjCARCOpt::getRetainRVCallee(Module *M) {
  if (!RetainRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainRVCallee =
      M->getOrInsertFunction("objc_retainAutoreleasedReturnValue", FTy,
                             Attributes);
  }
  return RetainRVCallee;
}

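// Illustrative result: the first call to getRetainRVCallee inserts a
// declaration equivalent to
//
// \code
//   declare i8* @objc_retainAutoreleasedReturnValue(i8*) nounwind
// \endcode
//
// into the module and caches it; the getXxxCallee helpers below follow the
// same lazy-declaration pattern.
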
Constant *ObjCARCOpt::getAutoreleaseRVCallee(Module *M) {
  if (!AutoreleaseRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    AutoreleaseRVCallee =
      M->getOrInsertFunction("objc_autoreleaseReturnValue", FTy,
                             Attributes);
  }
  return AutoreleaseRVCallee;
}

Constant *ObjCARCOpt::getReleaseCallee(Module *M) {
  if (!ReleaseCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    ReleaseCallee =
      M->getOrInsertFunction(
        "objc_release",
        FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
        Attributes);
  }
  return ReleaseCallee;
}

Constant *ObjCARCOpt::getRetainCallee(Module *M) {
  if (!RetainCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainCallee =
      M->getOrInsertFunction(
        "objc_retain",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
  }
  return RetainCallee;
}

Constant *ObjCARCOpt::getRetainBlockCallee(Module *M) {
  if (!RetainBlockCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainBlockCallee =
      M->getOrInsertFunction(
        "objc_retainBlock",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
  }
  return RetainBlockCallee;
}

Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) {
  if (!AutoreleaseCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    AutoreleaseCallee =
      M->getOrInsertFunction(
        "objc_autorelease",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
  }
  return AutoreleaseCallee;
}

/// CanAlterRefCount - Test whether the given instruction can result in a
/// reference count modification (positive or negative) for the pointer's
/// object.
static bool
CanAlterRefCount(const Instruction *Inst, const Value *Ptr,
                 ProvenanceAnalysis &PA, InstructionClass Class) {
  switch (Class) {
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_User:
    // These operations never directly modify a reference count.
    return false;
  default: break;
  }

  ImmutableCallSite CS = static_cast<const Value *>(Inst);
  assert(CS && "Only calls can alter reference counts!");

  // See if AliasAnalysis can help us with the call.
  AliasAnalysis::ModRefBehavior MRB = PA.getAA()->getModRefBehavior(CS);
  if (AliasAnalysis::onlyReadsMemory(MRB))
    return false;
  if (AliasAnalysis::onlyAccessesArgPointees(MRB)) {
    for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
         I != E; ++I) {
      const Value *Op = *I;
      if (IsPotentialUse(Op) && PA.related(Ptr, Op))
        return true;
    }
    return false;
  }

  // Assume the worst.
  return true;
}

/// CanUse - Test whether the given instruction can "use" the given pointer's
/// object in a way that requires the reference count to be positive.
static bool
CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
       InstructionClass Class) {
  // IC_Call operations (as opposed to IC_CallOrUser) never "use" objc pointers.
  if (Class == IC_Call)
    return false;

  // Consider various instructions which may have pointer arguments which are
  // not "uses".
  if (const ICmpInst *ICI = dyn_cast<ICmpInst>(Inst)) {
    // Comparing a pointer with null, or any other constant, isn't really a use,
    // because we don't care what the pointer points to, or about the values
    // of any other dynamic reference-counted pointers.
    if (!IsPotentialUse(ICI->getOperand(1)))
      return false;
  } else if (ImmutableCallSite CS = static_cast<const Value *>(Inst)) {
    // For calls, just check the arguments (and not the callee operand).
    for (ImmutableCallSite::arg_iterator OI = CS.arg_begin(),
         OE = CS.arg_end(); OI != OE; ++OI) {
      const Value *Op = *OI;
      if (IsPotentialUse(Op) && PA.related(Ptr, Op))
        return true;
    }
    return false;
  } else if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    // Special-case stores, because we don't care about the stored value, just
    // the store address.
    const Value *Op = GetUnderlyingObjCPtr(SI->getPointerOperand());
    // If we can't tell what the underlying object was, assume there is a
    // dependence.
    return IsPotentialUse(Op) && PA.related(Op, Ptr);
  }

  // Check each operand for a match.
  for (User::const_op_iterator OI = Inst->op_begin(), OE = Inst->op_end();
       OI != OE; ++OI) {
    const Value *Op = *OI;
    if (IsPotentialUse(Op) && PA.related(Ptr, Op))
      return true;
  }
  return false;
}

/// CanInterruptRV - Test whether the given instruction can autorelease
/// any pointer or cause an autoreleasepool pop.
static bool
CanInterruptRV(InstructionClass Class) {
  switch (Class) {
  case IC_AutoreleasepoolPop:
  case IC_CallOrUser:
  case IC_Call:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    return true;
  default:
    return false;
  }
}

/// DependenceKind - There are several kinds of dependence-like concepts in
/// use here.
enum DependenceKind {
  NeedsPositiveRetainCount,
  CanChangeRetainCount,
  RetainAutoreleaseDep,       ///< Blocks objc_retainAutorelease.
  RetainAutoreleaseRVDep,     ///< Blocks objc_retainAutoreleaseReturnValue.
  RetainRVDep                 ///< Blocks objc_retainAutoreleasedReturnValue.
};

/// Depends - Test if there can be dependencies on Inst through Arg. This
/// function only tests dependencies relevant for removing pairs of calls.
static bool
Depends(DependenceKind Flavor, Instruction *Inst, const Value *Arg,
        ProvenanceAnalysis &PA) {
  // If we've reached the definition of Arg, stop.
  if (Inst == Arg)
    return true;

  switch (Flavor) {
  case NeedsPositiveRetainCount: {
    InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
    case IC_AutoreleasepoolPush:
    case IC_None:
      return false;
    default:
      return CanUse(Inst, Arg, PA, Class);
    }
  }

  case CanChangeRetainCount: {
    InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
      // Conservatively assume this can decrement any count.
      return true;
    case IC_AutoreleasepoolPush:
    case IC_None:
      return false;
    default:
      return CanAlterRefCount(Inst, Arg, PA, Class);
    }
  }

  case RetainAutoreleaseDep:
    switch (GetBasicInstructionClass(Inst)) {
    case IC_AutoreleasepoolPop:
      // Don't merge an objc_autorelease with an objc_retain inside a different
      // autoreleasepool scope.
      return true;
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Nothing else matters for objc_retainAutorelease formation.
      return false;
    }
    break;

  case RetainAutoreleaseRVDep: {
    InstructionClass Class = GetBasicInstructionClass(Inst);
    switch (Class) {
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Anything that can autorelease interrupts
      // retainAutoreleaseReturnValue formation.
      return CanInterruptRV(Class);
    }
    break;
  }

  case RetainRVDep:
    return CanInterruptRV(GetBasicInstructionClass(Inst));
  }

  llvm_unreachable("Invalid dependence flavor");
  return true;
}

1787 /// FindDependencies - Walk up the CFG from StartPos (which is in StartBB) and
1788 /// find local and non-local dependencies on Arg.
1789 /// TODO: Cache results?
1791 FindDependencies(DependenceKind Flavor,
1793 BasicBlock *StartBB, Instruction *StartInst,
1794 SmallPtrSet<Instruction *, 4> &DependingInstructions,
1795 SmallPtrSet<const BasicBlock *, 4> &Visited,
1796 ProvenanceAnalysis &PA) {
1797 BasicBlock::iterator StartPos = StartInst;
1799 SmallVector<std::pair<BasicBlock *, BasicBlock::iterator>, 4> Worklist;
1800 Worklist.push_back(std::make_pair(StartBB, StartPos));
1802 std::pair<BasicBlock *, BasicBlock::iterator> Pair =
1803 Worklist.pop_back_val();
1804 BasicBlock *LocalStartBB = Pair.first;
1805 BasicBlock::iterator LocalStartPos = Pair.second;
1806 BasicBlock::iterator StartBBBegin = LocalStartBB->begin();
1808 if (LocalStartPos == StartBBBegin) {
1809 pred_iterator PI(LocalStartBB), PE(LocalStartBB, false);
1811 // If we've reached the function entry, produce a null dependence.
1812 DependingInstructions.insert(0);
1814 // Add the predecessors to the worklist.
1816 BasicBlock *PredBB = *PI;
1817 if (Visited.insert(PredBB))
1818 Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
1819 } while (++PI != PE);
1823 Instruction *Inst = --LocalStartPos;
1824 if (Depends(Flavor, Inst, Arg, PA)) {
1825 DependingInstructions.insert(Inst);
1829 } while (!Worklist.empty());
// Determine whether the original StartBB post-dominates all of the blocks we
// visited. If not, insert a sentinel indicating that most optimizations are
// not safe.
1834 for (SmallPtrSet<const BasicBlock *, 4>::const_iterator I = Visited.begin(),
1835 E = Visited.end(); I != E; ++I) {
1836 const BasicBlock *BB = *I;
1839 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
1840 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
1841 const BasicBlock *Succ = *SI;
1842 if (Succ != StartBB && !Visited.count(Succ)) {
1843 DependingInstructions.insert(reinterpret_cast<Instruction *>(-1));
1850 static bool isNullOrUndef(const Value *V) {
1851 return isa<ConstantPointerNull>(V) || isa<UndefValue>(V);
1854 static bool isNoopInstruction(const Instruction *I) {
1855 return isa<BitCastInst>(I) ||
1856 (isa<GetElementPtrInst>(I) &&
1857 cast<GetElementPtrInst>(I)->hasAllZeroIndices());
1860 /// OptimizeRetainCall - Turn objc_retain into
1861 /// objc_retainAutoreleasedReturnValue if the operand is a return value.
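///
/// A sketch of the rewrite, for illustration (@foo stands for any ordinary
/// call returning an object pointer):
///
///    %call = call i8* @foo()
///    %0 = call i8* @objc_retain(i8* %call)
/// =>
///    %call = call i8* @foo()
///    %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)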
1863 ObjCARCOpt::OptimizeRetainCall(Function &F, Instruction *Retain) {
1864 CallSite CS(GetObjCArg(Retain));
1865 Instruction *Call = CS.getInstruction();
1867 if (Call->getParent() != Retain->getParent()) return;
1869 // Check that the call is next to the retain.
1870 BasicBlock::iterator I = Call;
1872 while (isNoopInstruction(I)) ++I;
// Turn it into an objc_retainAutoreleasedReturnValue call.
1879 cast<CallInst>(Retain)->setCalledFunction(getRetainRVCallee(F.getParent()));
1882 /// OptimizeRetainRVCall - Turn objc_retainAutoreleasedReturnValue into
1883 /// objc_retain if the operand is not a return value. Or, if it can be
/// paired with an objc_autoreleaseReturnValue, delete the pair and return
/// true.
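///
/// Sketches of the two rewrites, for illustration (%p is a placeholder):
///
///    %0 = call i8* @objc_autoreleaseReturnValue(i8* %p)
///    %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %p)
/// =>
///    ; both calls deleted; they cancel out
///
/// and, when the operand is not a return value:
///
///    %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %p)
/// =>
///    %1 = call i8* @objc_retain(i8* %p)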
1887 ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
1888 // Check for the argument being from an immediately preceding call.
1889 Value *Arg = GetObjCArg(RetainRV);
1891 if (Instruction *Call = CS.getInstruction())
1892 if (Call->getParent() == RetainRV->getParent()) {
1893 BasicBlock::iterator I = Call;
1895 while (isNoopInstruction(I)) ++I;
1896 if (&*I == RetainRV)
1900 // Check for being preceded by an objc_autoreleaseReturnValue on the same
1901 // pointer. In this case, we can delete the pair.
1902 BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
1904 do --I; while (I != Begin && isNoopInstruction(I));
1905 if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
1906 GetObjCArg(I) == Arg) {
1909 EraseInstruction(I);
1910 EraseInstruction(RetainRV);
// Turn it into a plain objc_retain.
1918 cast<CallInst>(RetainRV)->setCalledFunction(getRetainCallee(F.getParent()));
1922 /// OptimizeAutoreleaseRVCall - Turn objc_autoreleaseReturnValue into
1923 /// objc_autorelease if the result is not used as a return value.
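///
/// A sketch of the rewrite, for illustration (%p is a placeholder):
///
///    %0 = call i8* @objc_autoreleaseReturnValue(i8* %p)
///    ; %0 is not returned and not consumed by a retainRV
/// =>
///    %0 = call i8* @objc_autorelease(i8* %p)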
1925 ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV) {
1926 // Check for a return of the pointer value.
1927 const Value *Ptr = GetObjCArg(AutoreleaseRV);
1928 SmallVector<const Value *, 2> Users;
1929 Users.push_back(Ptr);
1931 Ptr = Users.pop_back_val();
1932 for (Value::const_use_iterator UI = Ptr->use_begin(), UE = Ptr->use_end();
1934 const User *I = *UI;
1935 if (isa<ReturnInst>(I) || GetBasicInstructionClass(I) == IC_RetainRV)
1937 if (isa<BitCastInst>(I))
1940 } while (!Users.empty());
1944 cast<CallInst>(AutoreleaseRV)->
1945 setCalledFunction(getAutoreleaseCallee(F.getParent()));
1948 /// OptimizeIndividualCalls - Visit each call, one at a time, and make
1949 /// simplifications without doing any additional analysis.
1950 void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
1951 // Reset all the flags in preparation for recomputing them.
1952 UsedInThisFunction = 0;
1954 // Visit all objc_* calls in F.
1955 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
1956 Instruction *Inst = &*I++;
1957 InstructionClass Class = GetBasicInstructionClass(Inst);
1962 // Delete no-op casts. These function calls have special semantics, but
1963 // the semantics are entirely implemented via lowering in the front-end,
1964 // so by the time they reach the optimizer, they are just no-op calls
1965 // which return their argument.
// There are gray areas here, as the ability to cast reference-counted
// pointers to raw void* and back allows code to break ARC assumptions;
// however, these are currently considered to be unimportant.
1973 EraseInstruction(Inst);
1976 // If the pointer-to-weak-pointer is null, it's undefined behavior.
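// For example (a sketch; %x is illustrative):
//    %x = call i8* @objc_loadWeak(i8** null)
// is rewritten to store undef through the null pointer, preserving the
// trap, and all uses of %x are replaced with undef.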
1979 case IC_LoadWeakRetained:
1981 case IC_DestroyWeak: {
1982 CallInst *CI = cast<CallInst>(Inst);
1983 if (isNullOrUndef(CI->getArgOperand(0))) {
1984 Type *Ty = CI->getArgOperand(0)->getType();
1985 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
1986 Constant::getNullValue(Ty),
1988 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
1989 CI->eraseFromParent();
1996 CallInst *CI = cast<CallInst>(Inst);
1997 if (isNullOrUndef(CI->getArgOperand(0)) ||
1998 isNullOrUndef(CI->getArgOperand(1))) {
1999 Type *Ty = CI->getArgOperand(0)->getType();
2000 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
2001 Constant::getNullValue(Ty),
2003 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
2004 CI->eraseFromParent();
2010 OptimizeRetainCall(F, Inst);
2013 if (OptimizeRetainRVCall(F, Inst))
2016 case IC_AutoreleaseRV:
2017 OptimizeAutoreleaseRVCall(F, Inst);
2021 // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
2022 if (IsAutorelease(Class) && Inst->use_empty()) {
2023 CallInst *Call = cast<CallInst>(Inst);
2024 const Value *Arg = Call->getArgOperand(0);
2025 Arg = FindSingleUseIdentifiedObject(Arg);
2030 // Create the declaration lazily.
2031 LLVMContext &C = Inst->getContext();
2033 CallInst::Create(getReleaseCallee(F.getParent()),
2034 Call->getArgOperand(0), "", Call);
2035 NewCall->setMetadata(ImpreciseReleaseMDKind,
2036 MDNode::get(C, ArrayRef<Value *>()));
2037 EraseInstruction(Call);
// For functions which can never be passed stack arguments, add
// a tail keyword.
2045 if (IsAlwaysTail(Class)) {
2047 cast<CallInst>(Inst)->setTailCall();
2050 // Set nounwind as needed.
2051 if (IsNoThrow(Class)) {
2053 cast<CallInst>(Inst)->setDoesNotThrow();
2056 if (!IsNoopOnNull(Class)) {
2057 UsedInThisFunction |= 1 << Class;
2061 const Value *Arg = GetObjCArg(Inst);
2063 // ARC calls with null are no-ops. Delete them.
2064 if (isNullOrUndef(Arg)) {
2067 EraseInstruction(Inst);
2071 // Keep track of which of retain, release, autorelease, and retain_block
2072 // are actually present in this function.
2073 UsedInThisFunction |= 1 << Class;
2075 // If Arg is a PHI, and one or more incoming values to the
2076 // PHI are null, and the call is control-equivalent to the PHI, and there
2077 // are no relevant side effects between the PHI and the call, the call
2078 // could be pushed up to just those paths with non-null incoming values.
2079 // For now, don't bother splitting critical edges for this.
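// For example, a sketch (block and value names are illustrative):
//
//    %p = phi i8* [ null, %bb1 ], [ %x, %bb2 ]
//    call void @objc_release(i8* %p)
// =>
//    ; a clone of the release, on %x, is inserted at the end of %bb2,
//    ; and the original call is deleted, since releasing the null
//    ; incoming value from %bb1 was a no-op anyway.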
2080 SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
2081 Worklist.push_back(std::make_pair(Inst, Arg));
2083 std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
2087 const PHINode *PN = dyn_cast<PHINode>(Arg);
// Determine if the PHI has any null operands, or any incoming
// critical edges.
2092 bool HasNull = false;
2093 bool HasCriticalEdges = false;
2094 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2096 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2097 if (isNullOrUndef(Incoming))
2099 else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
2100 .getNumSuccessors() != 1) {
2101 HasCriticalEdges = true;
2105 // If we have null operands and no critical edges, optimize.
2106 if (!HasCriticalEdges && HasNull) {
2107 SmallPtrSet<Instruction *, 4> DependingInstructions;
2108 SmallPtrSet<const BasicBlock *, 4> Visited;
2110 // Check that there is nothing that cares about the reference
2111 // count between the call and the phi.
2112 FindDependencies(NeedsPositiveRetainCount, Arg,
2113 Inst->getParent(), Inst,
2114 DependingInstructions, Visited, PA);
2115 if (DependingInstructions.size() == 1 &&
2116 *DependingInstructions.begin() == PN) {
2119 // Clone the call into each predecessor that has a non-null value.
2120 CallInst *CInst = cast<CallInst>(Inst);
2121 Type *ParamTy = CInst->getArgOperand(0)->getType();
2122 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2124 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2125 if (!isNullOrUndef(Incoming)) {
2126 CallInst *Clone = cast<CallInst>(CInst->clone());
2127 Value *Op = PN->getIncomingValue(i);
2128 Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
2129 if (Op->getType() != ParamTy)
2130 Op = new BitCastInst(Op, ParamTy, "", InsertPos);
2131 Clone->setArgOperand(0, Op);
2132 Clone->insertBefore(InsertPos);
2133 Worklist.push_back(std::make_pair(Clone, Incoming));
2136 // Erase the original call.
2137 EraseInstruction(CInst);
2141 } while (!Worklist.empty());
2145 /// CheckForCFGHazards - Check for critical edges, loop boundaries, irreducible
2146 /// control flow, or other CFG structures where moving code across the edge
/// would result in it being executed more often than it was originally.
2149 ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
2150 DenseMap<const BasicBlock *, BBState> &BBStates,
2151 BBState &MyStates) const {
// If any top-down local-use (S_Use) or possible-decrement (S_CanRelease)
// state has a successor which is earlier in the sequence, forget it.
2154 for (BBState::ptr_const_iterator I = MyStates.top_down_ptr_begin(),
2155 E = MyStates.top_down_ptr_end(); I != E; ++I)
2156 switch (I->second.GetSeq()) {
2159 const Value *Arg = I->first;
2160 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2161 bool SomeSuccHasSame = false;
2162 bool AllSuccsHaveSame = true;
2163 PtrState &S = MyStates.getPtrTopDownState(Arg);
2164 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
2165 PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
2166 switch (SuccS.GetSeq()) {
2168 case S_CanRelease: {
2169 if (!S.RRI.KnownIncremented && !SuccS.RRI.KnownIncremented)
2170 S.ClearSequenceProgress();
2174 SomeSuccHasSame = true;
2178 case S_MovableRelease:
2179 if (!S.RRI.KnownIncremented && !SuccS.RRI.KnownIncremented)
2180 AllSuccsHaveSame = false;
2183 llvm_unreachable("bottom-up pointer in retain state!");
2186 // If the state at the other end of any of the successor edges
2187 // matches the current state, require all edges to match. This
2188 // guards against loops in the middle of a sequence.
2189 if (SomeSuccHasSame && !AllSuccsHaveSame)
2190 S.ClearSequenceProgress();
2192 case S_CanRelease: {
2193 const Value *Arg = I->first;
2194 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2195 bool SomeSuccHasSame = false;
2196 bool AllSuccsHaveSame = true;
2197 PtrState &S = MyStates.getPtrTopDownState(Arg);
2198 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
2199 PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
2200 switch (SuccS.GetSeq()) {
2202 if (!S.RRI.KnownIncremented && !SuccS.RRI.KnownIncremented)
2203 S.ClearSequenceProgress();
2207 SomeSuccHasSame = true;
2211 case S_MovableRelease:
2213 if (!S.RRI.KnownIncremented && !SuccS.RRI.KnownIncremented)
2214 AllSuccsHaveSame = false;
2217 llvm_unreachable("bottom-up pointer in retain state!");
2220 // If the state at the other end of any of the successor edges
2221 // matches the current state, require all edges to match. This
2222 // guards against loops in the middle of a sequence.
2223 if (SomeSuccHasSame && !AllSuccsHaveSame)
2224 S.ClearSequenceProgress();
2230 ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
2231 DenseMap<const BasicBlock *, BBState> &BBStates,
2232 MapVector<Value *, RRInfo> &Retains) {
2233 bool NestingDetected = false;
2234 BBState &MyStates = BBStates[BB];
2236 // Merge the states from each successor to compute the initial state
2237 // for the current block.
2238 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2239 succ_const_iterator SI(TI), SE(TI, false);
2241 MyStates.SetAsExit();
2244 const BasicBlock *Succ = *SI++;
2247 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
2248 // If we haven't seen this node yet, then we've found a CFG cycle.
// Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
2250 if (I == BBStates.end())
2252 MyStates.InitFromSucc(I->second);
2256 I = BBStates.find(Succ);
2257 if (I != BBStates.end())
2258 MyStates.MergeSucc(I->second);
2264 // Visit all the instructions, bottom-up.
2265 for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
2266 Instruction *Inst = llvm::prior(I);
2267 InstructionClass Class = GetInstructionClass(Inst);
2268 const Value *Arg = 0;
2272 Arg = GetObjCArg(Inst);
2274 PtrState &S = MyStates.getPtrBottomUpState(Arg);
// Check for two releases in a row on the same pointer. If we see one,
// make a note, and we'll circle back to revisit it after we've
// hopefully eliminated the second release, which may allow us to
// eliminate the first release too.
2280 // Theoretically we could implement removal of nested retain+release
2281 // pairs by making PtrState hold a stack of states, but this is
2282 // simple and avoids adding overhead for the non-nested case.
2283 if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease)
2284 NestingDetected = true;
2286 S.SetSeqToRelease(Inst->getMetadata(ImpreciseReleaseMDKind));
2288 S.RRI.KnownIncremented = S.IsKnownIncremented();
2289 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2290 S.RRI.Calls.insert(Inst);
2292 S.IncrementRefCount();
2295 case IC_RetainBlock:
2298 Arg = GetObjCArg(Inst);
2300 PtrState &S = MyStates.getPtrBottomUpState(Arg);
2301 S.DecrementRefCount();
2302 S.SetAtLeastOneRefCount();
2304 switch (S.GetSeq()) {
2307 case S_MovableRelease:
2309 S.RRI.ReverseInsertPts.clear();
2312 // Don't do retain+release tracking for IC_RetainRV, because it's
2313 // better to let it remain as the first instruction after a call.
2314 if (Class != IC_RetainRV) {
2315 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
2316 Retains[Inst] = S.RRI;
2318 S.ClearSequenceProgress();
2323 llvm_unreachable("bottom-up pointer in retain state!");
2327 case IC_AutoreleasepoolPop:
2328 // Conservatively, clear MyStates for all known pointers.
2329 MyStates.clearBottomUpPointers();
2331 case IC_AutoreleasepoolPush:
2333 // These are irrelevant.
2339 // Consider any other possible effects of this instruction on each
2340 // pointer being tracked.
2341 for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
2342 ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
2343 const Value *Ptr = MI->first;
2345 continue; // Handled above.
2346 PtrState &S = MI->second;
2347 Sequence Seq = S.GetSeq();
2349 // Check for possible releases. Note that we don't have to update
2350 // S's RefCount because any reference count modifications would be
2351 // done through a different provenance.
2352 if (!IsRetain(Class) && Class != IC_RetainBlock &&
2353 CanAlterRefCount(Inst, Ptr, PA, Class))
2356 S.SetSeq(S_CanRelease);
2360 case S_MovableRelease:
2365 llvm_unreachable("bottom-up pointer in retain state!");
2368 // Check for possible direct uses.
2371 case S_MovableRelease:
2372 if (CanUse(Inst, Ptr, PA, Class)) {
2373 S.RRI.ReverseInsertPts.clear();
2374 S.RRI.ReverseInsertPts.insert(Inst);
2376 } else if (Seq == S_Release &&
2377 (Class == IC_User || Class == IC_CallOrUser)) {
2378 // Non-movable releases depend on any possible objc pointer use.
2380 S.RRI.ReverseInsertPts.clear();
2381 S.RRI.ReverseInsertPts.insert(Inst);
2385 if (CanUse(Inst, Ptr, PA, Class))
2393 llvm_unreachable("bottom-up pointer in retain state!");
2398 return NestingDetected;
2402 ObjCARCOpt::VisitTopDown(BasicBlock *BB,
2403 DenseMap<const BasicBlock *, BBState> &BBStates,
2404 DenseMap<Value *, RRInfo> &Releases) {
2405 bool NestingDetected = false;
2406 BBState &MyStates = BBStates[BB];
2408 // Merge the states from each predecessor to compute the initial state
2409 // for the current block.
2410 const_pred_iterator PI(BB), PE(BB, false);
2412 MyStates.SetAsEntry();
2415 const BasicBlock *Pred = *PI++;
2418 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
2419 assert(I != BBStates.end());
2420 // If we haven't seen this node yet, then we've found a CFG cycle.
// Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
2422 if (!I->second.isVisitedTopDown())
2424 MyStates.InitFromPred(I->second);
2428 I = BBStates.find(Pred);
2429 assert(I != BBStates.end());
2430 if (I->second.isVisitedTopDown())
2431 MyStates.MergePred(I->second);
2437 // Visit all the instructions, top-down.
2438 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
2439 Instruction *Inst = I;
2440 InstructionClass Class = GetInstructionClass(Inst);
2441 const Value *Arg = 0;
2444 case IC_RetainBlock:
2447 Arg = GetObjCArg(Inst);
2449 PtrState &S = MyStates.getPtrTopDownState(Arg);
2451 // Don't do retain+release tracking for IC_RetainRV, because it's
2452 // better to let it remain as the first instruction after a call.
2453 if (Class != IC_RetainRV) {
// Check for two retains in a row on the same pointer. If we see one,
// make a note, and we'll circle back to revisit it after we've
// hopefully eliminated the second retain, which may allow us to
// eliminate the first retain too.
2458 // Theoretically we could implement removal of nested retain+release
2459 // pairs by making PtrState hold a stack of states, but this is
2460 // simple and avoids adding overhead for the non-nested case.
2461 if (S.GetSeq() == S_Retain)
2462 NestingDetected = true;
2466 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
2467 S.RRI.KnownIncremented = S.IsKnownIncremented();
2468 S.RRI.Calls.insert(Inst);
2471 S.SetAtLeastOneRefCount();
2472 S.IncrementRefCount();
2476 Arg = GetObjCArg(Inst);
2478 PtrState &S = MyStates.getPtrTopDownState(Arg);
2479 S.DecrementRefCount();
2481 switch (S.GetSeq()) {
2484 S.RRI.ReverseInsertPts.clear();
2487 S.RRI.ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
2488 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2489 Releases[Inst] = S.RRI;
2490 S.ClearSequenceProgress();
2496 case S_MovableRelease:
2497 llvm_unreachable("top-down pointer in release state!");
2501 case IC_AutoreleasepoolPop:
2502 // Conservatively, clear MyStates for all known pointers.
2503 MyStates.clearTopDownPointers();
2505 case IC_AutoreleasepoolPush:
2507 // These are irrelevant.
2513 // Consider any other possible effects of this instruction on each
2514 // pointer being tracked.
2515 for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(),
2516 ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) {
2517 const Value *Ptr = MI->first;
2519 continue; // Handled above.
2520 PtrState &S = MI->second;
2521 Sequence Seq = S.GetSeq();
2523 // Check for possible releases. Note that we don't have to update
2524 // S's RefCount because any reference count modifications would be
2525 // done through a different provenance.
2526 if (!IsRetain(Class) && Class != IC_RetainBlock &&
2527 CanAlterRefCount(Inst, Ptr, PA, Class))
2530 S.SetSeq(S_CanRelease);
2531 S.RRI.ReverseInsertPts.clear();
2532 S.RRI.ReverseInsertPts.insert(Inst);
// One call can't cause a transition from S_Retain to S_CanRelease
// and S_CanRelease to S_Use. If we've made the first transition,
// we're done.
2544 case S_MovableRelease:
2545 llvm_unreachable("top-down pointer in release state!");
2548 // Check for possible direct uses.
2551 if (CanUse(Inst, Ptr, PA, Class))
2560 case S_MovableRelease:
2561 llvm_unreachable("top-down pointer in release state!");
2566 CheckForCFGHazards(BB, BBStates, MyStates);
2567 return NestingDetected;
2570 // Visit - Visit the function both top-down and bottom-up.
2572 ObjCARCOpt::Visit(Function &F,
2573 DenseMap<const BasicBlock *, BBState> &BBStates,
2574 MapVector<Value *, RRInfo> &Retains,
2575 DenseMap<Value *, RRInfo> &Releases) {
2576 // Use reverse-postorder on the reverse CFG for bottom-up, because we
2577 // magically know that loops will be well behaved, i.e. they won't repeatedly
2578 // call retain on a single pointer without doing a release. We can't use
2579 // ReversePostOrderTraversal here because we want to walk up from each
2580 // function exit point.
2581 SmallPtrSet<BasicBlock *, 16> Visited;
2582 SmallVector<std::pair<BasicBlock *, pred_iterator>, 16> Stack;
2583 SmallVector<BasicBlock *, 16> Order;
2584 for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
2586 if (BB->getTerminator()->getNumSuccessors() == 0)
2587 Stack.push_back(std::make_pair(BB, pred_begin(BB)));
2589 while (!Stack.empty()) {
2590 pred_iterator End = pred_end(Stack.back().first);
2591 while (Stack.back().second != End) {
2592 BasicBlock *BB = *Stack.back().second++;
2593 if (Visited.insert(BB))
2594 Stack.push_back(std::make_pair(BB, pred_begin(BB)));
2596 Order.push_back(Stack.pop_back_val().first);
2598 bool BottomUpNestingDetected = false;
2599 while (!Order.empty()) {
2600 BasicBlock *BB = Order.pop_back_val();
2601 BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);
2604 // Use regular reverse-postorder for top-down.
2605 bool TopDownNestingDetected = false;
2606 typedef ReversePostOrderTraversal<Function *> RPOTType;
2608 for (RPOTType::rpo_iterator I = RPOT.begin(), E = RPOT.end(); I != E; ++I) {
2609 BasicBlock *BB = *I;
2610 TopDownNestingDetected |= VisitTopDown(BB, BBStates, Releases);
2613 return TopDownNestingDetected && BottomUpNestingDetected;
2616 /// MoveCalls - Move the calls in RetainsToMove and ReleasesToMove.
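///
/// For illustration, a sketch of the net effect when the computed
/// insertion-point sets are empty (the fully redundant case; the code in
/// between is a placeholder):
///
///    %0 = call i8* @objc_retain(i8* %p)
///    ...code with no reference-count-relevant uses of %p...
///    call void @objc_release(i8* %p)
/// =>
///    ...code with no reference-count-relevant uses of %p...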
2617 void ObjCARCOpt::MoveCalls(Value *Arg,
2618 RRInfo &RetainsToMove,
2619 RRInfo &ReleasesToMove,
2620 MapVector<Value *, RRInfo> &Retains,
2621 DenseMap<Value *, RRInfo> &Releases,
2622 SmallVectorImpl<Instruction *> &DeadInsts,
2624 Type *ArgTy = Arg->getType();
2625 Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
2627 // Insert the new retain and release calls.
2628 for (SmallPtrSet<Instruction *, 2>::const_iterator
2629 PI = ReleasesToMove.ReverseInsertPts.begin(),
2630 PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
2631 Instruction *InsertPt = *PI;
2632 Value *MyArg = ArgTy == ParamTy ? Arg :
2633 new BitCastInst(Arg, ParamTy, "", InsertPt);
2635 CallInst::Create(RetainsToMove.IsRetainBlock ?
2636 getRetainBlockCallee(M) : getRetainCallee(M),
2637 MyArg, "", InsertPt);
2638 Call->setDoesNotThrow();
2639 if (!RetainsToMove.IsRetainBlock)
2640 Call->setTailCall();
2642 for (SmallPtrSet<Instruction *, 2>::const_iterator
2643 PI = RetainsToMove.ReverseInsertPts.begin(),
2644 PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
2645 Instruction *LastUse = *PI;
2646 Instruction *InsertPts[] = { 0, 0, 0 };
2647 if (InvokeInst *II = dyn_cast<InvokeInst>(LastUse)) {
2648 // We can't insert code immediately after an invoke instruction, so
2649 // insert code at the beginning of both successor blocks instead.
2650 // The invoke's return value isn't available in the unwind block,
2651 // but our releases will never depend on it, because they must be
2652 // paired with retains from before the invoke.
2653 InsertPts[0] = II->getNormalDest()->getFirstNonPHI();
2654 InsertPts[1] = II->getUnwindDest()->getFirstNonPHI();
2656 // Insert code immediately after the last use.
2657 InsertPts[0] = llvm::next(BasicBlock::iterator(LastUse));
2660 for (Instruction **I = InsertPts; *I; ++I) {
2661 Instruction *InsertPt = *I;
2662 Value *MyArg = ArgTy == ParamTy ? Arg :
2663 new BitCastInst(Arg, ParamTy, "", InsertPt);
2664 CallInst *Call = CallInst::Create(getReleaseCallee(M), MyArg,
2666 // Attach a clang.imprecise_release metadata tag, if appropriate.
2667 if (MDNode *M = ReleasesToMove.ReleaseMetadata)
2668 Call->setMetadata(ImpreciseReleaseMDKind, M);
2669 Call->setDoesNotThrow();
2670 if (ReleasesToMove.IsTailCallRelease)
2671 Call->setTailCall();
2675 // Delete the original retain and release calls.
2676 for (SmallPtrSet<Instruction *, 2>::const_iterator
2677 AI = RetainsToMove.Calls.begin(),
2678 AE = RetainsToMove.Calls.end(); AI != AE; ++AI) {
2679 Instruction *OrigRetain = *AI;
2680 Retains.blot(OrigRetain);
2681 DeadInsts.push_back(OrigRetain);
2683 for (SmallPtrSet<Instruction *, 2>::const_iterator
2684 AI = ReleasesToMove.Calls.begin(),
2685 AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) {
2686 Instruction *OrigRelease = *AI;
2687 Releases.erase(OrigRelease);
2688 DeadInsts.push_back(OrigRelease);
2693 ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
2695 MapVector<Value *, RRInfo> &Retains,
2696 DenseMap<Value *, RRInfo> &Releases,
2698 bool AnyPairsCompletelyEliminated = false;
2699 RRInfo RetainsToMove;
2700 RRInfo ReleasesToMove;
2701 SmallVector<Instruction *, 4> NewRetains;
2702 SmallVector<Instruction *, 4> NewReleases;
2703 SmallVector<Instruction *, 8> DeadInsts;
2705 for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
2706 E = Retains.end(); I != E; ) {
2707 Value *V = (I++)->first;
2708 if (!V) continue; // blotted
2710 Instruction *Retain = cast<Instruction>(V);
2711 Value *Arg = GetObjCArg(Retain);
2713 // If the object being released is in static or stack storage, we know it's
2714 // not being managed by ObjC reference counting, so we can delete pairs
2715 // regardless of what possible decrements or uses lie between them.
2716 bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
2718 // If a pair happens in a region where it is known that the reference count
2719 // is already incremented, we can similarly ignore possible decrements.
2720 bool KnownIncrementedTD = true, KnownIncrementedBU = true;
2722 // Connect the dots between the top-down-collected RetainsToMove and
2723 // bottom-up-collected ReleasesToMove to form sets of related calls.
2724 // This is an iterative process so that we connect multiple releases
2725 // to multiple retains if needed.
2726 unsigned OldDelta = 0;
2727 unsigned NewDelta = 0;
2728 unsigned OldCount = 0;
2729 unsigned NewCount = 0;
2730 bool FirstRelease = true;
2731 bool FirstRetain = true;
2732 NewRetains.push_back(Retain);
2734 for (SmallVectorImpl<Instruction *>::const_iterator
2735 NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
2736 Instruction *NewRetain = *NI;
2737 MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
2738 assert(It != Retains.end());
2739 const RRInfo &NewRetainRRI = It->second;
2740 KnownIncrementedTD &= NewRetainRRI.KnownIncremented;
2741 for (SmallPtrSet<Instruction *, 2>::const_iterator
2742 LI = NewRetainRRI.Calls.begin(),
2743 LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
2744 Instruction *NewRetainRelease = *LI;
2745 DenseMap<Value *, RRInfo>::const_iterator Jt =
2746 Releases.find(NewRetainRelease);
2747 if (Jt == Releases.end())
2749 const RRInfo &NewRetainReleaseRRI = Jt->second;
2750 assert(NewRetainReleaseRRI.Calls.count(NewRetain));
2751 if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
2753 BBStates[NewRetainRelease->getParent()].GetAllPathCount();
2755 // Merge the ReleaseMetadata and IsTailCallRelease values.
2757 ReleasesToMove.ReleaseMetadata =
2758 NewRetainReleaseRRI.ReleaseMetadata;
2759 ReleasesToMove.IsTailCallRelease =
2760 NewRetainReleaseRRI.IsTailCallRelease;
2761 FirstRelease = false;
2763 if (ReleasesToMove.ReleaseMetadata !=
2764 NewRetainReleaseRRI.ReleaseMetadata)
2765 ReleasesToMove.ReleaseMetadata = 0;
2766 if (ReleasesToMove.IsTailCallRelease !=
2767 NewRetainReleaseRRI.IsTailCallRelease)
2768 ReleasesToMove.IsTailCallRelease = false;
2771 // Collect the optimal insertion points.
2773 for (SmallPtrSet<Instruction *, 2>::const_iterator
2774 RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
2775 RE = NewRetainReleaseRRI.ReverseInsertPts.end();
2777 Instruction *RIP = *RI;
2778 if (ReleasesToMove.ReverseInsertPts.insert(RIP))
2779 NewDelta -= BBStates[RIP->getParent()].GetAllPathCount();
2781 NewReleases.push_back(NewRetainRelease);
2786 if (NewReleases.empty()) break;
2788 // Back the other way.
2789 for (SmallVectorImpl<Instruction *>::const_iterator
2790 NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
2791 Instruction *NewRelease = *NI;
2792 DenseMap<Value *, RRInfo>::const_iterator It =
2793 Releases.find(NewRelease);
2794 assert(It != Releases.end());
2795 const RRInfo &NewReleaseRRI = It->second;
2796 KnownIncrementedBU &= NewReleaseRRI.KnownIncremented;
2797 for (SmallPtrSet<Instruction *, 2>::const_iterator
2798 LI = NewReleaseRRI.Calls.begin(),
2799 LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
2800 Instruction *NewReleaseRetain = *LI;
2801 MapVector<Value *, RRInfo>::const_iterator Jt =
2802 Retains.find(NewReleaseRetain);
2803 if (Jt == Retains.end())
2805 const RRInfo &NewReleaseRetainRRI = Jt->second;
2806 assert(NewReleaseRetainRRI.Calls.count(NewRelease));
2807 if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
2808 unsigned PathCount =
2809 BBStates[NewReleaseRetain->getParent()].GetAllPathCount();
2810 OldDelta += PathCount;
2811 OldCount += PathCount;
2813 // Merge the IsRetainBlock values.
2815 RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
2816 FirstRetain = false;
} else if (RetainsToMove.IsRetainBlock !=
2818 NewReleaseRetainRRI.IsRetainBlock)
2819 // It's not possible to merge the sequences if one uses
2820 // objc_retain and the other uses objc_retainBlock.
2823 // Collect the optimal insertion points.
2825 for (SmallPtrSet<Instruction *, 2>::const_iterator
2826 RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
2827 RE = NewReleaseRetainRRI.ReverseInsertPts.end();
2829 Instruction *RIP = *RI;
2830 if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
2831 PathCount = BBStates[RIP->getParent()].GetAllPathCount();
2832 NewDelta += PathCount;
2833 NewCount += PathCount;
2836 NewRetains.push_back(NewReleaseRetain);
2840 NewReleases.clear();
2841 if (NewRetains.empty()) break;
2844 // If the pointer is known incremented, we can safely delete the pair
2845 // regardless of what's between them.
2846 if (KnownIncrementedTD || KnownIncrementedBU) {
2847 RetainsToMove.ReverseInsertPts.clear();
2848 ReleasesToMove.ReverseInsertPts.clear();
2851 // Determine whether the new insertion points we computed preserve the
2852 // balance of retain and release calls through the program.
2853 // TODO: If the fully aggressive solution isn't valid, try to find a
2854 // less aggressive solution which is.
2859 // Determine whether the original call points are balanced in the retain and
// release calls through the program. If not, conservatively don't touch
// them.
2862 // TODO: It's theoretically possible to do code motion in this case, as
2863 // long as the existing imbalances are maintained.
2867 // Ok, everything checks out and we're all set. Let's move some code!
2869 AnyPairsCompletelyEliminated = NewCount == 0;
2870 NumRRs += OldCount - NewCount;
2871 MoveCalls(Arg, RetainsToMove, ReleasesToMove,
2872 Retains, Releases, DeadInsts, M);
2875 NewReleases.clear();
2877 RetainsToMove.clear();
2878 ReleasesToMove.clear();
2881 // Now that we're done moving everything, we can delete the newly dead
2882 // instructions, as we no longer need them as insert points.
2883 while (!DeadInsts.empty())
2884 EraseInstruction(DeadInsts.pop_back_val());
2886 return AnyPairsCompletelyEliminated;
2889 /// OptimizeWeakCalls - Weak pointer optimizations.
2890 void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
2891 // First, do memdep-style RLE and S2L optimizations. We can't use memdep
// itself because it uses AliasAnalysis and we need to do provenance
// queries instead.
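// For example, a sketch of the local redundant-load-elimination case
// (assuming nothing between the two calls can modify the weak pointer):
//    %1 = call i8* @objc_loadWeak(i8** %p)
//    %2 = call i8* @objc_loadWeak(i8** %p)
// =>
//    %1 = call i8* @objc_loadWeak(i8** %p)
//    ; uses of %2 are replaced with %1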
2894 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2895 Instruction *Inst = &*I++;
2896 InstructionClass Class = GetBasicInstructionClass(Inst);
2897 if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained)
2900 // Delete objc_loadWeak calls with no users.
2901 if (Class == IC_LoadWeak && Inst->use_empty()) {
2902 Inst->eraseFromParent();
2906 // TODO: For now, just look for an earlier available version of this value
2907 // within the same block. Theoretically, we could do memdep-style non-local
2908 // analysis too, but that would want caching. A better approach would be to
2909 // use the technique that EarlyCSE uses.
2910 inst_iterator Current = llvm::prior(I);
2911 BasicBlock *CurrentBB = Current.getBasicBlockIterator();
2912 for (BasicBlock::iterator B = CurrentBB->begin(),
2913 J = Current.getInstructionIterator();
2915 Instruction *EarlierInst = &*llvm::prior(J);
2916 InstructionClass EarlierClass = GetInstructionClass(EarlierInst);
2917 switch (EarlierClass) {
2919 case IC_LoadWeakRetained: {
// If this is loading from the same pointer, replace this load's value
// with the earlier load's value.
2922 CallInst *Call = cast<CallInst>(Inst);
2923 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
2924 Value *Arg = Call->getArgOperand(0);
2925 Value *EarlierArg = EarlierCall->getArgOperand(0);
2926 switch (PA.getAA()->alias(Arg, EarlierArg)) {
2927 case AliasAnalysis::MustAlias:
2929 // If the load has a builtin retain, insert a plain retain for it.
2930 if (Class == IC_LoadWeakRetained) {
2932 CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
2936 // Zap the fully redundant load.
2937 Call->replaceAllUsesWith(EarlierCall);
2938 Call->eraseFromParent();
2940 case AliasAnalysis::MayAlias:
2941 case AliasAnalysis::PartialAlias:
2943 case AliasAnalysis::NoAlias:
// If this is storing to the same pointer and has the same size etc.,
// replace this load's value with the stored value.
2952 CallInst *Call = cast<CallInst>(Inst);
2953 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
2954 Value *Arg = Call->getArgOperand(0);
2955 Value *EarlierArg = EarlierCall->getArgOperand(0);
2956 switch (PA.getAA()->alias(Arg, EarlierArg)) {
2957 case AliasAnalysis::MustAlias:
2959 // If the load has a builtin retain, insert a plain retain for it.
2960 if (Class == IC_LoadWeakRetained) {
2962 CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
2966 // Zap the fully redundant load.
2967 Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
2968 Call->eraseFromParent();
2970 case AliasAnalysis::MayAlias:
2971 case AliasAnalysis::PartialAlias:
2973 case AliasAnalysis::NoAlias:
// TODO: Grab the copied value.
2982 case IC_AutoreleasepoolPush:
2985 // Weak pointers are only modified through the weak entry points
2986 // (and arbitrary calls, which could call the weak entry points).
2989 // Anything else could modify the weak pointer.
2996 // Then, for each destroyWeak with an alloca operand, check to see if
2997 // the alloca and all its users can be zapped.
2998 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2999 Instruction *Inst = &*I++;
3000 InstructionClass Class = GetBasicInstructionClass(Inst);
3001 if (Class != IC_DestroyWeak)
3004 CallInst *Call = cast<CallInst>(Inst);
3005 Value *Arg = Call->getArgOperand(0);
3006 if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
3007 for (Value::use_iterator UI = Alloca->use_begin(),
3008 UE = Alloca->use_end(); UI != UE; ++UI) {
3009 Instruction *UserInst = cast<Instruction>(*UI);
3010 switch (GetBasicInstructionClass(UserInst)) {
3013 case IC_DestroyWeak:
3020 for (Value::use_iterator UI = Alloca->use_begin(),
3021 UE = Alloca->use_end(); UI != UE; ) {
3022 CallInst *UserInst = cast<CallInst>(*UI++);
3023 if (!UserInst->use_empty())
3024 UserInst->replaceAllUsesWith(UserInst->getOperand(1));
3025 UserInst->eraseFromParent();
3027 Alloca->eraseFromParent();
3033 /// OptimizeSequences - Identify program paths which execute sequences of
3034 /// retains and releases which can be eliminated.
3035 bool ObjCARCOpt::OptimizeSequences(Function &F) {
3036 /// Releases, Retains - These are used to store the results of the main flow
3037 /// analysis. These use Value* as the key instead of Instruction* so that the
3038 /// map stays valid when we get around to rewriting code and calls get
3039 /// replaced by arguments.
3040 DenseMap<Value *, RRInfo> Releases;
3041 MapVector<Value *, RRInfo> Retains;
/// BBStates - This is used during the traversal of the function to track the
3044 /// states for each identified object at each block.
3045 DenseMap<const BasicBlock *, BBState> BBStates;
3047 // Analyze the CFG of the function, and all instructions.
3048 bool NestingDetected = Visit(F, BBStates, Retains, Releases);
3051 return PerformCodePlacement(BBStates, Retains, Releases, F.getParent()) &&
/// OptimizeReturns - Look for this pattern:
///
///    %call = call i8* @something(...)
///    %2 = call i8* @objc_retain(i8* %call)
///    %3 = call i8* @objc_autorelease(i8* %2)
///    ret i8* %3
///
/// And delete the retain and autorelease.
///
/// Otherwise if it's just this:
///
///    %3 = call i8* @objc_autorelease(i8* %2)
///    ret i8* %3
///
/// convert the autorelease to autoreleaseRV.
3070 void ObjCARCOpt::OptimizeReturns(Function &F) {
3071 if (!F.getReturnType()->isPointerTy())
3074 SmallPtrSet<Instruction *, 4> DependingInstructions;
3075 SmallPtrSet<const BasicBlock *, 4> Visited;
3076 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
3077 BasicBlock *BB = FI;
3078 ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
3081 const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0));
3082 FindDependencies(NeedsPositiveRetainCount, Arg,
3083 BB, Ret, DependingInstructions, Visited, PA);
3084 if (DependingInstructions.size() != 1)
3088 CallInst *Autorelease =
3089 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3092 InstructionClass AutoreleaseClass =
3093 GetBasicInstructionClass(Autorelease);
3094 if (!IsAutorelease(AutoreleaseClass))
3096 if (GetObjCArg(Autorelease) != Arg)
3099 DependingInstructions.clear();
3102 // Check that there is nothing that can affect the reference
3103 // count between the autorelease and the retain.
3104 FindDependencies(CanChangeRetainCount, Arg,
3105 BB, Autorelease, DependingInstructions, Visited, PA);
3106 if (DependingInstructions.size() != 1)
3111 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3113 // Check that we found a retain with the same argument.
3115 !IsRetain(GetBasicInstructionClass(Retain)) ||
3116 GetObjCArg(Retain) != Arg)
3119 DependingInstructions.clear();
3122 // Convert the autorelease to an autoreleaseRV, since it's
3123 // returning the value.
3124 if (AutoreleaseClass == IC_Autorelease) {
3125 Autorelease->setCalledFunction(getAutoreleaseRVCallee(F.getParent()));
3126 AutoreleaseClass = IC_AutoreleaseRV;
3129 // Check that there is nothing that can affect the reference
3130 // count between the retain and the call.
3131 FindDependencies(CanChangeRetainCount, Arg, BB, Retain,
3132 DependingInstructions, Visited, PA);
3133 if (DependingInstructions.size() != 1)
3138 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3140 // Check that the pointer is the return value of the call.
3141 if (!Call || Arg != Call)
3144 // Check that the call is a regular call.
3145 InstructionClass Class = GetBasicInstructionClass(Call);
3146 if (Class != IC_CallOrUser && Class != IC_Call)
3149 // If so, we can zap the retain and autorelease.
3152 EraseInstruction(Retain);
3153 EraseInstruction(Autorelease);
3159 DependingInstructions.clear();
3164 bool ObjCARCOpt::doInitialization(Module &M) {
3168 Run = ModuleHasARC(M);
3172 // Identify the imprecise release metadata kind.
3173 ImpreciseReleaseMDKind =
3174 M.getContext().getMDKindID("clang.imprecise_release");
// Intuitively, objc_retain and others are nocapture; however, in practice
// they are not, because they return their argument value. And objc_release
// calls finalizers.
3180 // These are initialized lazily.
3182 AutoreleaseRVCallee = 0;
3185 RetainBlockCallee = 0;
3186 AutoreleaseCallee = 0;
3191 bool ObjCARCOpt::runOnFunction(Function &F) {
3195 // If nothing in the Module uses ARC, don't do anything.
3201 PA.setAA(&getAnalysis<AliasAnalysis>());
3203 // This pass performs several distinct transformations. As a compile-time aid
3204 // when compiling code that isn't ObjC, skip these if the relevant ObjC
3205 // library functions aren't declared.
// Preliminary optimizations. This also computes UsedInThisFunction.
3208 OptimizeIndividualCalls(F);
3210 // Optimizations for weak pointers.
3211 if (UsedInThisFunction & ((1 << IC_LoadWeak) |
3212 (1 << IC_LoadWeakRetained) |
3213 (1 << IC_StoreWeak) |
3214 (1 << IC_InitWeak) |
3215 (1 << IC_CopyWeak) |
3216 (1 << IC_MoveWeak) |
3217 (1 << IC_DestroyWeak)))
3218 OptimizeWeakCalls(F);
3220 // Optimizations for retain+release pairs.
3221 if (UsedInThisFunction & ((1 << IC_Retain) |
3222 (1 << IC_RetainRV) |
3223 (1 << IC_RetainBlock)))
3224 if (UsedInThisFunction & (1 << IC_Release))
3225 // Run OptimizeSequences until it either stops making changes or
3226 // no retain+release pair nesting is detected.
3227 while (OptimizeSequences(F)) {}
3229 // Optimizations if objc_autorelease is used.
3230 if (UsedInThisFunction &
3231 ((1 << IC_Autorelease) | (1 << IC_AutoreleaseRV)))
3237 void ObjCARCOpt::releaseMemory() {
3241 //===----------------------------------------------------------------------===//
3243 //===----------------------------------------------------------------------===//
3245 // TODO: ObjCARCContract could insert PHI nodes when uses aren't
3246 // dominated by single calls.
3248 #include "llvm/Operator.h"
3249 #include "llvm/InlineAsm.h"
3250 #include "llvm/Analysis/Dominators.h"
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");
3255 /// ObjCARCContract - Late ARC optimizations. These change the IR in a way
3256 /// that makes it difficult to be analyzed by ObjCARCOpt, so it's run late.
3257 class ObjCARCContract : public FunctionPass {
3261 ProvenanceAnalysis PA;
3263 /// Run - A flag indicating whether this optimization pass should run.
3266 /// StoreStrongCallee, etc. - Declarations for ObjC runtime
3267 /// functions, for use in creating calls to them. These are initialized
3268 /// lazily to avoid cluttering up the Module with unused declarations.
3269 Constant *StoreStrongCallee,
3270 *RetainAutoreleaseCallee, *RetainAutoreleaseRVCallee;
3272 /// RetainRVMarker - The inline asm string to insert between calls and
3273 /// RetainRV calls to make the optimization work on targets which need it.
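///
/// For example, a module might supply the marker through named metadata;
/// this sample (the asm string is illustrative, not any particular
/// target's) would be picked up by doInitialization below:
///
///    !clang.arc.retainAutoreleasedReturnValueMarker = !{!0}
///    !0 = metadata !{metadata !"mov r7, r7"}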
3274 const MDString *RetainRVMarker;
3276 Constant *getStoreStrongCallee(Module *M);
3277 Constant *getRetainAutoreleaseCallee(Module *M);
3278 Constant *getRetainAutoreleaseRVCallee(Module *M);
3280 bool ContractAutorelease(Function &F, Instruction *Autorelease,
3281 InstructionClass Class,
3282 SmallPtrSet<Instruction *, 4>
3283 &DependingInstructions,
3284 SmallPtrSet<const BasicBlock *, 4>
3287 void ContractRelease(Instruction *Release,
3288 inst_iterator &Iter);
3290 virtual void getAnalysisUsage(AnalysisUsage &AU) const;
3291 virtual bool doInitialization(Module &M);
3292 virtual bool runOnFunction(Function &F);
3296 ObjCARCContract() : FunctionPass(ID) {
3297 initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
3302 char ObjCARCContract::ID = 0;
3303 INITIALIZE_PASS_BEGIN(ObjCARCContract,
3304 "objc-arc-contract", "ObjC ARC contraction", false, false)
3305 INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
3306 INITIALIZE_PASS_DEPENDENCY(DominatorTree)
3307 INITIALIZE_PASS_END(ObjCARCContract,
3308 "objc-arc-contract", "ObjC ARC contraction", false, false)
3310 Pass *llvm::createObjCARCContractPass() {
3311 return new ObjCARCContract();
3314 void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
3315 AU.addRequired<AliasAnalysis>();
3316 AU.addRequired<DominatorTree>();
3317 AU.setPreservesCFG();
3320 Constant *ObjCARCContract::getStoreStrongCallee(Module *M) {
3321 if (!StoreStrongCallee) {
3322 LLVMContext &C = M->getContext();
3323 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3324 Type *I8XX = PointerType::getUnqual(I8X);
3325 std::vector<Type *> Params;
3326 Params.push_back(I8XX);
3327 Params.push_back(I8X);
3329 AttrListPtr Attributes;
3330 Attributes.addAttr(~0u, Attribute::NoUnwind);
3331 Attributes.addAttr(1, Attribute::NoCapture);
3334 M->getOrInsertFunction(
3336 FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
3339 return StoreStrongCallee;
3342 Constant *ObjCARCContract::getRetainAutoreleaseCallee(Module *M) {
3343 if (!RetainAutoreleaseCallee) {
3344 LLVMContext &C = M->getContext();
3345 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3346 std::vector<Type *> Params;
3347 Params.push_back(I8X);
3349 FunctionType::get(I8X, Params, /*isVarArg=*/false);
3350 AttrListPtr Attributes;
3351 Attributes.addAttr(~0u, Attribute::NoUnwind);
3352 RetainAutoreleaseCallee =
3353 M->getOrInsertFunction("objc_retainAutorelease", FTy, Attributes);
3355 return RetainAutoreleaseCallee;
3358 Constant *ObjCARCContract::getRetainAutoreleaseRVCallee(Module *M) {
3359 if (!RetainAutoreleaseRVCallee) {
3360 LLVMContext &C = M->getContext();
3361 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3362 std::vector<Type *> Params;
3363 Params.push_back(I8X);
3365 FunctionType::get(I8X, Params, /*isVarArg=*/false);
3366 AttrListPtr Attributes;
3367 Attributes.addAttr(~0u, Attribute::NoUnwind);
3368 RetainAutoreleaseRVCallee =
3369 M->getOrInsertFunction("objc_retainAutoreleaseReturnValue", FTy,
3372 return RetainAutoreleaseRVCallee;
/// ContractAutorelease - Merge an autorelease with a retain into a fused
/// objc_retainAutorelease or objc_retainAutoreleaseReturnValue call.
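///
/// A sketch of the rewrite, for illustration (%p is a placeholder):
///
///    %0 = call i8* @objc_retain(i8* %p)
///    %1 = call i8* @objc_autorelease(i8* %p)
/// =>
///    %0 = call i8* @objc_retainAutorelease(i8* %p)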
3378 ObjCARCContract::ContractAutorelease(Function &F, Instruction *Autorelease,
3379 InstructionClass Class,
3380 SmallPtrSet<Instruction *, 4>
3381 &DependingInstructions,
3382 SmallPtrSet<const BasicBlock *, 4>
3384 const Value *Arg = GetObjCArg(Autorelease);
3386 // Check that there are no instructions between the retain and the autorelease
3387 // (such as an autorelease_pop) which may change the count.
3388 CallInst *Retain = 0;
3389 if (Class == IC_AutoreleaseRV)
3390 FindDependencies(RetainAutoreleaseRVDep, Arg,
3391 Autorelease->getParent(), Autorelease,
3392 DependingInstructions, Visited, PA);
3394 FindDependencies(RetainAutoreleaseDep, Arg,
3395 Autorelease->getParent(), Autorelease,
3396 DependingInstructions, Visited, PA);
3399 if (DependingInstructions.size() != 1) {
3400 DependingInstructions.clear();
3404 Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3405 DependingInstructions.clear();
3408 GetBasicInstructionClass(Retain) != IC_Retain ||
3409 GetObjCArg(Retain) != Arg)
3415 if (Class == IC_AutoreleaseRV)
3416 Retain->setCalledFunction(getRetainAutoreleaseRVCallee(F.getParent()));
3418 Retain->setCalledFunction(getRetainAutoreleaseCallee(F.getParent()));
3420 EraseInstruction(Autorelease);
3424 /// ContractRelease - Attempt to merge an objc_release with a store, load, and
3425 /// objc_retain to form an objc_storeStrong. This can be a little tricky because
3426 /// the instructions don't always appear in order, and there may be unrelated
3427 /// intervening instructions.
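///
/// For illustration, a sketch of the pattern being matched (names are
/// illustrative, and the instructions may be interleaved with unrelated
/// code):
///
///    %old = load i8** %ptr
///    %0 = call i8* @objc_retain(i8* %new)
///    store i8* %new, i8** %ptr
///    call void @objc_release(i8* %old)
/// =>
///    call void @objc_storeStrong(i8** %ptr, i8* %new)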
3428 void ObjCARCContract::ContractRelease(Instruction *Release,
3429 inst_iterator &Iter) {
3430 LoadInst *Load = dyn_cast<LoadInst>(GetObjCArg(Release));
3431 if (!Load || Load->isVolatile()) return;
3433 // For now, require everything to be in one basic block.
3434 BasicBlock *BB = Release->getParent();
3435 if (Load->getParent() != BB) return;
3437 // Walk down to find the store.
3438 BasicBlock::iterator I = Load, End = BB->end();
3440 AliasAnalysis::Location Loc = AA->getLocation(Load);
3443 IsRetain(GetBasicInstructionClass(I)) ||
3444 !(AA->getModRefInfo(I, Loc) & AliasAnalysis::Mod)))
3446 StoreInst *Store = dyn_cast<StoreInst>(I);
3447 if (!Store || Store->isVolatile()) return;
3448 if (Store->getPointerOperand() != Loc.Ptr) return;
3450 Value *New = StripPointerCastsAndObjCCalls(Store->getValueOperand());
3452 // Walk up to find the retain.
3454 BasicBlock::iterator Begin = BB->begin();
3455 while (I != Begin && GetBasicInstructionClass(I) != IC_Retain)
3457 Instruction *Retain = I;
3458 if (GetBasicInstructionClass(Retain) != IC_Retain) return;
3459 if (GetObjCArg(Retain) != New) return;
3464 LLVMContext &C = Release->getContext();
3465 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3466 Type *I8XX = PointerType::getUnqual(I8X);
3468 Value *Args[] = { Load->getPointerOperand(), New };
3469 if (Args[0]->getType() != I8XX)
3470 Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
3471 if (Args[1]->getType() != I8X)
3472 Args[1] = new BitCastInst(Args[1], I8X, "", Store);
3473 CallInst *StoreStrong =
3474 CallInst::Create(getStoreStrongCallee(BB->getParent()->getParent()),
3476 StoreStrong->setDoesNotThrow();
3477 StoreStrong->setDebugLoc(Store->getDebugLoc());
3479 if (&*Iter == Store) ++Iter;
3480 Store->eraseFromParent();
3481 Release->eraseFromParent();
3482 EraseInstruction(Retain);
3483 if (Load->use_empty())
3484 Load->eraseFromParent();
3487 bool ObjCARCContract::doInitialization(Module &M) {
3488 Run = ModuleHasARC(M);
3492 // These are initialized lazily.
3493 StoreStrongCallee = 0;
3494 RetainAutoreleaseCallee = 0;
3495 RetainAutoreleaseRVCallee = 0;
3497 // Initialize RetainRVMarker.
3499 if (NamedMDNode *NMD =
3500 M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
3501 if (NMD->getNumOperands() == 1) {
3502 const MDNode *N = NMD->getOperand(0);
3503 if (N->getNumOperands() == 1)
3504 if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
3511 bool ObjCARCContract::runOnFunction(Function &F) {
3515 // If nothing in the Module uses ARC, don't do anything.
3520 AA = &getAnalysis<AliasAnalysis>();
3521 DT = &getAnalysis<DominatorTree>();
3523 PA.setAA(&getAnalysis<AliasAnalysis>());
3525 // For ObjC library calls which return their argument, replace uses of the
3526 // argument with uses of the call return value, if it dominates the use. This
3527 // reduces register pressure.
3528 SmallPtrSet<Instruction *, 4> DependingInstructions;
3529 SmallPtrSet<const BasicBlock *, 4> Visited;
3530 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3531 Instruction *Inst = &*I++;
3533 // Only these library routines return their argument. In particular,
3534 // objc_retainBlock does not necessarily return its argument.
3535 InstructionClass Class = GetBasicInstructionClass(Inst);
3538 case IC_FusedRetainAutorelease:
3539 case IC_FusedRetainAutoreleaseRV:
3541 case IC_Autorelease:
3542 case IC_AutoreleaseRV:
3543 if (ContractAutorelease(F, Inst, Class, DependingInstructions, Visited))
3547 // If we're compiling for a target which needs a special inline-asm
// marker to do the retainAutoreleasedReturnValue optimization,
// insert it now.
3550 if (!RetainRVMarker)
3552 BasicBlock::iterator BBI = Inst;
3554 while (isNoopInstruction(BBI)) --BBI;
3555 if (&*BBI == GetObjCArg(Inst)) {
3557 InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
3558 /*isVarArg=*/false),
3559 RetainRVMarker->getString(),
3560 /*Constraints=*/"", /*hasSideEffects=*/true);
3561 CallInst::Create(IA, "", Inst);
3566 // objc_initWeak(p, null) => *p = null
3567 CallInst *CI = cast<CallInst>(Inst);
3568 if (isNullOrUndef(CI->getArgOperand(1))) {
3570 ConstantPointerNull::get(cast<PointerType>(CI->getType()));
3572 new StoreInst(Null, CI->getArgOperand(0), CI);
3573 CI->replaceAllUsesWith(Null);
3574 CI->eraseFromParent();
3579 ContractRelease(Inst, I);
3585 // Don't use GetObjCArg because we don't want to look through bitcasts
3586 // and such; to do the replacement, the argument must have type i8*.
3587 const Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
3589 // If we're compiling bugpointed code, don't get in trouble.
3590 if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
3592 // Look through the uses of the pointer.
3593 for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
3595 Use &U = UI.getUse();
3596 unsigned OperandNo = UI.getOperandNo();
3597 ++UI; // Increment UI now, because we may unlink its element.
3598 if (Instruction *UserInst = dyn_cast<Instruction>(U.getUser()))
3599 if (Inst != UserInst && DT->dominates(Inst, UserInst)) {
3601 Instruction *Replacement = Inst;
3602 Type *UseTy = U.get()->getType();
3603 if (PHINode *PHI = dyn_cast<PHINode>(UserInst)) {
3604 // For PHI nodes, insert the bitcast in the predecessor block.
3606 PHINode::getIncomingValueNumForOperand(OperandNo);
3608 PHI->getIncomingBlock(ValNo);
3609 if (Replacement->getType() != UseTy)
3610 Replacement = new BitCastInst(Replacement, UseTy, "",
3612 for (unsigned i = 0, e = PHI->getNumIncomingValues();
3614 if (PHI->getIncomingBlock(i) == BB) {
3615 // Keep the UI iterator valid.
3616 if (&PHI->getOperandUse(
3617 PHINode::getOperandNumForIncomingValue(i)) ==
3620 PHI->setIncomingValue(i, Replacement);
3623 if (Replacement->getType() != UseTy)
3624 Replacement = new BitCastInst(Replacement, UseTy, "", UserInst);
// If Arg is a no-op casted pointer, strip one level of casts and
// iterate.
3632 if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
3633 Arg = BI->getOperand(0);
3634 else if (isa<GEPOperator>(Arg) &&
3635 cast<GEPOperator>(Arg)->hasAllZeroIndices())
3636 Arg = cast<GEPOperator>(Arg)->getPointerOperand();
3637 else if (isa<GlobalAlias>(Arg) &&
3638 !cast<GlobalAlias>(Arg)->mayBeOverridden())
3639 Arg = cast<GlobalAlias>(Arg)->getAliasee();