/// opposed to objc_retain calls).
bool IsRetainBlock;
+ /// CopyOnEscape - True if the Calls are objc_retainBlock calls
+ /// which all have the !clang.arc.copy_on_escape metadata.
+ bool CopyOnEscape;
+
/// IsTailCallRelease - True if the objc_release calls are all marked
/// with the "tail" keyword.
bool IsTailCallRelease;
SmallPtrSet<Instruction *, 2> ReverseInsertPts;
RRInfo() :
- KnownSafe(false), IsRetainBlock(false), IsTailCallRelease(false),
- Partial(false),
+ KnownSafe(false), IsRetainBlock(false), CopyOnEscape(false),
+ IsTailCallRelease(false), Partial(false),
ReleaseMetadata(0) {}
void clear();
void RRInfo::clear() {
KnownSafe = false;
IsRetainBlock = false;
+ CopyOnEscape = false;
IsTailCallRelease = false;
Partial = false;
ReleaseMetadata = 0;
if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
RRI.ReleaseMetadata = 0;
+ RRI.CopyOnEscape = RRI.CopyOnEscape && Other.RRI.CopyOnEscape;
RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
RRI.IsTailCallRelease = RRI.IsTailCallRelease && Other.RRI.IsTailCallRelease;
RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());
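// Note: the CopyOnEscape merge above is a logical AND because the merged
// RRInfo may describe several objc_retainBlock calls; the group can only
// be treated as copy-on-escape if every call carries the
// !clang.arc.copy_on_escape tag.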
/// metadata.
unsigned ImpreciseReleaseMDKind;
+ /// CopyOnEscapeMDKind - The Metadata Kind for clang.arc.copy_on_escape
+ /// metadata.
+ unsigned CopyOnEscapeMDKind;
+
Constant *getRetainRVCallee(Module *M);
Constant *getAutoreleaseRVCallee(Module *M);
Constant *getReleaseCallee(Module *M);
S.SetAtLeastOneRefCount();
S.DecrementNestCount();
- // An objc_retainBlock call with just a use still needs to be kept,
- // because it may be copying a block from the stack to the heap.
- if (Class == IC_RetainBlock && S.GetSeq() == S_Use)
+ // A non-copy-on-escape objc_retainBlock call with just a use still
+ // needs to be kept, because it may be copying a block from the stack
+ // to the heap.
+ if (Class == IC_RetainBlock &&
+ !Inst->getMetadata(CopyOnEscapeMDKind) &&
+ S.GetSeq() == S_Use)
S.SetSeq(S_CanRelease);
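// (A call tagged with !clang.arc.copy_on_escape only needs to copy the
// block to the heap if the block escapes, so it need not be kept for the
// copy's sake and may remain at S_Use.)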
switch (S.GetSeq()) {
// better to let it remain as the first instruction after a call.
if (Class != IC_RetainRV) {
S.RRI.IsRetainBlock = Class == IC_RetainBlock;
+ if (S.RRI.IsRetainBlock)
+ S.RRI.CopyOnEscape = !!Inst->getMetadata(CopyOnEscapeMDKind);
Retains[Inst] = S.RRI;
}
S.ClearSequenceProgress();
S.SetSeq(S_Retain);
S.RRI.clear();
S.RRI.IsRetainBlock = Class == IC_RetainBlock;
+ if (S.RRI.IsRetainBlock)
+ S.RRI.CopyOnEscape = !!Inst->getMetadata(CopyOnEscapeMDKind);
// Don't check S.IsKnownIncremented() here because it's not
// sufficient.
S.RRI.KnownSafe = S.IsKnownNested();
S.SetSeq(S_Use);
break;
case S_Retain:
- // An objc_retainBlock call may be responsible for copying the block
- // data from the stack to the heap. Model this by moving it straight
- // from S_Retain to S_Use.
+ // A non-copy-on-escape objc_retainBlock call may be responsible for
+ // copying the block data from the stack to the heap. Model this by
+ // moving it straight from S_Retain to S_Use.
if (S.RRI.IsRetainBlock &&
+ !S.RRI.CopyOnEscape &&
CanUse(Inst, Ptr, PA, Class)) {
assert(S.RRI.ReverseInsertPts.empty());
S.RRI.ReverseInsertPts.insert(Inst);
getRetainBlockCallee(M) : getRetainCallee(M),
MyArg, "", InsertPt);
Call->setDoesNotThrow();
+ if (RetainsToMove.CopyOnEscape)
+ Call->setMetadata(CopyOnEscapeMDKind,
+ MDNode::get(M->getContext(), ArrayRef<Value *>()));
if (!RetainsToMove.IsRetainBlock)
Call->setTailCall();
}
// regardless of what possible decrements or uses lie between them.
bool KnownSafe = isa<Constant>(Arg);
- // Same for stack storage, unless this is an objc_retainBlock call,
- // which is responsible for copying the block data from the stack to
- // the heap.
- if (!I->second.IsRetainBlock && isa<AllocaInst>(Arg))
+ // Same for stack storage, unless this is a non-copy-on-escape
+ // objc_retainBlock call, which is responsible for copying the block data
+ // from the stack to the heap.
+ if ((!I->second.IsRetainBlock || I->second.CopyOnEscape) &&
+ isa<AllocaInst>(Arg))
KnownSafe = true;
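// (A stack allocation cannot be deallocated out from under the pair, so
// a retain/release pair over an alloca is removable regardless of
// intervening decrements; an untagged objc_retainBlock is still excluded
// because it may be needed to perform the stack-to-heap copy.)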
// A constant pointer can't be pointing to an object on the heap. It may
// Merge the IsRetainBlock values.
if (FirstRetain) {
RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
+ RetainsToMove.CopyOnEscape = NewReleaseRetainRRI.CopyOnEscape;
FirstRetain = false;
} else if (RetainsToMove.IsRetainBlock !=
NewReleaseRetainRRI.IsRetainBlock)
// objc_retain and the other uses objc_retainBlock.
goto next_retain;
+ // Merge the CopyOnEscape values.
+ RetainsToMove.CopyOnEscape &= NewReleaseRetainRRI.CopyOnEscape;
+
// Collect the optimal insertion points.
if (!KnownSafe)
for (SmallPtrSet<Instruction *, 2>::const_iterator
// Identify the imprecise release metadata kind.
ImpreciseReleaseMDKind =
M.getContext().getMDKindID("clang.imprecise_release");
+ CopyOnEscapeMDKind =
+ M.getContext().getMDKindID("clang.arc.copy_on_escape");
// Intuitively, objc_retain and others are nocapture, however in practice
// they are not, because they return their argument value. And objc_release
; RUN: opt -S -objc-arc < %s | FileCheck %s
; rdar://10209613
-; CHECK: define void @test
-; CHECK: %3 = call i8* @objc_retainBlock(i8* %2) nounwind
-; CHECK: @objc_msgSend
-; CHECK-NEXT: @objc_release(i8* %3)
-
%0 = type opaque
%struct.__block_descriptor = type { i64, i64 }
@__block_descriptor_tmp = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }
@"\01L_OBJC_SELECTOR_REFERENCES_" = external hidden global i8*, section "__DATA, __objc_selrefs, literal_pointers, no_dead_strip"
+; CHECK: define void @test(
+; CHECK: %3 = call i8* @objc_retainBlock(i8* %2) nounwind
+; CHECK: @objc_msgSend
+; CHECK-NEXT: @objc_release(i8* %3)
define void @test(%0* %array) uwtable {
entry:
%block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>, align 8
ret void
}
+; Same as @test, but here the objc_retainBlock call carries the
+; clang.arc.copy_on_escape tag, so it is safe to delete.
+
+; CHECK: define void @test_with_COE(
+; CHECK-NOT: @objc_retainBlock
+; CHECK: @objc_msgSend
+; CHECK: @objc_release
+; CHECK-NOT: @objc_release
+; CHECK: }
+define void @test_with_COE(%0* %array) uwtable {
+entry:
+ %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>, align 8
+ %0 = bitcast %0* %array to i8*
+ %1 = tail call i8* @objc_retain(i8* %0) nounwind
+ %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 0
+ store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
+ %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 1
+ store i32 1107296256, i32* %block.flags, align 8
+ %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 2
+ store i32 0, i32* %block.reserved, align 4
+ %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 3
+ store i8* bitcast (void (i8*)* @__test_block_invoke_0 to i8*), i8** %block.invoke, align 8
+ %block.descriptor = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 4
+ store %struct.__block_descriptor* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_descriptor_tmp to %struct.__block_descriptor*), %struct.__block_descriptor** %block.descriptor, align 8
+ %block.captured = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 5
+ store %0* %array, %0** %block.captured, align 8
+ %2 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block to i8*
+ %3 = call i8* @objc_retainBlock(i8* %2) nounwind, !clang.arc.copy_on_escape !0
+ %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
+ call void bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to void (i8*, i8*, i8*)*)(i8* %0, i8* %tmp2, i8* %3)
+ call void @objc_release(i8* %3) nounwind
+ %strongdestroy = load %0** %block.captured, align 8
+ %4 = bitcast %0* %strongdestroy to i8*
+ call void @objc_release(i8* %4) nounwind, !clang.imprecise_release !0
+ ret void
+}
+
declare i8* @objc_retain(i8*)
declare i8* @objc_retainBlock(i8*)
declare void @objc_release(i8*)
declare i8* @objc_msgSend(i8*, i8*, ...)
declare void @__test_block_invoke_0(i8* nocapture) uwtable

@_NSConcreteStackBlock = external global i8*

!0 = metadata !{}