1 ; RUN: opt -S -objc-arc < %s | FileCheck %s
3 target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
4 %struct.__block_byref_x = type { i8*, %struct.__block_byref_x*, i32, i32, i32 }
5 %struct.__block_descriptor = type { i64, i64 }
6 @_NSConcreteStackBlock = external global i8*
7 @__block_descriptor_tmp = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }
9 ; The optimizer should make use of the !clang.arc.no_objc_arc_exceptions
10 ; metadata and eliminate the retainBlock+release pair here.
13 ; CHECK: define void @test0(
; Builds a stack block capturing a __block variable, invokes it under an
; invoke whose unwind edge carries !clang.arc.no_objc_arc_exceptions, and
; releases it on the normal path. The ARC optimizer may pair the
; retainBlock/release across the invoke because the metadata promises the
; call does not unwind through ARC-relevant frames.
define void @test0() {
entry:
  %x = alloca %struct.__block_byref_x, align 8
  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>, align 8
  ; Initialize the __block byref header: isa, forwarding (self), flags, size.
  %byref.isa = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 0
  store i8* null, i8** %byref.isa, align 8
  %byref.forwarding = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 1
  store %struct.__block_byref_x* %x, %struct.__block_byref_x** %byref.forwarding, align 8
  %byref.flags = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 2
  store i32 0, i32* %byref.flags, align 8
  %byref.size = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 3
  store i32 32, i32* %byref.size, align 4
  ; Initialize the block literal: isa, flags, reserved, invoke fn, descriptor.
  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 1
  store i32 1107296256, i32* %block.flags, align 8
  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 2
  store i32 0, i32* %block.reserved, align 4
  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__foo_block_invoke_0 to i8*), i8** %block.invoke, align 8
  %block.descriptor = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 4
  store %struct.__block_descriptor* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_descriptor_tmp to %struct.__block_descriptor*), %struct.__block_descriptor** %block.descriptor, align 8
  ; Capture the byref slot in the block.
  %block.captured = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 5
  %t1 = bitcast %struct.__block_byref_x* %x to i8*
  store i8* %t1, i8** %block.captured, align 8
  %t2 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block to i8*
  %t3 = call i8* @objc_retainBlock(i8* %t2) nounwind, !clang.arc.copy_on_escape !4
  ; Load the invoke function pointer at offset 16 and call the block.
  %t4 = getelementptr inbounds i8* %t3, i64 16
  %t5 = bitcast i8* %t4 to i8**
  %t6 = load i8** %t5, align 8
  %t7 = bitcast i8* %t6 to void (i8*)*
  invoke void %t7(i8* %t3)
          to label %invoke.cont unwind label %lpad, !clang.arc.no_objc_arc_exceptions !4

invoke.cont:                                      ; preds = %entry
  call void @objc_release(i8* %t3) nounwind, !clang.imprecise_release !4
  call void @_Block_object_dispose(i8* %t1, i32 8)
  ret void

lpad:                                             ; preds = %entry
  ; cleanup is required: a landingpad must have at least one clause or be
  ; marked cleanup, and this pad only runs the byref disposal.
  %t8 = landingpad { i8*, i32 } personality i8* bitcast (i32 (...)* @__objc_personality_v0 to i8*)
          cleanup
  call void @_Block_object_dispose(i8* %t1, i32 8)
  resume { i8*, i32 } %t8
}
; There is no !clang.arc.no_objc_arc_exceptions metadata here, so the optimizer
; shouldn't eliminate anything, but it CAN strength-reduce the objc_retainBlock
; to an objc_retain.
66 ; CHECK: define void @test0_no_metadata(
67 ; CHECK: call i8* @objc_retain(
69 ; CHECK: call void @objc_release(
; Identical to @test0 except the invoke carries no
; !clang.arc.no_objc_arc_exceptions metadata, so the optimizer must assume
; the call may unwind and cannot eliminate the retain/release pair; it may
; only strength-reduce objc_retainBlock to objc_retain.
define void @test0_no_metadata() {
entry:
  %x = alloca %struct.__block_byref_x, align 8
  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>, align 8
  ; Initialize the __block byref header: isa, forwarding (self), flags, size.
  %byref.isa = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 0
  store i8* null, i8** %byref.isa, align 8
  %byref.forwarding = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 1
  store %struct.__block_byref_x* %x, %struct.__block_byref_x** %byref.forwarding, align 8
  %byref.flags = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 2
  store i32 0, i32* %byref.flags, align 8
  %byref.size = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 3
  store i32 32, i32* %byref.size, align 4
  ; Initialize the block literal: isa, flags, reserved, invoke fn, descriptor.
  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 1
  store i32 1107296256, i32* %block.flags, align 8
  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 2
  store i32 0, i32* %block.reserved, align 4
  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__foo_block_invoke_0 to i8*), i8** %block.invoke, align 8
  %block.descriptor = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 4
  store %struct.__block_descriptor* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_descriptor_tmp to %struct.__block_descriptor*), %struct.__block_descriptor** %block.descriptor, align 8
  ; Capture the byref slot in the block.
  %block.captured = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 5
  %t1 = bitcast %struct.__block_byref_x* %x to i8*
  store i8* %t1, i8** %block.captured, align 8
  %t2 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block to i8*
  %t3 = call i8* @objc_retainBlock(i8* %t2) nounwind, !clang.arc.copy_on_escape !4
  ; Load the invoke function pointer at offset 16 and call the block.
  %t4 = getelementptr inbounds i8* %t3, i64 16
  %t5 = bitcast i8* %t4 to i8**
  %t6 = load i8** %t5, align 8
  %t7 = bitcast i8* %t6 to void (i8*)*
  ; Deliberately NO !clang.arc.no_objc_arc_exceptions on this invoke.
  invoke void %t7(i8* %t3)
          to label %invoke.cont unwind label %lpad

invoke.cont:                                      ; preds = %entry
  call void @objc_release(i8* %t3) nounwind, !clang.imprecise_release !4
  call void @_Block_object_dispose(i8* %t1, i32 8)
  ret void

lpad:                                             ; preds = %entry
  ; cleanup is required: a landingpad must have at least one clause or be
  ; marked cleanup, and this pad only runs the byref disposal.
  %t8 = landingpad { i8*, i32 } personality i8* bitcast (i32 (...)* @__objc_personality_v0 to i8*)
          cleanup
  call void @_Block_object_dispose(i8* %t1, i32 8)
  resume { i8*, i32 } %t8
}
declare i8* @objc_retainBlock(i8*)
declare void @objc_release(i8*)
declare void @_Block_object_dispose(i8*, i32)
declare i32 @__objc_personality_v0(...)
declare void @__foo_block_invoke_0(i8* nocapture) uwtable ssp

; Node referenced by the !clang.arc.copy_on_escape,
; !clang.arc.no_objc_arc_exceptions, and !clang.imprecise_release
; attachments above; the module fails to parse without a definition for !4.
; NOTE(review): if this file targets LLVM <= 3.5, the spelling is
; "!4 = metadata !{}" — confirm against the tree's IR version.
!4 = !{}