// RUN: %clang_cc1 -triple x86_64-apple-darwin -fblocks -fobjc-arc -fobjc-runtime-has-weak -emit-llvm %s -o - | FileCheck -check-prefix CHECK-LP64 %s
// RUN: %clang_cc1 -triple x86_64-apple-darwin -O1 -fblocks -fobjc-arc -fobjc-runtime-has-weak -emit-llvm %s -o - | FileCheck -check-prefix CHECK-LP64-OPT %s
// rdar://9503326
// rdar://9606600

extern void use(id);
extern void use_block(void (^)(void));

struct NSFastEnumerationState;
@interface NSArray
- (unsigned long) countByEnumeratingWithState: (struct NSFastEnumerationState*) state
                  objects: (id*) buffer
                  count: (unsigned long) bufferSize;
@end
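// This minimal @interface declares only the fast-enumeration entry point that
// for-in lowers to, so the test does not depend on the real Foundation headers.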

void test0(NSArray *array) {
  // 'x' should be initialized without a retain.
  // We should actually do a non-constant capture, and that
  // capture should require a retain.
  for (id x in array) {
    use_block(^{ use(x); });
  }
}
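
// A rough sketch of what ARC does for each iteration above (illustrative only;
// the precise IR is what the checks below verify):
//
//   id x = buffer[i];                         // loaded from the enumeration buffer, no retain
//   block.captured_x = objc_retain(x);        // the block capture is what retains 'x'
//   use_block(&block);
//   objc_storeStrong(&block.captured_x, nil); // release the capture after the call
//
// 'buffer', 'block', and 'captured_x' are illustrative names, not identifiers
// the compiler emits.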

// CHECK-LP64-LABEL:    define{{.*}} void @test0(
// CHECK-LP64:      [[ARRAY:%.*]] = alloca [[ARRAY_T:%.*]]*,
// CHECK-LP64-NEXT: [[X:%.*]] = alloca i8*,
// CHECK-LP64-NEXT: [[STATE:%.*]] = alloca [[STATE_T:%.*]],
// CHECK-LP64-NEXT: [[BUFFER:%.*]] = alloca [16 x i8*], align 8
// CHECK-LP64-NEXT: [[BLOCK:%.*]] = alloca [[BLOCK_T:<{.*}>]],

// CHECK-LP64-OPT-LABEL: define{{.*}} void @test0
// CHECK-LP64-OPT: [[STATE:%.*]] = alloca [[STATE_T:%.*]], align 8
// CHECK-LP64-OPT-NEXT: [[BUFFER:%.*]] = alloca [16 x i8*], align 8
// CHECK-LP64-OPT-NEXT: [[BLOCK:%.*]] = alloca [[BLOCK_T:<{.*}>]], align 8

// Initialize 'array'.
// CHECK-LP64-NEXT: store [[ARRAY_T]]* null, [[ARRAY_T]]** [[ARRAY]]
// CHECK-LP64-NEXT: [[ZERO:%.*]] = bitcast [[ARRAY_T]]** [[ARRAY]] to i8**
// CHECK-LP64-NEXT: [[ONE:%.*]] = bitcast [[ARRAY_T]]* {{%.*}} to i8*
// CHECK-LP64-NEXT: call void @llvm.objc.storeStrong(i8** [[ZERO]], i8* [[ONE]]) [[NUW:#[0-9]+]]

// Initialize the fast enumeration state.
// CHECK-LP64-NEXT: [[T0:%.*]] = bitcast [[STATE_T]]* [[STATE]] to i8*
// CHECK-LP64-NEXT: call void @llvm.memset.p0i8.i64(i8* align 8 [[T0]], i8 0, i64 64, i1 false)

// Evaluate the collection expression and retain.
// CHECK-LP64-NEXT: [[T0:%.*]] = load [[ARRAY_T]]*, [[ARRAY_T]]** [[ARRAY]], align 8
// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[ARRAY_T]]* [[T0]] to i8*
// CHECK-LP64-NEXT: [[T2:%.*]] = call i8* @llvm.objc.retain(i8* [[T1]])
// CHECK-LP64-NEXT: [[SAVED_ARRAY:%.*]] = bitcast i8* [[T2]] to [[ARRAY_T]]*

// Call the enumeration method.
// CHECK-LP64-NEXT: [[T0:%.*]] = load i8*, i8** @OBJC_SELECTOR_REFERENCES_
// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[ARRAY_T]]* [[SAVED_ARRAY]] to i8*
// CHECK-LP64-NEXT: [[SIZE:%.*]] = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, [[STATE_T]]*, [16 x i8*]*, i64)*)(i8* [[T1]], i8* [[T0]], [[STATE_T]]* [[STATE]], [16 x i8*]* [[BUFFER]], i64 16)

// Check for a nonzero result.
// CHECK-LP64-NEXT: [[T0:%.*]] = icmp eq i64 [[SIZE]], 0
// CHECK-LP64-NEXT: br i1 [[T0]]

// CHECK-LP64:      [[T0:%.*]] = getelementptr inbounds [[STATE_T]], [[STATE_T]]* [[STATE]], i32 0, i32 1
// CHECK-LP64-NEXT: [[T1:%.*]] = load i8**, i8*** [[T0]]
// CHECK-LP64-NEXT: [[T2:%.*]] = getelementptr i8*, i8** [[T1]], i64
// CHECK-LP64-NEXT: [[T3:%.*]] = load i8*, i8** [[T2]]
// CHECK-LP64-NEXT: store i8* [[T3]], i8** [[X]]

// CHECK-LP64:      [[CAPTURED:%.*]] = getelementptr inbounds [[BLOCK_T]], [[BLOCK_T]]* [[BLOCK]], i32 0, i32 5
// CHECK-LP64-NEXT: [[T1:%.*]] = load i8*, i8** [[X]]
// CHECK-LP64-NEXT: [[T2:%.*]] = call i8* @llvm.objc.retain(i8* [[T1]])
// CHECK-LP64-NEXT: store i8* [[T2]], i8** [[CAPTURED]]
// CHECK-LP64-NEXT: [[BLOCK1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]]
// CHECK-LP64-NEXT: call void @use_block(void ()* [[BLOCK1]])
// CHECK-LP64-NEXT: call void @llvm.objc.storeStrong(i8** [[CAPTURED]], i8* null)
// CHECK-LP64-NOT:  call void (...) @llvm.objc.clang.arc.use(

// CHECK-LP64-OPT: [[D0:%.*]] = getelementptr inbounds [[BLOCK_T]], [[BLOCK_T]]* [[BLOCK]], i64 0, i32 5
// CHECK-LP64-OPT: [[CAPTURE:%.*]] = load i8*, i8** [[D0]]
// CHECK-LP64-OPT: call void (...) @llvm.objc.clang.arc.use(i8* [[CAPTURE]])

// CHECK-LP64:      [[T0:%.*]] = load i8*, i8** @OBJC_SELECTOR_REFERENCES_
// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[ARRAY_T]]* [[SAVED_ARRAY]] to i8*
// CHECK-LP64-NEXT: [[SIZE:%.*]] = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, [[STATE_T]]*, [16 x i8*]*, i64)*)(i8* [[T1]], i8* [[T0]], [[STATE_T]]* [[STATE]], [16 x i8*]* [[BUFFER]], i64 16)

// Release the array.
// CHECK-LP64:      [[T0:%.*]] = bitcast [[ARRAY_T]]* [[SAVED_ARRAY]] to i8*
// CHECK-LP64-NEXT: call void @llvm.objc.release(i8* [[T0]])

// Destroy 'array'.
// CHECK-LP64:      [[T0:%.*]] = bitcast [[ARRAY_T]]** [[ARRAY]] to i8**
// CHECK-LP64-NEXT: call void @llvm.objc.storeStrong(i8** [[T0]], i8* null)
// CHECK-LP64-NEXT: ret void

// CHECK-LP64-LABEL:    define internal void @__test0_block_invoke
// CHECK-LP64:      [[BLOCK:%.*]] = bitcast i8* {{%.*}} to [[BLOCK_T]]*
// CHECK-LP64-NOT:  ret
// CHECK-LP64:      [[T0:%.*]] = getelementptr inbounds [[BLOCK_T]], [[BLOCK_T]]* [[BLOCK]], i32 0, i32 5
// CHECK-LP64-NEXT: [[T2:%.*]] = load i8*, i8** [[T0]], align 8
// CHECK-LP64-NEXT: call void @use(i8* [[T2]])
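
// Note: the invoke function receives the block literal as an opaque i8*,
// recovers the captured 'x' from field 5, and passes it straight to use();
// no extra retain is expected here because the capture field itself keeps the
// object alive for the duration of the call.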

void test1(NSArray *array) {
  for (__weak id x in array) {
    use_block(^{ use(x); });
  }
}
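
// With a __weak loop variable, a sketch of the per-iteration lowering
// (again illustrative only) replaces the retain/release pairs of test0 with
// weak operations:
//
//   objc_initWeak(&x, buffer[i]);           // no retain of the element
//   objc_copyWeak(&block.captured_x, &x);   // the capture is also __weak
//   use_block(&block);
//   objc_destroyWeak(&block.captured_x);
//   objc_destroyWeak(&x);
//
// 'buffer', 'block', and 'captured_x' are illustrative names, as above.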

// CHECK-LP64-LABEL:    define{{.*}} void @test1(
// CHECK-LP64:      alloca [[ARRAY_T:%.*]]*,
// CHECK-LP64-NEXT: [[X:%.*]] = alloca i8*,
// CHECK-LP64-NEXT: [[STATE:%.*]] = alloca [[STATE_T:%.*]],
// CHECK-LP64-NEXT: alloca [16 x i8*], align 8
// CHECK-LP64-NEXT: [[BLOCK:%.*]] = alloca [[BLOCK_T:<{.*}>]],

// CHECK-LP64:      [[T0:%.*]] = getelementptr inbounds [[STATE_T]], [[STATE_T]]* [[STATE]], i32 0, i32 1
// CHECK-LP64-NEXT: [[T1:%.*]] = load i8**, i8*** [[T0]]
// CHECK-LP64-NEXT: [[T2:%.*]] = getelementptr i8*, i8** [[T1]], i64
// CHECK-LP64-NEXT: [[T3:%.*]] = load i8*, i8** [[T2]]
// CHECK-LP64-NEXT: call i8* @llvm.objc.initWeak(i8** [[X]], i8* [[T3]])

// CHECK-LP64:      [[T0:%.*]] = getelementptr inbounds [[BLOCK_T]], [[BLOCK_T]]* [[BLOCK]], i32 0, i32 5
// CHECK-LP64-NEXT: call void @llvm.objc.copyWeak(i8** [[T0]], i8** [[X]])
// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to
// CHECK-LP64: call void @use_block
// CHECK-LP64-NEXT: call void @llvm.objc.destroyWeak(i8** [[T0]])
// CHECK-LP64-NEXT: call void @llvm.objc.destroyWeak(i8** [[X]])

// rdar://problem/9817306
@interface Test2
- (NSArray *) array;
@end
void test2(Test2 *a) {
  for (id x in a.array) {
    use(x);
  }
}
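
// Conceptually, 'a.array' is evaluated exactly once, before the loop, and the
// result is retained for the duration of the iteration. A rough sketch of the
// intended ordering (names illustrative):
//
//   NSArray *coll = objc_retainAutoreleasedReturnValue([a array]);
//   ... fast-enumeration loop over coll ...
//   objc_release(coll);   // only after the loop finishes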

// CHECK-LP64-LABEL:    define{{.*}} void @test2(
// CHECK-LP64:      [[T0:%.*]] = call [[ARRAY_T]]* bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to [[ARRAY_T]]* (i8*, i8*)*)(
// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[ARRAY_T]]* [[T0]] to i8*
// CHECK-LP64-NEXT: [[T2:%.*]] = notail call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* [[T1]])
// CHECK-LP64-NEXT: [[COLL:%.*]] = bitcast i8* [[T2]] to [[ARRAY_T]]*

// Make sure it's not immediately released before starting the iteration.
// CHECK-LP64-NEXT: load i8*, i8** @OBJC_SELECTOR_REFERENCES_
// CHECK-LP64-NEXT: [[T0:%.*]] = bitcast [[ARRAY_T]]* [[COLL]] to i8*
// CHECK-LP64-NEXT: @objc_msgSend

// This bitcast is for the mutation check.
// CHECK-LP64:      [[T0:%.*]] = bitcast [[ARRAY_T]]* [[COLL]] to i8*
// CHECK-LP64-NEXT: @objc_enumerationMutation

// This bitcast is for the 'next' message send.
// CHECK-LP64:      [[T0:%.*]] = bitcast [[ARRAY_T]]* [[COLL]] to i8*
// CHECK-LP64-NEXT: @objc_msgSend

// This bitcast is for the final release.
// CHECK-LP64:      [[T0:%.*]] = bitcast [[ARRAY_T]]* [[COLL]] to i8*
// CHECK-LP64-NEXT: call void @llvm.objc.release(i8* [[T0]])


// Check that the 'continue' label is positioned appropriately
// relative to the collection cleanup.
void test3(NSArray *array) {
  for (id x in array) {
    if (!x) continue;
    use(x);
  }

  // CHECK-LP64-LABEL:    define{{.*}} void @test3(
  // CHECK-LP64:      [[ARRAY:%.*]] = alloca [[ARRAY_T]]*, align 8
  // CHECK-LP64-NEXT: [[X:%.*]] = alloca i8*, align 8
  // CHECK-LP64:      [[T0:%.*]] = load i8*, i8** [[X]], align 8
  // CHECK-LP64-NEXT: [[T1:%.*]] = icmp ne i8* [[T0]], null
  // CHECK-LP64-NEXT: br i1 [[T1]],
  // CHECK-LP64:      br label [[L:%[^ ]+]]
  // CHECK-LP64:      [[T0:%.*]] = load i8*, i8** [[X]], align 8
  // CHECK-LP64-NEXT: call void @use(i8* [[T0]])
  // CHECK-LP64-NEXT: br label [[L]]
}
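
// Both the 'continue' path and the normal end of the body branch to the same
// label [[L]] above, i.e. to the code that advances to the next element;
// 'continue' must not branch directly into the block that releases the
// collection.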

@interface NSObject @end

@interface I1 : NSObject
- (NSArray *) foo1:(void (^)(void))block;
- (void) foo2;
@end

NSArray *array4;

@implementation I1 : NSObject
- (NSArray *) foo1:(void (^)(void))block {
  block();
  return array4;
}

- (void) foo2 {
  for (id x in [self foo1:^{ use(self); }]) {
    use(x);
    break;
  }
}
@end
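
// Breaking out of the loop must still run the enclosing cleanups: the 'self'
// captured by the block passed to -foo1: is released (a storeStrong to null,
// with no clang.arc.use at -O0), and control then leaves through the cleanup
// switch to either the normal continuation or the end of the enumeration.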

// CHECK-LP64-LABEL: define internal void @"\01-[I1 foo2]"(
// CHECK-LP64:         [[SELF_ADDR:%.*]] = alloca [[TY:%.*]]*,
// CHECK-LP64:         [[BLOCK:%.*]] = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, [[TY]]* }>,
// CHECK-LP64:         store [[TY]]* %self, [[TY]]** [[SELF_ADDR]]
// CHECK-LP64:         [[BC:%.*]] = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, [[TY]]* }>, <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, [[TY]]* }>* [[BLOCK]], i32 0, i32 5
// CHECK-LP64:         [[T1:%.*]] = load [[TY]]*, [[TY]]** [[SELF_ADDR]]
// CHECK-LP64:         [[T2:%.*]] = bitcast [[TY]]* [[T1]] to i8*
// CHECK-LP64:         call i8* @llvm.objc.retain(i8* [[T2]])

// CHECK-LP64-OPT-LABEL: define internal void @"\01-[I1 foo2]"(
// CHECK-LP64-OPT: [[TY:%.*]]* %self
// CHECK-LP64-OPT: [[BLOCK:%.*]] = alloca [[BLOCK_T:<{.*}>]],
// CHECK-LP64-OPT: [[T0:%.*]] = getelementptr inbounds [[BLOCK_T]], [[BLOCK_T]]* [[BLOCK]], i64 0, i32 5

// CHECK-LP64:         [[T5:%.*]] = bitcast [[TY]]** [[BC]] to i8**
// CHECK-LP64:         call void @llvm.objc.storeStrong(i8** [[T5]], i8* null)
// CHECK-LP64-NOT:     call void (...) @llvm.objc.clang.arc.use([[TY]]* [[T5]])
// CHECK-LP64:         switch i32 {{%.*}}, label %[[UNREACHABLE:.*]] [
// CHECK-LP64-NEXT:      i32 0, label %[[CLEANUP_CONT:.*]]
// CHECK-LP64-NEXT:      i32 2, label %[[FORCOLL_END:.*]]
// CHECK-LP64-NEXT:    ]

// CHECK-LP64-OPT: [[T5:%.*]] = load [[TY]]*, [[TY]]** [[T0]]
// CHECK-LP64-OPT: call void (...) @llvm.objc.clang.arc.use([[TY]]* [[T5]])

// CHECK-LP64:       {{^|:}}[[CLEANUP_CONT]]
// CHECK-LP64-NEXT:    br label %[[FORCOLL_END]]

// CHECK-LP64:       {{^|:}}[[FORCOLL_END]]
// CHECK-LP64-NEXT:    ret void

// CHECK-LP64:       {{^|:}}[[UNREACHABLE]]
// CHECK-LP64-NEXT:    unreachable

// CHECK-LP64: attributes [[NUW]] = { nounwind }