; RUN: opt < %s -S -openmp-opt-cgscc        | FileCheck %s
; RUN: opt < %s -S -passes=openmp-opt-cgscc | FileCheck %s
; RUN: opt < %s -S -openmp-opt-cgscc        -openmp-ir-builder-optimistic-attributes | FileCheck %s --check-prefix=OPTIMISTIC
; RUN: opt < %s -S -passes=openmp-opt-cgscc -openmp-ir-builder-optimistic-attributes | FileCheck %s --check-prefix=OPTIMISTIC
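;
; Descriptive note (added): this test calls a broad set of OpenMP API functions and
; declares the __kmpc_*/__tgt_* runtime entry points, then checks that the OpenMP-opt
; CGSCC pass annotates the known runtime declarations with attributes such as
; nounwind and convergent (CHECK prefix). With
; -openmp-ir-builder-optimistic-attributes, the stronger optimistic attribute sets
; are expected instead (OPTIMISTIC prefix).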
target datalayout = "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"

%struct.omp_lock_t = type { i8* }
%struct.omp_nest_lock_t = type { i8* }
%struct.ident_t = type { i32, i32, i32, i32, i8* }

define void @call_all(i32 %schedule, %struct.omp_lock_t* %lock, i32 %lock_hint, %struct.omp_nest_lock_t* %nest_lock, i32 %i, i8* %s, i64 %st, i8* %vp, double %d, i32 %proc_bind, i64 %allocator_handle, i8* %cp, i64 %event_handle, i32 %pause_resource) {
entry:
  %schedule.addr = alloca i32, align 4
  %lock.addr = alloca %struct.omp_lock_t*, align 8
  %lock_hint.addr = alloca i32, align 4
  %nest_lock.addr = alloca %struct.omp_nest_lock_t*, align 8
  %i.addr = alloca i32, align 4
  %s.addr = alloca i8*, align 8
  %st.addr = alloca i64, align 8
  %vp.addr = alloca i8*, align 8
  %d.addr = alloca double, align 8
  %proc_bind.addr = alloca i32, align 4
  %allocator_handle.addr = alloca i64, align 8
  %cp.addr = alloca i8*, align 8
  %event_handle.addr = alloca i64, align 8
  %pause_resource.addr = alloca i32, align 4
  store i32 %schedule, i32* %schedule.addr, align 4
  store %struct.omp_lock_t* %lock, %struct.omp_lock_t** %lock.addr, align 8
  store i32 %lock_hint, i32* %lock_hint.addr, align 4
  store %struct.omp_nest_lock_t* %nest_lock, %struct.omp_nest_lock_t** %nest_lock.addr, align 8
  store i32 %i, i32* %i.addr, align 4
  store i8* %s, i8** %s.addr, align 8
  store i64 %st, i64* %st.addr, align 8
  store i8* %vp, i8** %vp.addr, align 8
  store double %d, double* %d.addr, align 8
  store i32 %proc_bind, i32* %proc_bind.addr, align 4
  store i64 %allocator_handle, i64* %allocator_handle.addr, align 8
  store i8* %cp, i8** %cp.addr, align 8
  store i64 %event_handle, i64* %event_handle.addr, align 8
  store i32 %pause_resource, i32* %pause_resource.addr, align 4
  call void @omp_set_num_threads(i32 0)
  call void @omp_set_dynamic(i32 0)
  call void @omp_set_nested(i32 0)
  call void @omp_set_max_active_levels(i32 0)
  %0 = load i32, i32* %schedule.addr, align 4
  call void @omp_set_schedule(i32 %0, i32 0)
  %call = call i32 @omp_get_num_threads()
  store i32 %call, i32* %i.addr, align 4
  %1 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %1)
  %call1 = call i32 @omp_get_dynamic()
  store i32 %call1, i32* %i.addr, align 4
  %2 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %2)
  %call2 = call i32 @omp_get_nested()
  store i32 %call2, i32* %i.addr, align 4
  %3 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %3)
  %call3 = call i32 @omp_get_max_threads()
  store i32 %call3, i32* %i.addr, align 4
  %4 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %4)
  %call4 = call i32 @omp_get_thread_num()
  store i32 %call4, i32* %i.addr, align 4
  %5 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %5)
  %call5 = call i32 @omp_get_num_procs()
  store i32 %call5, i32* %i.addr, align 4
  %6 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %6)
  %call6 = call i32 @omp_in_parallel()
  store i32 %call6, i32* %i.addr, align 4
  %7 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %7)
  %call7 = call i32 @omp_in_final()
  store i32 %call7, i32* %i.addr, align 4
  %8 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %8)
  %call8 = call i32 @omp_get_active_level()
  store i32 %call8, i32* %i.addr, align 4
  %9 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %9)
  %call9 = call i32 @omp_get_level()
  store i32 %call9, i32* %i.addr, align 4
  %10 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %10)
  %call10 = call i32 @omp_get_ancestor_thread_num(i32 0)
  store i32 %call10, i32* %i.addr, align 4
  %11 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %11)
  %call11 = call i32 @omp_get_team_size(i32 0)
  store i32 %call11, i32* %i.addr, align 4
  %12 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %12)
  %call12 = call i32 @omp_get_thread_limit()
  store i32 %call12, i32* %i.addr, align 4
  %13 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %13)
  %call13 = call i32 @omp_get_max_active_levels()
  store i32 %call13, i32* %i.addr, align 4
  %14 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %14)
  call void @omp_get_schedule(i32* %schedule.addr, i32* %i.addr)
  %call14 = call i32 @omp_get_max_task_priority()
  store i32 %call14, i32* %i.addr, align 4
  %15 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %15)
  %16 = load %struct.omp_lock_t*, %struct.omp_lock_t** %lock.addr, align 8
  call void @omp_init_lock(%struct.omp_lock_t* %16)
  %17 = load %struct.omp_lock_t*, %struct.omp_lock_t** %lock.addr, align 8
  call void @omp_set_lock(%struct.omp_lock_t* %17)
  %18 = load %struct.omp_lock_t*, %struct.omp_lock_t** %lock.addr, align 8
  call void @omp_unset_lock(%struct.omp_lock_t* %18)
  %19 = load %struct.omp_lock_t*, %struct.omp_lock_t** %lock.addr, align 8
  call void @omp_destroy_lock(%struct.omp_lock_t* %19)
  %20 = load %struct.omp_lock_t*, %struct.omp_lock_t** %lock.addr, align 8
  %call15 = call i32 @omp_test_lock(%struct.omp_lock_t* %20)
  store i32 %call15, i32* %i.addr, align 4
  %21 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %21)
  %22 = load %struct.omp_nest_lock_t*, %struct.omp_nest_lock_t** %nest_lock.addr, align 8
  call void @omp_init_nest_lock(%struct.omp_nest_lock_t* %22)
  %23 = load %struct.omp_nest_lock_t*, %struct.omp_nest_lock_t** %nest_lock.addr, align 8
  call void @omp_set_nest_lock(%struct.omp_nest_lock_t* %23)
  %24 = load %struct.omp_nest_lock_t*, %struct.omp_nest_lock_t** %nest_lock.addr, align 8
  call void @omp_unset_nest_lock(%struct.omp_nest_lock_t* %24)
  %25 = load %struct.omp_nest_lock_t*, %struct.omp_nest_lock_t** %nest_lock.addr, align 8
  call void @omp_destroy_nest_lock(%struct.omp_nest_lock_t* %25)
  %26 = load %struct.omp_nest_lock_t*, %struct.omp_nest_lock_t** %nest_lock.addr, align 8
  %call16 = call i32 @omp_test_nest_lock(%struct.omp_nest_lock_t* %26)
  store i32 %call16, i32* %i.addr, align 4
  %27 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %27)
  %28 = load %struct.omp_lock_t*, %struct.omp_lock_t** %lock.addr, align 8
  %29 = load i32, i32* %lock_hint.addr, align 4
  call void @omp_init_lock_with_hint(%struct.omp_lock_t* %28, i32 %29)
  %30 = load %struct.omp_nest_lock_t*, %struct.omp_nest_lock_t** %nest_lock.addr, align 8
  %31 = load i32, i32* %lock_hint.addr, align 4
  call void @omp_init_nest_lock_with_hint(%struct.omp_nest_lock_t* %30, i32 %31)
  %call17 = call double @omp_get_wtime()
  store double %call17, double* %d.addr, align 8
  %32 = load double, double* %d.addr, align 8
  call void @use_double(double %32)
  %call18 = call double @omp_get_wtick()
  store double %call18, double* %d.addr, align 8
  %33 = load double, double* %d.addr, align 8
  call void @use_double(double %33)
  %call19 = call i32 @omp_get_default_device()
  store i32 %call19, i32* %i.addr, align 4
  %34 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %34)
  call void @omp_set_default_device(i32 0)
  %call20 = call i32 @omp_is_initial_device()
  store i32 %call20, i32* %i.addr, align 4
  %35 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %35)
  %call21 = call i32 @omp_get_num_devices()
  store i32 %call21, i32* %i.addr, align 4
  %36 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %36)
  %call22 = call i32 @omp_get_num_teams()
  store i32 %call22, i32* %i.addr, align 4
  %37 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %37)
  %call23 = call i32 @omp_get_team_num()
  store i32 %call23, i32* %i.addr, align 4
  %38 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %38)
  %call24 = call i32 @omp_get_cancellation()
  store i32 %call24, i32* %i.addr, align 4
  %39 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %39)
  %call25 = call i32 @omp_get_initial_device()
  store i32 %call25, i32* %i.addr, align 4
  %40 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %40)
  %41 = load i64, i64* %st.addr, align 8
  %42 = load i32, i32* %i.addr, align 4
  %call26 = call i8* @omp_target_alloc(i64 %41, i32 %42)
  store i8* %call26, i8** %vp.addr, align 8
  %43 = load i8*, i8** %vp.addr, align 8
  call void @use_voidptr(i8* %43)
  %44 = load i8*, i8** %vp.addr, align 8
  %45 = load i32, i32* %i.addr, align 4
  call void @omp_target_free(i8* %44, i32 %45)
  %46 = load i8*, i8** %vp.addr, align 8
  %47 = load i32, i32* %i.addr, align 4
  %call27 = call i32 @omp_target_is_present(i8* %46, i32 %47)
  store i32 %call27, i32* %i.addr, align 4
  %48 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %48)
  %49 = load i8*, i8** %vp.addr, align 8
  %50 = load i8*, i8** %vp.addr, align 8
  %51 = load i64, i64* %st.addr, align 8
  %52 = load i64, i64* %st.addr, align 8
  %53 = load i64, i64* %st.addr, align 8
  %54 = load i32, i32* %i.addr, align 4
  %55 = load i32, i32* %i.addr, align 4
  %call28 = call i32 @omp_target_memcpy(i8* %49, i8* %50, i64 %51, i64 %52, i64 %53, i32 %54, i32 %55)
  store i32 %call28, i32* %i.addr, align 4
  %56 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %56)
  %57 = load i8*, i8** %vp.addr, align 8
  %58 = load i8*, i8** %vp.addr, align 8
  %59 = load i64, i64* %st.addr, align 8
  %60 = load i64, i64* %st.addr, align 8
  %61 = load i32, i32* %i.addr, align 4
  %call29 = call i32 @omp_target_associate_ptr(i8* %57, i8* %58, i64 %59, i64 %60, i32 %61)
  store i32 %call29, i32* %i.addr, align 4
  %62 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %62)
  %63 = load i8*, i8** %vp.addr, align 8
  %64 = load i32, i32* %i.addr, align 4
  %call30 = call i32 @omp_target_disassociate_ptr(i8* %63, i32 %64)
  store i32 %call30, i32* %i.addr, align 4
  %65 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %65)
  %call31 = call i32 @omp_get_device_num()
  store i32 %call31, i32* %i.addr, align 4
  %66 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %66)
  %call32 = call i32 @omp_get_proc_bind()
  store i32 %call32, i32* %proc_bind.addr, align 4
  %call33 = call i32 @omp_get_num_places()
  store i32 %call33, i32* %i.addr, align 4
  %67 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %67)
  %call34 = call i32 @omp_get_place_num_procs(i32 0)
  store i32 %call34, i32* %i.addr, align 4
  %68 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %68)
  %69 = load i32, i32* %i.addr, align 4
  call void @omp_get_place_proc_ids(i32 %69, i32* %i.addr)
  %call35 = call i32 @omp_get_place_num()
  store i32 %call35, i32* %i.addr, align 4
  %70 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %70)
  %call36 = call i32 @omp_get_partition_num_places()
  store i32 %call36, i32* %i.addr, align 4
  %71 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %71)
  call void @omp_get_partition_place_nums(i32* %i.addr)
  %72 = load i32, i32* %i.addr, align 4
  %73 = load i32, i32* %i.addr, align 4
  %74 = load i8*, i8** %vp.addr, align 8
  %call37 = call i32 @omp_control_tool(i32 %72, i32 %73, i8* %74)
  store i32 %call37, i32* %i.addr, align 4
  %75 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %75)
  %76 = load i64, i64* %allocator_handle.addr, align 8
  call void @omp_destroy_allocator(i64 %76)
  %77 = load i64, i64* %allocator_handle.addr, align 8
  call void @omp_set_default_allocator(i64 %77)
  %call38 = call i64 @omp_get_default_allocator()
  store i64 %call38, i64* %allocator_handle.addr, align 8
  %78 = load i64, i64* %st.addr, align 8
  %79 = load i64, i64* %allocator_handle.addr, align 8
  %call39 = call i8* @omp_alloc(i64 %78, i64 %79)
  store i8* %call39, i8** %vp.addr, align 8
  %80 = load i8*, i8** %vp.addr, align 8
  call void @use_voidptr(i8* %80)
  %81 = load i8*, i8** %vp.addr, align 8
  %82 = load i64, i64* %allocator_handle.addr, align 8
  call void @omp_free(i8* %81, i64 %82)
  %83 = load i64, i64* %st.addr, align 8
  %84 = load i64, i64* %allocator_handle.addr, align 8
  %call40 = call i8* @omp_alloc(i64 %83, i64 %84)
  store i8* %call40, i8** %vp.addr, align 8
  %85 = load i8*, i8** %vp.addr, align 8
  call void @use_voidptr(i8* %85)
  %86 = load i8*, i8** %vp.addr, align 8
  %87 = load i64, i64* %allocator_handle.addr, align 8
  call void @omp_free(i8* %86, i64 %87)
  %88 = load i8*, i8** %s.addr, align 8
  call void @ompc_set_affinity_format(i8* %88)
  %89 = load i8*, i8** %cp.addr, align 8
  %90 = load i64, i64* %st.addr, align 8
  %call41 = call i64 @ompc_get_affinity_format(i8* %89, i64 %90)
  store i64 %call41, i64* %st.addr, align 8
  %91 = load i64, i64* %st.addr, align 8
  call void @use_sizet(i64 %91)
  %92 = load i8*, i8** %s.addr, align 8
  call void @ompc_display_affinity(i8* %92)
  %93 = load i8*, i8** %cp.addr, align 8
  %94 = load i64, i64* %st.addr, align 8
  %95 = load i8*, i8** %s.addr, align 8
  %call42 = call i64 @ompc_capture_affinity(i8* %93, i64 %94, i8* %95)
  store i64 %call42, i64* %st.addr, align 8
  %96 = load i64, i64* %st.addr, align 8
  call void @use_sizet(i64 %96)
  %97 = load i64, i64* %event_handle.addr, align 8
  call void @omp_fulfill_event(i64 %97)
  %98 = load i32, i32* %pause_resource.addr, align 4
  %99 = load i32, i32* %i.addr, align 4
  %call43 = call i32 @omp_pause_resource(i32 %98, i32 %99)
  store i32 %call43, i32* %i.addr, align 4
  %100 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %100)
  %101 = load i32, i32* %pause_resource.addr, align 4
  %call44 = call i32 @omp_pause_resource_all(i32 %101)
  store i32 %call44, i32* %i.addr, align 4
  %102 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %102)
  %call45 = call i32 @omp_get_supported_active_levels()
  store i32 %call45, i32* %i.addr, align 4
  %103 = load i32, i32* %i.addr, align 4
  call void @use_int(i32 %103)
  ret void
}

declare dso_local void @omp_set_num_threads(i32)

declare dso_local void @omp_set_dynamic(i32)

declare dso_local void @omp_set_nested(i32)

declare dso_local void @omp_set_max_active_levels(i32)

declare dso_local void @omp_set_schedule(i32, i32)

declare dso_local i32 @omp_get_num_threads()

declare dso_local void @use_int(i32)

declare dso_local i32 @omp_get_dynamic()

declare dso_local i32 @omp_get_nested()

declare dso_local i32 @omp_get_max_threads()

declare dso_local i32 @omp_get_thread_num()

declare dso_local i32 @omp_get_num_procs()

declare dso_local i32 @omp_in_parallel()

declare dso_local i32 @omp_in_final()

declare dso_local i32 @omp_get_active_level()

declare dso_local i32 @omp_get_level()

declare dso_local i32 @omp_get_ancestor_thread_num(i32)

declare dso_local i32 @omp_get_team_size(i32)

declare dso_local i32 @omp_get_thread_limit()

declare dso_local i32 @omp_get_max_active_levels()

declare dso_local void @omp_get_schedule(i32*, i32*)

declare dso_local i32 @omp_get_max_task_priority()

declare dso_local void @omp_init_lock(%struct.omp_lock_t*)

declare dso_local void @omp_set_lock(%struct.omp_lock_t*)

declare dso_local void @omp_unset_lock(%struct.omp_lock_t*)

declare dso_local void @omp_destroy_lock(%struct.omp_lock_t*)

declare dso_local i32 @omp_test_lock(%struct.omp_lock_t*)

declare dso_local void @omp_init_nest_lock(%struct.omp_nest_lock_t*)

declare dso_local void @omp_set_nest_lock(%struct.omp_nest_lock_t*)

declare dso_local void @omp_unset_nest_lock(%struct.omp_nest_lock_t*)

declare dso_local void @omp_destroy_nest_lock(%struct.omp_nest_lock_t*)

declare dso_local i32 @omp_test_nest_lock(%struct.omp_nest_lock_t*)

declare dso_local void @omp_init_lock_with_hint(%struct.omp_lock_t*, i32)

declare dso_local void @omp_init_nest_lock_with_hint(%struct.omp_nest_lock_t*, i32)

declare dso_local double @omp_get_wtime()

declare dso_local void @use_double(double)

declare dso_local double @omp_get_wtick()

declare dso_local i32 @omp_get_default_device()

declare dso_local void @omp_set_default_device(i32)

declare dso_local i32 @omp_is_initial_device()

declare dso_local i32 @omp_get_num_devices()

declare dso_local i32 @omp_get_num_teams()

declare dso_local i32 @omp_get_team_num()

declare dso_local i32 @omp_get_cancellation()

declare dso_local i32 @omp_get_initial_device()

declare dso_local i8* @omp_target_alloc(i64, i32)

declare dso_local void @use_voidptr(i8*)

declare dso_local void @omp_target_free(i8*, i32)

declare dso_local i32 @omp_target_is_present(i8*, i32)

declare dso_local i32 @omp_target_memcpy(i8*, i8*, i64, i64, i64, i32, i32)

declare dso_local i32 @omp_target_associate_ptr(i8*, i8*, i64, i64, i32)

declare dso_local i32 @omp_target_disassociate_ptr(i8*, i32)

declare dso_local i32 @omp_get_device_num()

declare dso_local i32 @omp_get_proc_bind()

declare dso_local i32 @omp_get_num_places()

declare dso_local i32 @omp_get_place_num_procs(i32)

declare dso_local void @omp_get_place_proc_ids(i32, i32*)

declare dso_local i32 @omp_get_place_num()

declare dso_local i32 @omp_get_partition_num_places()

declare dso_local void @omp_get_partition_place_nums(i32*)

declare dso_local i32 @omp_control_tool(i32, i32, i8*)

declare dso_local void @omp_destroy_allocator(i64)

declare dso_local void @omp_set_default_allocator(i64)

declare dso_local i64 @omp_get_default_allocator()

declare dso_local i8* @omp_alloc(i64, i64)

declare dso_local void @omp_free(i8*, i64)

declare dso_local void @ompc_set_affinity_format(i8*)

declare dso_local i64 @ompc_get_affinity_format(i8*, i64)

declare dso_local void @use_sizet(i64)

declare dso_local void @ompc_display_affinity(i8*)

declare dso_local i64 @ompc_capture_affinity(i8*, i64, i8*)

declare dso_local void @omp_fulfill_event(i64)

declare dso_local i32 @omp_pause_resource(i32, i32)

declare dso_local i32 @omp_pause_resource_all(i32)

declare dso_local i32 @omp_get_supported_active_levels()

declare void @__kmpc_barrier(%struct.ident_t*, i32)

declare i32 @__kmpc_cancel(%struct.ident_t*, i32, i32)

declare i32 @__kmpc_cancel_barrier(%struct.ident_t*, i32)

declare void @__kmpc_flush(%struct.ident_t*)

declare i32 @__kmpc_global_thread_num(%struct.ident_t*)

declare void @__kmpc_fork_call(%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...)

declare i32 @__kmpc_omp_taskwait(%struct.ident_t*, i32)

declare i32 @__kmpc_omp_taskyield(%struct.ident_t*, i32, i32)

declare void @__kmpc_push_num_threads(%struct.ident_t*, i32, i32)

declare void @__kmpc_push_proc_bind(%struct.ident_t*, i32, i32)

declare void @__kmpc_serialized_parallel(%struct.ident_t*, i32)

declare void @__kmpc_end_serialized_parallel(%struct.ident_t*, i32)

declare i32 @__kmpc_master(%struct.ident_t*, i32)

declare void @__kmpc_end_master(%struct.ident_t*, i32)

declare void @__kmpc_critical(%struct.ident_t*, i32, [8 x i32]*)

declare void @__kmpc_critical_with_hint(%struct.ident_t*, i32, [8 x i32]*, i32)

declare void @__kmpc_end_critical(%struct.ident_t*, i32, [8 x i32]*)

declare void @__kmpc_begin(%struct.ident_t*, i32)

declare void @__kmpc_end(%struct.ident_t*)

declare i32 @__kmpc_reduce(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)

declare i32 @__kmpc_reduce_nowait(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)

declare void @__kmpc_end_reduce(%struct.ident_t*, i32, [8 x i32]*)

declare void @__kmpc_end_reduce_nowait(%struct.ident_t*, i32, [8 x i32]*)

declare void @__kmpc_ordered(%struct.ident_t*, i32)

declare void @__kmpc_end_ordered(%struct.ident_t*, i32)

declare void @__kmpc_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)

declare void @__kmpc_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)

declare void @__kmpc_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)

declare void @__kmpc_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)

declare void @__kmpc_for_static_fini(%struct.ident_t*, i32)

declare void @__kmpc_team_static_init_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)

declare void @__kmpc_team_static_init_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)

declare void @__kmpc_team_static_init_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)

declare void @__kmpc_team_static_init_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)

declare void @__kmpc_dist_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)

declare void @__kmpc_dist_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)

declare void @__kmpc_dist_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)

declare void @__kmpc_dist_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)

declare i32 @__kmpc_single(%struct.ident_t*, i32)

declare void @__kmpc_end_single(%struct.ident_t*, i32)

declare i8* @__kmpc_omp_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i32 (i32, i8*)*)

declare i32 @__kmpc_omp_task(%struct.ident_t*, i32, i8*)

declare void @__kmpc_end_taskgroup(%struct.ident_t*, i32)

declare void @__kmpc_taskgroup(%struct.ident_t*, i32)

declare void @__kmpc_dist_dispatch_init_4(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)

declare void @__kmpc_dist_dispatch_init_4u(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)

declare void @__kmpc_dist_dispatch_init_8(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)

declare void @__kmpc_dist_dispatch_init_8u(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)

declare void @__kmpc_dispatch_init_4(%struct.ident_t*, i32, i32, i32, i32, i32, i32)

declare void @__kmpc_dispatch_init_4u(%struct.ident_t*, i32, i32, i32, i32, i32, i32)

declare void @__kmpc_dispatch_init_8(%struct.ident_t*, i32, i32, i64, i64, i64, i64)

declare void @__kmpc_dispatch_init_8u(%struct.ident_t*, i32, i32, i64, i64, i64, i64)

declare i32 @__kmpc_dispatch_next_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)

declare i32 @__kmpc_dispatch_next_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)

declare i32 @__kmpc_dispatch_next_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)

declare i32 @__kmpc_dispatch_next_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)

declare void @__kmpc_dispatch_fini_4(%struct.ident_t*, i32)

declare void @__kmpc_dispatch_fini_4u(%struct.ident_t*, i32)

declare void @__kmpc_dispatch_fini_8(%struct.ident_t*, i32)

declare void @__kmpc_dispatch_fini_8u(%struct.ident_t*, i32)

declare void @__kmpc_omp_task_begin_if0(%struct.ident_t*, i32, i8*)

declare void @__kmpc_omp_task_complete_if0(%struct.ident_t*, i32, i8*)

declare i32 @__kmpc_omp_task_with_deps(%struct.ident_t*, i32, i8*, i32, i8*, i32, i8*)

declare void @__kmpc_omp_wait_deps(%struct.ident_t*, i32, i32, i8*, i32, i8*)

declare i32 @__kmpc_cancellationpoint(%struct.ident_t*, i32, i32)

declare void @__kmpc_push_num_teams(%struct.ident_t*, i32, i32, i32)

declare void @__kmpc_fork_teams(%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...)

declare void @__kmpc_taskloop(%struct.ident_t*, i32, i8*, i32, i64*, i64*, i64, i32, i32, i64, i8*)

declare i8* @__kmpc_omp_target_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i32 (i32, i8*)*, i64)

declare i8* @__kmpc_taskred_modifier_init(%struct.ident_t*, i32, i32, i32, i8*)

declare i8* @__kmpc_taskred_init(i32, i32, i8*)

declare void @__kmpc_task_reduction_modifier_fini(%struct.ident_t*, i32, i32)

declare void @__kmpc_copyprivate(%struct.ident_t*, i32, i64, i8*, void (i8*, i8*)*, i32)

declare i8* @__kmpc_threadprivate_cached(%struct.ident_t*, i32, i8*, i64, i8***)

declare void @__kmpc_threadprivate_register(%struct.ident_t*, i8*, i8* (i8*)*, i8* (i8*, i8*)*, void (i8*)*)

declare void @__kmpc_doacross_init(%struct.ident_t*, i32, i32, i8*)

declare void @__kmpc_doacross_wait(%struct.ident_t*, i32, i64*)

declare void @__kmpc_doacross_post(%struct.ident_t*, i32, i64*)

declare void @__kmpc_doacross_fini(%struct.ident_t*, i32)

declare i8* @__kmpc_alloc(i32, i64, i8*)

declare void @__kmpc_free(i32, i8*, i8*)

declare i8* @__kmpc_init_allocator(i32, i8*, i32, i8*)

declare void @__kmpc_destroy_allocator(i32, i8*)

declare void @__kmpc_push_target_tripcount_mapper(%struct.ident_t*, i64, i64)

declare i32 @__kmpc_warp_active_thread_mask()

declare void @__kmpc_syncwarp(i32)

declare i32 @__tgt_target_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**)

declare i32 @__tgt_target_nowait_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i8*, i32, i8*)

declare i32 @__tgt_target_teams_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i32)

declare i32 @__tgt_target_teams_nowait_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i32, i32, i8*, i32, i8*)

declare void @__tgt_register_requires(i64)

declare void @__tgt_target_data_begin_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

declare void @__tgt_target_data_begin_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

declare void @__tgt_target_data_end_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

declare void @__tgt_target_data_end_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

declare void @__tgt_target_data_update_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

declare void @__tgt_target_data_update_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

declare i64 @__tgt_mapper_num_components(i8*)

declare void @__tgt_push_mapper_component(i8*, i8*, i8*, i64, i64, i8*)

declare i8* @__kmpc_task_allow_completion_event(%struct.ident_t*, i32, i8*)

declare i8* @__kmpc_task_reduction_get_th_data(i32, i8*, i8*)

declare i8* @__kmpc_task_reduction_init(i32, i32, i8*)

declare i8* @__kmpc_task_reduction_modifier_init(i8*, i32, i32, i32, i8*)

declare void @__kmpc_proxy_task_completed_ooo(i8*)

; Function Attrs: noinline cold
declare void @__kmpc_barrier_simple_spmd(%struct.ident_t* nocapture nofree readonly, i32) #0

attributes #0 = { noinline cold }

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_set_num_threads(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_set_dynamic(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_set_nested(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_set_max_active_levels(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_set_schedule(i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_num_threads()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @use_int(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_dynamic()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_nested()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_max_threads()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_thread_num()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_num_procs()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_in_parallel()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_in_final()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_active_level()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_level()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_ancestor_thread_num(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_team_size(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_thread_limit()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_max_active_levels()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_get_schedule(i32* nocapture writeonly, i32* nocapture writeonly)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_max_task_priority()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_init_lock(%struct.omp_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_set_lock(%struct.omp_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_unset_lock(%struct.omp_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_destroy_lock(%struct.omp_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_test_lock(%struct.omp_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_init_nest_lock(%struct.omp_nest_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_set_nest_lock(%struct.omp_nest_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_unset_nest_lock(%struct.omp_nest_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_destroy_nest_lock(%struct.omp_nest_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_test_nest_lock(%struct.omp_nest_lock_t*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_init_lock_with_hint(%struct.omp_lock_t*, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_init_nest_lock_with_hint(%struct.omp_nest_lock_t*, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local double @omp_get_wtime()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @use_double(double)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local double @omp_get_wtick()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_default_device()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_set_default_device(i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_is_initial_device()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_num_devices()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_num_teams()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_team_num()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_cancellation()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_initial_device()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i8* @omp_target_alloc(i64, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @use_voidptr(i8*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_target_free(i8*, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_target_is_present(i8*, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_target_memcpy(i8*, i8*, i64, i64, i64, i32, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_target_associate_ptr(i8*, i8*, i64, i64, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_target_disassociate_ptr(i8*, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_device_num()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_proc_bind()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_num_places()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_get_place_num_procs(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_get_place_proc_ids(i32, i32* nocapture writeonly)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_place_num()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_partition_num_places()

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local void @omp_get_partition_place_nums(i32*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_control_tool(i32, i32, i8*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_destroy_allocator(i64)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_set_default_allocator(i64)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i64 @omp_get_default_allocator()

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i8* @omp_alloc(i64, i64)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_free(i8*, i64)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @ompc_set_affinity_format(i8*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i64 @ompc_get_affinity_format(i8*, i64)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @use_sizet(i64)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @ompc_display_affinity(i8*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i64 @ompc_capture_affinity(i8*, i64, i8*)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local void @omp_fulfill_event(i64)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_pause_resource(i32, i32)

; CHECK-NOT: Function Attrs
; CHECK: declare dso_local i32 @omp_pause_resource_all(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare dso_local i32 @omp_get_supported_active_levels()

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_barrier(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_cancel(%struct.ident_t*, i32, i32)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare i32 @__kmpc_cancel_barrier(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_flush(%struct.ident_t*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_global_thread_num(%struct.ident_t*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_fork_call(%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare i32 @__kmpc_omp_taskwait(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_omp_taskyield(%struct.ident_t*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_push_num_threads(%struct.ident_t*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_push_proc_bind(%struct.ident_t*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_serialized_parallel(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_end_serialized_parallel(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_master(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_end_master(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_critical(%struct.ident_t*, i32, [8 x i32]*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_critical_with_hint(%struct.ident_t*, i32, [8 x i32]*, i32)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_end_critical(%struct.ident_t*, i32, [8 x i32]*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_begin(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_end(%struct.ident_t*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare i32 @__kmpc_reduce(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare i32 @__kmpc_reduce_nowait(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_end_reduce(%struct.ident_t*, i32, [8 x i32]*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_end_reduce_nowait(%struct.ident_t*, i32, [8 x i32]*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_ordered(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_end_ordered(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_for_static_fini(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_team_static_init_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_team_static_init_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_team_static_init_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_team_static_init_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare i32 @__kmpc_single(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_end_single(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_omp_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i32 (i32, i8*)*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_omp_task(%struct.ident_t*, i32, i8*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_end_taskgroup(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_taskgroup(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_4(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_4u(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_8(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_8u(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_init_4(%struct.ident_t*, i32, i32, i32, i32, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_init_4u(%struct.ident_t*, i32, i32, i32, i32, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_init_8(%struct.ident_t*, i32, i32, i64, i64, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_init_8u(%struct.ident_t*, i32, i32, i64, i64, i64, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_fini_4(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_fini_4u(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_fini_8(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_dispatch_fini_8u(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_omp_task_begin_if0(%struct.ident_t*, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_omp_task_complete_if0(%struct.ident_t*, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_omp_task_with_deps(%struct.ident_t*, i32, i8*, i32, i8*, i32, i8*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_omp_wait_deps(%struct.ident_t*, i32, i32, i8*, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__kmpc_cancellationpoint(%struct.ident_t*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_push_num_teams(%struct.ident_t*, i32, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_fork_teams(%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_taskloop(%struct.ident_t*, i32, i8*, i32, i64*, i64*, i64, i32, i32, i64, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_omp_target_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i32 (i32, i8*)*, i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_taskred_modifier_init(%struct.ident_t*, i32, i32, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_taskred_init(i32, i32, i8*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_task_reduction_modifier_fini(%struct.ident_t*, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_copyprivate(%struct.ident_t*, i32, i64, i8*, void (i8*, i8*)*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_threadprivate_cached(%struct.ident_t*, i32, i8*, i64, i8***)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_threadprivate_register(%struct.ident_t*, i8*, i8* (i8*)*, i8* (i8*, i8*)*, void (i8*)*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_doacross_init(%struct.ident_t*, i32, i32, i8*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_doacross_wait(%struct.ident_t*, i32, i64*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_doacross_post(%struct.ident_t*, i32, i64*)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_doacross_fini(%struct.ident_t*, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_alloc(i32, i64, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_free(i32, i8*, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_init_allocator(i32, i8*, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_destroy_allocator(i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_push_target_tripcount_mapper(%struct.ident_t*, i64, i64)

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare i32 @__kmpc_warp_active_thread_mask()

; CHECK: ; Function Attrs: convergent nounwind
; CHECK-NEXT: declare void @__kmpc_syncwarp(i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__tgt_target_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__tgt_target_nowait_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i8*, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__tgt_target_teams_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i32)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i32 @__tgt_target_teams_nowait_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i32, i32, i8*, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_register_requires(i64)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_target_data_begin_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_target_data_begin_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_target_data_end_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_target_data_end_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_target_data_update_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_target_data_update_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i64 @__tgt_mapper_num_components(i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__tgt_push_mapper_component(i8*, i8*, i8*, i64, i64, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_task_allow_completion_event(%struct.ident_t*, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_task_reduction_get_th_data(i32, i8*, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_task_reduction_init(i32, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare i8* @__kmpc_task_reduction_modifier_init(i8*, i32, i32, i32, i8*)

; CHECK: ; Function Attrs: nounwind
; CHECK-NEXT: declare void @__kmpc_proxy_task_completed_ooo(i8*)

; CHECK: ; Function Attrs: cold convergent nounwind
; CHECK-NEXT: declare void @__kmpc_barrier_simple_spmd(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind willreturn writeonly
; OPTIMISTIC-NEXT: declare dso_local void @omp_set_num_threads(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind willreturn writeonly
; OPTIMISTIC-NEXT: declare dso_local void @omp_set_dynamic(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind willreturn writeonly
; OPTIMISTIC-NEXT: declare dso_local void @omp_set_nested(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind willreturn writeonly
; OPTIMISTIC-NEXT: declare dso_local void @omp_set_max_active_levels(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind willreturn writeonly
; OPTIMISTIC-NEXT: declare dso_local void @omp_set_schedule(i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_num_threads()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @use_int(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_dynamic()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_nested()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_max_threads()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_thread_num()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_num_procs()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_in_parallel()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_in_final()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_active_level()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_level()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_ancestor_thread_num(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_team_size(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_thread_limit()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_max_active_levels()

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare dso_local void @omp_get_schedule(i32* nocapture writeonly, i32* nocapture writeonly)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_max_task_priority()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_init_lock(%struct.omp_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_set_lock(%struct.omp_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_unset_lock(%struct.omp_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_destroy_lock(%struct.omp_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_test_lock(%struct.omp_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_init_nest_lock(%struct.omp_nest_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_set_nest_lock(%struct.omp_nest_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_unset_nest_lock(%struct.omp_nest_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_destroy_nest_lock(%struct.omp_nest_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_test_nest_lock(%struct.omp_nest_lock_t*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_init_lock_with_hint(%struct.omp_lock_t*, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_init_nest_lock_with_hint(%struct.omp_nest_lock_t*, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local double @omp_get_wtime()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @use_double(double)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local double @omp_get_wtick()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_default_device()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_set_default_device(i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_is_initial_device()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_num_devices()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_num_teams()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_team_num()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_cancellation()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_initial_device()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i8* @omp_target_alloc(i64, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @use_voidptr(i8*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_target_free(i8*, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_target_is_present(i8*, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_target_memcpy(i8*, i8*, i64, i64, i64, i32, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_target_associate_ptr(i8*, i8*, i64, i64, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_target_disassociate_ptr(i8*, i32)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_device_num()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_proc_bind()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_num_places()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_get_place_num_procs(i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind
; OPTIMISTIC-NEXT: declare dso_local void @omp_get_place_proc_ids(i32, i32* nocapture writeonly)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_place_num()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly
; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_partition_num_places()

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly
; OPTIMISTIC-NEXT: declare dso_local void @omp_get_partition_place_nums(i32*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i32 @omp_control_tool(i32, i32, i8*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_destroy_allocator(i64)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_set_default_allocator(i64)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i64 @omp_get_default_allocator()

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i8* @omp_alloc(i64, i64)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @omp_free(i8*, i64)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local void @ompc_set_affinity_format(i8*)

; OPTIMISTIC-NOT: Function Attrs
; OPTIMISTIC: declare dso_local i64 @ompc_get_affinity_format(i8*, i64)

; OPTIMISTIC-NOT: Function Attrs
1419; OPTIMISTIC: declare dso_local void @use_sizet(i64)
1420
1421; OPTIMISTIC-NOT: Function Attrs
1422; OPTIMISTIC: declare dso_local void @ompc_display_affinity(i8*)
1423
1424; OPTIMISTIC-NOT: Function Attrs
1425; OPTIMISTIC: declare dso_local i64 @ompc_capture_affinity(i8*, i64, i8*)
1426
1427; OPTIMISTIC-NOT: Function Attrs
1428; OPTIMISTIC: declare dso_local void @omp_fulfill_event(i64)
1429
1430; OPTIMISTIC-NOT: Function Attrs
1431; OPTIMISTIC: declare dso_local i32 @omp_pause_resource(i32, i32)
1432
1433; OPTIMISTIC-NOT: Function Attrs
1434; OPTIMISTIC: declare dso_local i32 @omp_pause_resource_all(i32)
1435
1436; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
1437; OPTIMISTIC-NEXT: declare dso_local i32 @omp_get_supported_active_levels()
1438
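; Internal libomp runtime entry points (__kmpc_*) follow.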
; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind readonly willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_global_thread_num(%struct.ident_t* nocapture nofree readonly)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_fork_call(%struct.ident_t* nocapture nofree readonly, i32, void (i32*, i32*, ...)* nocapture nofree readonly, ...)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare i32 @__kmpc_omp_taskwait(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_omp_taskyield(%struct.ident_t* nocapture nofree readonly, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_push_num_threads(%struct.ident_t* nocapture nofree readonly, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_push_proc_bind(%struct.ident_t* nocapture nofree readonly, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_serialized_parallel(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_end_serialized_parallel(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_master(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_end_master(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_critical(%struct.ident_t* nocapture nofree readonly, i32, [8 x i32]*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_critical_with_hint(%struct.ident_t* nocapture nofree readonly, i32, [8 x i32]*, i32)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_end_critical(%struct.ident_t* nocapture nofree readonly, i32, [8 x i32]*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_begin(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_end(%struct.ident_t* nocapture nofree readonly)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare i32 @__kmpc_reduce(%struct.ident_t* nocapture nofree readonly, i32, i32, i64, i8* nocapture nofree readonly, void (i8*, i8*)*, [8 x i32]*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare i32 @__kmpc_reduce_nowait(%struct.ident_t* nocapture nofree readonly, i32, i32, i64, i8* nocapture nofree readonly, void (i8*, i8*)*, [8 x i32]*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_end_reduce(%struct.ident_t* nocapture nofree readonly, i32, [8 x i32]*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_end_reduce_nowait(%struct.ident_t* nocapture nofree readonly, i32, [8 x i32]*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_ordered(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_end_ordered(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_for_static_init_4(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_for_static_init_4u(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_for_static_init_8(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_for_static_init_8u(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_for_static_fini(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_team_static_init_4(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_team_static_init_4u(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_team_static_init_8(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_team_static_init_8u(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_for_static_init_4(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_for_static_init_4u(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_for_static_init_8(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_for_static_init_8u(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64, i64)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare i32 @__kmpc_single(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_end_single(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_omp_task_alloc(%struct.ident_t* nocapture nofree readonly, i32, i32, i64, i64, i32 (i32, i8*)* nocapture nofree readonly)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_omp_task(%struct.ident_t* nocapture nofree readonly, i32, i8*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_end_taskgroup(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_taskgroup(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_dispatch_init_4(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i32, i32, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_dispatch_init_4u(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i32, i32, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_dispatch_init_8(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i64, i64, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dist_dispatch_init_8u(%struct.ident_t* nocapture nofree readonly, i32, i32, i32* nocapture nofree, i64, i64, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_init_4(%struct.ident_t* nocapture nofree readonly, i32, i32, i32, i32, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_init_4u(%struct.ident_t* nocapture nofree readonly, i32, i32, i32, i32, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_init_8(%struct.ident_t* nocapture nofree readonly, i32, i32, i64, i64, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_init_8u(%struct.ident_t* nocapture nofree readonly, i32, i32, i64, i64, i64, i64)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_dispatch_next_4(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_dispatch_next_4u(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree, i32* nocapture nofree)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_dispatch_next_8(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_dispatch_next_8u(%struct.ident_t* nocapture nofree readonly, i32, i32* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree, i64* nocapture nofree)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_fini_4(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_fini_4u(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_fini_8(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_dispatch_fini_8u(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_omp_task_begin_if0(%struct.ident_t* nocapture nofree readonly, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_omp_task_complete_if0(%struct.ident_t* nocapture nofree readonly, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_omp_task_with_deps(%struct.ident_t* nocapture nofree readonly, i32, i8*, i32, i8* nocapture nofree readonly, i32, i8* nocapture nofree readonly)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_omp_wait_deps(%struct.ident_t* nocapture nofree readonly, i32, i32, i8* nocapture nofree readonly, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i32 @__kmpc_cancellationpoint(%struct.ident_t* nocapture nofree readonly, i32, i32)

; OPTIMISTIC: ; Function Attrs: inaccessiblemem_or_argmemonly nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_push_num_teams(%struct.ident_t* nocapture nofree readonly, i32, i32, i32)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_fork_teams(%struct.ident_t* nocapture nofree readonly, i32, void (i32*, i32*, ...)* nocapture nofree readonly, ...)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_taskloop(%struct.ident_t* nocapture nofree readonly, i32, i8*, i32, i64* nocapture nofree, i64* nocapture nofree, i64, i32, i32, i64, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_omp_target_task_alloc(%struct.ident_t* nocapture nofree readonly, i32, i32, i64, i64, i32 (i32, i8*)* nocapture nofree readonly, i64)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_taskred_modifier_init(%struct.ident_t* nocapture nofree readonly, i32, i32, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare i8* @__kmpc_taskred_init(i32, i32, i8*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_task_reduction_modifier_fini(%struct.ident_t* nocapture nofree readonly, i32, i32)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_copyprivate(%struct.ident_t* nocapture nofree readonly, i32, i64, i8* nocapture nofree readonly, void (i8*, i8*)*, i32)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_threadprivate_cached(%struct.ident_t* nocapture nofree readonly, i32, i8*, i64, i8***)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_threadprivate_register(%struct.ident_t* nocapture nofree readonly, i8*, i8* (i8*)* nocapture nofree readonly, i8* (i8*, i8*)* nocapture nofree readonly, void (i8*)* nocapture nofree readonly)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_doacross_init(%struct.ident_t* nocapture nofree readonly, i32, i32, i8*)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_doacross_wait(%struct.ident_t* nocapture nofree readonly, i32, i64* nocapture nofree readonly)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_doacross_post(%struct.ident_t* nocapture nofree readonly, i32, i64* nocapture nofree readonly)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_doacross_fini(%struct.ident_t* nocapture nofree readonly, i32)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_alloc(i32, i64, i8*)

; OPTIMISTIC: ; Function Attrs: nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_free(i32, i8*, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_init_allocator(i32, i8*, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_destroy_allocator(i32, i8*)

; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind willreturn writeonly
; OPTIMISTIC-NEXT: declare void @__kmpc_push_target_tripcount_mapper(%struct.ident_t*, i64, i64)

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare i32 @__kmpc_warp_active_thread_mask()

; OPTIMISTIC: ; Function Attrs: convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_syncwarp(i32)

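; Offloading runtime entry points (__tgt_*) follow.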
; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare i32 @__tgt_target_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare i32 @__tgt_target_nowait_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i8*, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare i32 @__tgt_target_teams_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i32)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare i32 @__tgt_target_teams_nowait_mapper(%struct.ident_t*, i64, i8*, i32, i8**, i8**, i64*, i64*, i8**, i8**, i32, i32, i32, i8*, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_register_requires(i64)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_target_data_begin_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_target_data_begin_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_target_data_end_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_target_data_end_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_target_data_update_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_target_data_update_nowait_mapper(%struct.ident_t*, i64, i32, i8**, i8**, i64*, i64*, i8**, i8**)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare i64 @__tgt_mapper_num_components(i8*)

; OPTIMISTIC: ; Function Attrs: nounwind
; OPTIMISTIC-NEXT: declare void @__tgt_push_mapper_component(i8*, i8*, i8*, i64, i64, i8*)

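; Remaining task-event, task-reduction, and barrier entry points (__kmpc_*).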
; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_task_allow_completion_event(%struct.ident_t* nocapture nofree readonly, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_task_reduction_get_th_data(i32, i8*, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_task_reduction_init(i32, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare noalias i8* @__kmpc_task_reduction_modifier_init(i8*, i32, i32, i32, i8*)

; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn
; OPTIMISTIC-NEXT: declare void @__kmpc_proxy_task_completed_ooo(i8*)

; OPTIMISTIC: ; Function Attrs: cold convergent nounwind
; OPTIMISTIC-NEXT: declare void @__kmpc_barrier_simple_spmd(%struct.ident_t* nocapture nofree readonly, i32)

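; The "openmp" module flag (here version 50) marks this as an OpenMP module.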
!llvm.module.flags = !{!0}

!0 = !{i32 7, !"openmp", i32 50}
