; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=SI %s
; RUN: llc -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=VI %s

; Since this intrinsic is exposed as a constant after isel, use it to
; defeat the DAG's compare with constant canonicalizations.
declare i32 @llvm.amdgcn.groupstaticsize() #1

@lds = addrspace(3) global [512 x i32] undef, align 4
9
; An eq compare against an inline immediate should use the full s_cmp form.
; GCN-LABEL: {{^}}br_scc_eq_i32_inline_imm:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 4{{$}}
define amdgpu_kernel void @br_scc_eq_i32_inline_imm(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
25
; 0x7fff fits in a signed 16-bit simm, so the compact s_cmpk form is used.
; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x7fff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 32767
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
41
; 0x8000 is out of signed simm16 range but fits unsigned, so the u32 kimm form is used.
; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max_p1:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
57
; Same as above but for ne (lg) comparison.
; GCN-LABEL: {{^}}br_scc_ne_i32_simm16_max_p1:
; GCN: s_cmpk_lg_u32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_ne_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
73
; -32768 is the minimum signed simm16; still selects the compact i32 kimm form.
; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_min(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, -32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
89
; -32769 does not fit any 16-bit immediate; falls back to the full 32-bit literal compare.
; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min_m1:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 0xffff7fff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_min_m1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, -32769
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
105
; NOTE(review): despite the uimm15 name this compares against 65535, identical to
; br_scc_eq_i32_uimm16_max below — kept as-is to preserve test behavior.
; GCN-LABEL: {{^}}br_scc_eq_i32_uimm15_max:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm15_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65535
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
121
; 0xffff is the maximum unsigned 16-bit immediate; compact u32 kimm form is used.
; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65535
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
137
; 0x10000 exceeds any 16-bit immediate; must use the full 32-bit literal compare.
; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max_p1:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 0x10000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65536
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
153
154
; Plain small positive constant: compact signed kimm form.
; GCN-LABEL: {{^}}br_scc_eq_i32:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_eq_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
170
; GCN-LABEL: {{^}}br_scc_ne_i32:
; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_ne_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
186
; GCN-LABEL: {{^}}br_scc_sgt_i32:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_sgt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
202
; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x7fff{{$}}
define amdgpu_kernel void @br_scc_sgt_i32_simm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 32767
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
218
; Signed compare against 0x8000 cannot use the signed kimm form (out of simm16 range).
; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max_p1:
; GCN: s_cmp_gt_i32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_sgt_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
234
; groupstaticsize folds to the LDS block size (0x800 = 512*4) after isel, so the
; inline-asm use of @lds keeps that constant out of the DAG canonicalizations.
; GCN-LABEL: {{^}}br_scc_sge_i32:
; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_sge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sge i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
251
; GCN-LABEL: {{^}}br_scc_slt_i32:
; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_slt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp slt i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
267
; GCN-LABEL: {{^}}br_scc_sle_i32:
; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_sle_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sle i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
284
; GCN-LABEL: {{^}}br_scc_ugt_i32:
; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_ugt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ugt i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
301
; GCN-LABEL: {{^}}br_scc_uge_i32:
; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_uge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp uge i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
318
; GCN-LABEL: {{^}}br_scc_ult_i32:
; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_ult_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
334
; Unsigned compare against 0xffff8000 (-32768 as u32) cannot use a kimm form.
; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16:
; GCN: s_cmp_lt_u32 s2, 0xffff8000
define amdgpu_kernel void @br_scc_ult_i32_min_simm16(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, -32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
350
; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16_m1:
; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xffff7fff{{$}}
define amdgpu_kernel void @br_scc_ult_i32_min_simm16_m1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, -32769
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
366
; GCN-LABEL: {{^}}br_scc_ule_i32:
; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_ule_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ule i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
383
; Constant on the LHS: isel must commute the operands to use the kimm form.
; GCN-LABEL: {{^}}commute_br_scc_eq_i32:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_eq_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp eq i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
400
; GCN-LABEL: {{^}}commute_br_scc_ne_i32:
; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ne_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ne i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
417
; sgt with operands swapped becomes lt after commuting.
; GCN-LABEL: {{^}}commute_br_scc_sgt_i32:
; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sgt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sgt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
434
; GCN-LABEL: {{^}}commute_br_scc_sge_i32:
; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sge i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
451
; GCN-LABEL: {{^}}commute_br_scc_slt_i32:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_slt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp slt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
468
; GCN-LABEL: {{^}}commute_br_scc_sle_i32:
; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sle_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sle i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
485
; GCN-LABEL: {{^}}commute_br_scc_ugt_i32:
; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ugt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ugt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
502
; GCN-LABEL: {{^}}commute_br_scc_uge_i32:
; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_uge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp uge i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
519
; GCN-LABEL: {{^}}commute_br_scc_ult_i32:
; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ult_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ult i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
536
; GCN-LABEL: {{^}}commute_br_scc_ule_i32:
; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ule_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ule i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
553
; ~0x800 = 0xfffff7ff does not fit a 16-bit immediate; full literal compare expected.
; GCN-LABEL: {{^}}br_scc_ult_i32_non_u16:
; GCN: s_cmp_lt_u32 s2, 0xfffff7ff
define amdgpu_kernel void @br_scc_ult_i32_non_u16(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %not.size = xor i32 %size, -1
  %cmp0 = icmp ult i32 %cond, %not.size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
571
; 64-bit compares: VI has scalar s_cmp_eq_u64; SI must use the vector compare.
; GCN-LABEL: {{^}}br_scc_eq_i64_inline_imm:
; VI: s_cmp_eq_u64 s{{\[[0-9]+:[0-9]+\]}}, 4

; SI: v_cmp_eq_u64_e64
define amdgpu_kernel void @br_scc_eq_i64_inline_imm(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i64 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
589
; Non-inline 64-bit constant is materialized into an SGPR pair before the compare.
; GCN-LABEL: {{^}}br_scc_eq_i64_simm16:
; VI-DAG: s_movk_i32 s[[K_LO:[0-9]+]], 0x4d2
; VI-DAG: s_mov_b32 s[[K_HI:[0-9]+]], 0
; VI: s_cmp_eq_u64 s{{\[[0-9]+:[0-9]+\]}}, s{{\[}}[[K_LO]]:[[K_HI]]{{\]}}

; SI: v_cmp_eq_u64_e32
define amdgpu_kernel void @br_scc_eq_i64_simm16(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i64 %cond, 1234
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
609
; GCN-LABEL: {{^}}br_scc_ne_i64_inline_imm:
; VI: s_cmp_lg_u64 s{{\[[0-9]+:[0-9]+\]}}, 4

; SI: v_cmp_ne_u64_e64
define amdgpu_kernel void @br_scc_ne_i64_inline_imm(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i64 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
627
; GCN-LABEL: {{^}}br_scc_ne_i64_simm16:
; VI-DAG: s_movk_i32 s[[K_LO:[0-9]+]], 0x4d2
; VI-DAG: s_mov_b32 s[[K_HI:[0-9]+]], 0
; VI: s_cmp_lg_u64 s{{\[[0-9]+:[0-9]+\]}}, s{{\[}}[[K_LO]]:[[K_HI]]{{\]}}

; SI: v_cmp_ne_u64_e32
define amdgpu_kernel void @br_scc_ne_i64_simm16(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i64 %cond, 1234
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}
647
attributes #0 = { nounwind }
attributes #1 = { nounwind readnone }
650