# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=tahiti -O0 -run-pass=legalizer -global-isel-abort=0 -o - %s | FileCheck -check-prefix=SI %s
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=fiji -O0 -run-pass=legalizer -global-isel-abort=0 -o - %s | FileCheck -check-prefix=VI %s
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=gfx900 -O0 -run-pass=legalizer -global-isel-abort=0 -o - %s | FileCheck -check-prefix=GFX9 %s

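# These tests cover legalization of G_ASHR for scalar widths from s7 through
# s128 and for 16/32/64-bit element vectors, with separate check prefixes for
# the SI (tahiti), VI (fiji) and GFX9 (gfx900) subtargets.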
---
name: test_ashr_s32_s32
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_s32_s32
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[COPY1]](s32)
    ; SI: $vgpr0 = COPY [[ASHR]](s32)
    ; VI-LABEL: name: test_ashr_s32_s32
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[COPY1]](s32)
    ; VI: $vgpr0 = COPY [[ASHR]](s32)
    ; GFX9-LABEL: name: test_ashr_s32_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[COPY1]](s32)
    ; GFX9: $vgpr0 = COPY [[ASHR]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s32) = G_ASHR %0, %1
    $vgpr0 = COPY %2
...
---
name: test_ashr_s64_s64
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; SI-LABEL: name: test_ashr_s64_s64
    ; SI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; SI: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[TRUNC]](s32)
    ; SI: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    ; VI-LABEL: name: test_ashr_s64_s64
    ; VI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; VI: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[TRUNC]](s32)
    ; VI: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    ; GFX9-LABEL: name: test_ashr_s64_s64
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[TRUNC]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = COPY $vgpr2_vgpr3
    %2:_(s64) = G_ASHR %0, %1
    $vgpr0_vgpr1 = COPY %2
...
---
name: test_ashr_s64_s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2

    ; SI-LABEL: name: test_ashr_s64_s32
    ; SI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[COPY1]](s32)
    ; SI: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    ; VI-LABEL: name: test_ashr_s64_s32
    ; VI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[COPY1]](s32)
    ; VI: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    ; GFX9-LABEL: name: test_ashr_s64_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[COPY1]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s32) = COPY $vgpr2
    %2:_(s64) = G_ASHR %0, %1
    $vgpr0_vgpr1 = COPY %2
...
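# A shift amount narrower than 32 bits (s16 below) is widened by zero extension:
# the checks show the amount being masked with 0xffff before the s64 G_ASHR.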
---
name: test_ashr_s64_s16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2

    ; SI-LABEL: name: test_ashr_s64_s16
    ; SI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[AND]](s32)
    ; SI: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    ; VI-LABEL: name: test_ashr_s64_s16
    ; VI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[AND]](s32)
    ; VI: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    ; GFX9-LABEL: name: test_ashr_s64_s16
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[AND]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[ASHR]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s32) = COPY $vgpr2
    %2:_(s16) = G_TRUNC %1
    %3:_(s64) = G_ASHR %0, %2
    $vgpr0_vgpr1 = COPY %3
...

---
name: test_ashr_s16_s32
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_s16_s32
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY2]], 16
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[COPY1]](s32)
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: $vgpr0 = COPY [[COPY3]](s32)
    ; VI-LABEL: name: test_ashr_s16_s32
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC1]](s16)
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; VI: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_ashr_s16_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC1]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s16) = G_TRUNC %0
    %3:_(s16) = G_ASHR %2, %1
    %4:_(s32) = G_ANYEXT %3
    $vgpr0 = COPY %4
...

---
name: test_ashr_s16_s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_s16_s16
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 16
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_ashr_s16_s16
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC1]](s16)
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; VI: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_ashr_s16_s16
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC1]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s16) = G_TRUNC %0
    %3:_(s16) = G_TRUNC %1
    %4:_(s16) = G_ASHR %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

---
name: test_ashr_s16_i8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_s16_i8
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 16
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_ashr_s16_i8
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; VI: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[AND]](s16)
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; VI: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_ashr_s16_i8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[AND]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s16) = G_TRUNC %0
    %3:_(s8) = G_TRUNC %1
    %4:_(s16) = G_ASHR %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

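# For shifted values narrower than 16 bits (s8, s7 below), the checks show SI
# sign-extending in a 32-bit register with G_SEXT_INREG and shifting at s32,
# VI sign-extending at s16 with a shl/ashr pair, and GFX9 using G_SEXT_INREG
# followed by a truncation to s16 before the 16-bit shift.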
---
name: test_ashr_i8_i8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_i8_i8
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 8
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_ashr_i8_i8
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; VI: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; VI: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C1]](s16)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C1]](s16)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[AND]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[ASHR]], [[TRUNC1]](s16)
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR1]](s16)
    ; VI: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_ashr_i8_i8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 8
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[SEXT_INREG]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[AND]](s32)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC1]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s8) = G_TRUNC %0
    %3:_(s8) = G_TRUNC %1
    %4:_(s8) = G_ASHR %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

---
name: test_ashr_s7_s7
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_s7_s7
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 127
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 7
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_ashr_s7_s7
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 127
    ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; VI: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 9
    ; VI: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C1]](s16)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C1]](s16)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[AND]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[ASHR]], [[TRUNC1]](s16)
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR1]](s16)
    ; VI: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_ashr_s7_s7
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 127
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 7
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[SEXT_INREG]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[AND]](s32)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC1]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s7) = G_TRUNC %0
    %3:_(s7) = G_TRUNC %1
    %4:_(s7) = G_ASHR %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

---
name: test_ashr_s24_s24
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_s24_s24
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16777215
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 24
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_ashr_s24_s24
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16777215
    ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; VI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; VI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 24
    ; VI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; VI: $vgpr0 = COPY [[COPY4]](s32)
    ; GFX9-LABEL: name: test_ashr_s24_s24
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16777215
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 24
    ; GFX9: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; GFX9: $vgpr0 = COPY [[COPY4]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s24) = G_TRUNC %0
    %3:_(s24) = G_TRUNC %1
    %4:_(s24) = G_ASHR %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

---
name: test_ashr_s32_s24
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_s32_s24
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16777215
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[AND]](s32)
    ; SI: $vgpr0 = COPY [[ASHR]](s32)
    ; VI-LABEL: name: test_ashr_s32_s24
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16777215
    ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; VI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[AND]](s32)
    ; VI: $vgpr0 = COPY [[ASHR]](s32)
    ; GFX9-LABEL: name: test_ashr_s32_s24
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16777215
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[AND]](s32)
    ; GFX9: $vgpr0 = COPY [[ASHR]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s24) = G_TRUNC %1
    %3:_(s32) = G_ASHR %0, %2
    $vgpr0 = COPY %3
...

---
name: test_ashr_v2s32_v2s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; SI-LABEL: name: test_ashr_v2s32_v2s32
    ; SI: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; SI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV]], [[UV2]](s32)
    ; SI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[UV3]](s32)
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[ASHR]](s32), [[ASHR1]](s32)
    ; SI: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; VI-LABEL: name: test_ashr_v2s32_v2s32
    ; VI: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; VI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; VI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV]], [[UV2]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[UV3]](s32)
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[ASHR]](s32), [[ASHR1]](s32)
    ; VI: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX9-LABEL: name: test_ashr_v2s32_v2s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV]], [[UV2]](s32)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[UV3]](s32)
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[ASHR]](s32), [[ASHR1]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
    %1:_(<2 x s32>) = COPY $vgpr2_vgpr3
    %2:_(<2 x s32>) = G_ASHR %0, %1
    $vgpr0_vgpr1 = COPY %2
...

---
name: test_ashr_v3s32_v3s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2, $vgpr3_vgpr4_vgpr5

    ; SI-LABEL: name: test_ashr_v3s32_v3s32
    ; SI: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV]], [[UV3]](s32)
    ; SI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[UV4]](s32)
    ; SI: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[UV2]], [[UV5]](s32)
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[ASHR]](s32), [[ASHR1]](s32), [[ASHR2]](s32)
    ; SI: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    ; VI-LABEL: name: test_ashr_v3s32_v3s32
    ; VI: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; VI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV]], [[UV3]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[UV4]](s32)
    ; VI: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[UV2]], [[UV5]](s32)
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[ASHR]](s32), [[ASHR1]](s32), [[ASHR2]](s32)
    ; VI: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    ; GFX9-LABEL: name: test_ashr_v3s32_v3s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV]], [[UV3]](s32)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[UV4]](s32)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[UV2]], [[UV5]](s32)
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[ASHR]](s32), [[ASHR1]](s32), [[ASHR2]](s32)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    %2:_(<3 x s32>) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2 = COPY %2
...

---
name: test_ashr_v2s64_v2s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4_vgpr5

    ; SI-LABEL: name: test_ashr_v2s64_v2s32
    ; SI: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr4_vgpr5
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; SI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV]], [[UV2]](s32)
    ; SI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[UV3]](s32)
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[ASHR]](s64), [[ASHR1]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    ; VI-LABEL: name: test_ashr_v2s64_v2s32
    ; VI: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr4_vgpr5
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; VI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV]], [[UV2]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[UV3]](s32)
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[ASHR]](s64), [[ASHR1]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    ; GFX9-LABEL: name: test_ashr_v2s64_v2s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr4_vgpr5
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV]], [[UV2]](s32)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[UV3]](s32)
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[ASHR]](s64), [[ASHR1]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    %0:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(<2 x s32>) = COPY $vgpr4_vgpr5
    %2:_(<2 x s64>) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %2
...

---
name: test_ashr_v3s64_v3s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7, $vgpr8_vgpr9_vgpr10

    ; SI-LABEL: name: test_ashr_v3s64_v3s32
    ; SI: [[COPY:%[0-9]+]]:_(<4 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; SI: [[EXTRACT:%[0-9]+]]:_(<3 x s64>) = G_EXTRACT [[COPY]](<4 x s64>), 0
    ; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr8_vgpr9_vgpr10
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64), [[UV2:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[EXTRACT]](<3 x s64>)
    ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV]], [[UV3]](s32)
    ; SI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[UV4]](s32)
    ; SI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV2]], [[UV5]](s32)
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[ASHR]](s64), [[ASHR1]](s64), [[ASHR2]](s64)
    ; SI: [[DEF:%[0-9]+]]:_(<4 x s64>) = G_IMPLICIT_DEF
    ; SI: [[INSERT:%[0-9]+]]:_(<4 x s64>) = G_INSERT [[DEF]], [[BUILD_VECTOR]](<3 x s64>), 0
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[INSERT]](<4 x s64>)
    ; VI-LABEL: name: test_ashr_v3s64_v3s32
    ; VI: [[COPY:%[0-9]+]]:_(<4 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; VI: [[EXTRACT:%[0-9]+]]:_(<3 x s64>) = G_EXTRACT [[COPY]](<4 x s64>), 0
    ; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr8_vgpr9_vgpr10
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64), [[UV2:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[EXTRACT]](<3 x s64>)
    ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV]], [[UV3]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[UV4]](s32)
    ; VI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV2]], [[UV5]](s32)
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[ASHR]](s64), [[ASHR1]](s64), [[ASHR2]](s64)
    ; VI: [[DEF:%[0-9]+]]:_(<4 x s64>) = G_IMPLICIT_DEF
    ; VI: [[INSERT:%[0-9]+]]:_(<4 x s64>) = G_INSERT [[DEF]], [[BUILD_VECTOR]](<3 x s64>), 0
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[INSERT]](<4 x s64>)
    ; GFX9-LABEL: name: test_ashr_v3s64_v3s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<4 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX9: [[EXTRACT:%[0-9]+]]:_(<3 x s64>) = G_EXTRACT [[COPY]](<4 x s64>), 0
    ; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr8_vgpr9_vgpr10
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64), [[UV2:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[EXTRACT]](<3 x s64>)
    ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV]], [[UV3]](s32)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[UV4]](s32)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV2]], [[UV5]](s32)
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[ASHR]](s64), [[ASHR1]](s64), [[ASHR2]](s64)
    ; GFX9: [[DEF:%[0-9]+]]:_(<4 x s64>) = G_IMPLICIT_DEF
    ; GFX9: [[INSERT:%[0-9]+]]:_(<4 x s64>) = G_INSERT [[DEF]], [[BUILD_VECTOR]](<3 x s64>), 0
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[INSERT]](<4 x s64>)
    %0:_(<4 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    %1:_(<3 x s64>) = G_EXTRACT %0, 0
    %2:_(<3 x s32>) = COPY $vgpr8_vgpr9_vgpr10
    %3:_(<3 x s64>) = G_ASHR %1, %2
    %4:_(<4 x s64>) = G_IMPLICIT_DEF
    %5:_(<4 x s64>) = G_INSERT %4, %3, 0
    $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY %5
...

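# GFX9 has packed 16-bit shifts, so a <2 x s16> G_ASHR is kept as a single
# vector operation there; the SI and VI checks below show the halves being
# unpacked and shifted as scalars instead.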
---
name: test_ashr_v2s16_v2s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_ashr_v2s16_v2s16
    ; SI: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; SI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; SI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>)
    ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 16
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; SI: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 16
    ; SI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG1]], [[AND1]](s32)
    ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]]
    ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[ASHR1]](s32)
    ; SI: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C1]]
    ; SI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL]]
    ; SI: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; SI: $vgpr0 = COPY [[BITCAST2]](<2 x s16>)
    ; VI-LABEL: name: test_ashr_v2s16_v2s16
    ; VI: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; VI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; VI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>)
    ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC2]](s16)
    ; VI: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC1]], [[TRUNC3]](s16)
    ; VI: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16)
    ; VI: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16)
    ; VI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; VI: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; VI: $vgpr0 = COPY [[BITCAST2]](<2 x s16>)
    ; GFX9-LABEL: name: test_ashr_v2s16_v2s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[COPY]], [[COPY1]](<2 x s16>)
    ; GFX9: $vgpr0 = COPY [[ASHR]](<2 x s16>)
    %0:_(<2 x s16>) = COPY $vgpr0
    %1:_(<2 x s16>) = COPY $vgpr1
    %2:_(<2 x s16>) = G_ASHR %0, %1
    $vgpr0 = COPY %2
...

---
name: test_ashr_v2s16_v2s32
body: |
  bb.0:
    liveins: $vgpr0, $vgpr0_vgpr1

    ; SI-LABEL: name: test_ashr_v2s16_v2s32
    ; SI: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; SI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY2]], 16
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[UV]](s32)
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; SI: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 16
    ; SI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG1]], [[UV1]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[ASHR1]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C1]]
    ; SI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
    ; SI: [[BITCAST1:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; SI: $vgpr0 = COPY [[BITCAST1]](<2 x s16>)
    ; VI-LABEL: name: test_ashr_v2s16_v2s32
    ; VI: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; VI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC2]](s16)
    ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[UV1]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC1]], [[TRUNC3]](s16)
    ; VI: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16)
    ; VI: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16)
    ; VI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; VI: [[BITCAST1:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; VI: $vgpr0 = COPY [[BITCAST1]](<2 x s16>)
    ; GFX9-LABEL: name: test_ashr_v2s16_v2s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC2]](s16)
    ; GFX9: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[UV1]](s32)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC1]], [[TRUNC3]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR1]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT]](s32), [[ANYEXT1]](s32)
    ; GFX9: $vgpr0 = COPY [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    %0:_(<2 x s16>) = COPY $vgpr0
    %1:_(<2 x s32>) = COPY $vgpr0_vgpr1
    %2:_(<2 x s16>) = G_ASHR %0, %1
    $vgpr0 = COPY %2
...

---
name: test_ashr_v3s16_v3s16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2, $vgpr3_vgpr4_vgpr5
    ; SI-LABEL: name: test_ashr_v3s16_v3s16
    ; SI: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; SI: [[COPY1:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr3_vgpr4_vgpr5
    ; SI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; SI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; SI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; SI: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<6 x s16>)
    ; SI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; SI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 16
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; SI: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 16
    ; SI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG1]], [[AND1]](s32)
    ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; SI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]]
    ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; SI: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY7]], 16
    ; SI: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG2]], [[AND2]](s32)
    ; SI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; SI: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
    ; SI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; SI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; SI: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; SI: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>)
    ; SI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32)
    ; SI: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>)
    ; SI: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST5]], [[C]](s32)
    ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C1]]
    ; SI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[ASHR1]](s32)
    ; SI: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY9]], [[C1]]
    ; SI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND4]], [[C]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND3]], [[SHL]]
    ; SI: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[ASHR2]](s32)
    ; SI: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C1]]
    ; SI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[BITCAST4]](s32)
    ; SI: [[AND6:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C1]]
    ; SI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND6]], [[C]](s32)
    ; SI: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND5]], [[SHL1]]
    ; SI: [[BITCAST7:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; SI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
    ; SI: [[AND7:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C1]]
    ; SI: [[COPY13:%[0-9]+]]:_(s32) = COPY [[BITCAST5]](s32)
    ; SI: [[AND8:%[0-9]+]]:_(s32) = G_AND [[COPY13]], [[C1]]
    ; SI: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND8]], [[C]](s32)
    ; SI: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND7]], [[SHL2]]
    ; SI: [[BITCAST8:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32)
    ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST6]](<2 x s16>), [[BITCAST7]](<2 x s16>), [[BITCAST8]](<2 x s16>)
    ; SI: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    ; VI-LABEL: name: test_ashr_v3s16_v3s16
    ; VI: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; VI: [[COPY1:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr3_vgpr4_vgpr5
    ; VI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; VI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; VI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; VI: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<6 x s16>)
    ; VI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST2]](s32)
    ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; VI: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; VI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; VI: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32)
    ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC3]](s16)
    ; VI: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC1]], [[TRUNC4]](s16)
    ; VI: [[ASHR2:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC2]], [[TRUNC5]](s16)
    ; VI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; VI: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
    ; VI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; VI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; VI: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; VI: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>)
    ; VI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32)
    ; VI: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>)
    ; VI: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST5]], [[C]](s32)
    ; VI: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16)
    ; VI: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16)
    ; VI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; VI: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; VI: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR2]](s16)
    ; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST4]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]]
    ; VI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND]], [[C]](s32)
    ; VI: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL1]]
    ; VI: [[BITCAST7:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; VI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
    ; VI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C1]]
    ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[BITCAST5]](s32)
    ; VI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; VI: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
    ; VI: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND1]], [[SHL2]]
    ; VI: [[BITCAST8:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32)
    ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST6]](<2 x s16>), [[BITCAST7]](<2 x s16>), [[BITCAST8]](<2 x s16>)
    ; VI: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    ; GFX9-LABEL: name: test_ashr_v3s16_v3s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX9: [[COPY1:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr3_vgpr4_vgpr5
    ; GFX9: [[UV:%[0-9]+]]:_(<3 x s16>), [[UV1:%[0-9]+]]:_(<3 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX9: [[UV2:%[0-9]+]]:_(<3 x s16>), [[UV3:%[0-9]+]]:_(<3 x s16>) = G_UNMERGE_VALUES [[COPY1]](<6 x s16>)
    ; GFX9: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF]], [[UV]](<3 x s16>), 0
    ; GFX9: [[EXTRACT:%[0-9]+]]:_(<2 x s16>) = G_EXTRACT [[INSERT]](<4 x s16>), 0
    ; GFX9: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF]], [[UV]](<3 x s16>), 0
    ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s16) = G_EXTRACT [[INSERT1]](<4 x s16>), 32
    ; GFX9: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF]], [[UV2]](<3 x s16>), 0
    ; GFX9: [[EXTRACT2:%[0-9]+]]:_(<2 x s16>) = G_EXTRACT [[INSERT2]](<4 x s16>), 0
    ; GFX9: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF]], [[UV2]](<3 x s16>), 0
    ; GFX9: [[EXTRACT3:%[0-9]+]]:_(s16) = G_EXTRACT [[INSERT3]](<4 x s16>), 32
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[EXTRACT]], [[EXTRACT2]](<2 x s16>)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[EXTRACT1]], [[EXTRACT3]](s16)
    ; GFX9: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<12 x s16>) = G_CONCAT_VECTORS [[DEF1]](<4 x s16>), [[DEF]](<4 x s16>), [[DEF]](<4 x s16>)
    ; GFX9: [[UV4:%[0-9]+]]:_(<3 x s16>), [[UV5:%[0-9]+]]:_(<3 x s16>), [[UV6:%[0-9]+]]:_(<3 x s16>), [[UV7:%[0-9]+]]:_(<3 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS]](<12 x s16>)
    ; GFX9: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF]], [[UV4]](<3 x s16>), 0
    ; GFX9: [[INSERT5:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT4]], [[ASHR]](<2 x s16>), 0
    ; GFX9: [[CONCAT_VECTORS1:%[0-9]+]]:_(<12 x s16>) = G_CONCAT_VECTORS [[INSERT5]](<4 x s16>), [[DEF]](<4 x s16>), [[DEF]](<4 x s16>)
    ; GFX9: [[UV8:%[0-9]+]]:_(<3 x s16>), [[UV9:%[0-9]+]]:_(<3 x s16>), [[UV10:%[0-9]+]]:_(<3 x s16>), [[UV11:%[0-9]+]]:_(<3 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS1]](<12 x s16>)
    ; GFX9: [[INSERT6:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF]], [[UV8]](<3 x s16>), 0
    ; GFX9: [[INSERT7:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT6]], [[ASHR1]](s16), 32
    ; GFX9: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[UV12:%[0-9]+]]:_(<2 x s16>), [[UV13:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[INSERT7]](<4 x s16>)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV12]](<2 x s16>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX9: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV13]](<2 x s16>)
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX9: [[UV14:%[0-9]+]]:_(<2 x s16>), [[UV15:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; GFX9: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV14]](<2 x s16>)
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX9: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV15]](<2 x s16>)
    ; GFX9: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY2]](s32), [[COPY3]](s32)
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY4]](s32), [[COPY5]](s32)
    ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY6]](s32), [[COPY7]](s32)
    ; GFX9: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR_TRUNC]](<2 x s16>), [[BUILD_VECTOR_TRUNC1]](<2 x s16>), [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS2]](<6 x s16>)
    %0:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(<6 x s16>) = COPY $vgpr3_vgpr4_vgpr5
    %2:_(<3 x s16>), %3:_(<3 x s16>) = G_UNMERGE_VALUES %0
    %4:_(<3 x s16>), %5:_(<3 x s16>) = G_UNMERGE_VALUES %1
    %6:_(<3 x s16>) = G_ASHR %2, %4
    %7:_(<3 x s16>) = G_IMPLICIT_DEF
    %8:_(<6 x s16>) = G_CONCAT_VECTORS %6, %7
    $vgpr0_vgpr1_vgpr2 = COPY %8

...

---
name: test_ashr_v4s16_v4s16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; SI-LABEL: name: test_ashr_v4s16_v4s16
    ; SI: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; SI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; SI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; SI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; SI: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; SI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; SI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; SI: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 16
    ; SI: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG]], [[AND]](s32)
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; SI: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 16
    ; SI: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG1]], [[AND1]](s32)
    ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; SI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]]
    ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; SI: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY7]], 16
    ; SI: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG2]], [[AND2]](s32)
    ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
    ; SI: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C1]]
    ; SI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
    ; SI: [[SEXT_INREG3:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY9]], 16
    ; SI: [[ASHR3:%[0-9]+]]:_(s32) = G_ASHR [[SEXT_INREG3]], [[AND3]](s32)
    ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[ASHR]](s32)
    ; SI: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C1]]
    ; SI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[ASHR1]](s32)
    ; SI: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C1]]
    ; SI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL]]
    ; SI: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; SI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[ASHR2]](s32)
    ; SI: [[AND6:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C1]]
    ; SI: [[COPY13:%[0-9]+]]:_(s32) = COPY [[ASHR3]](s32)
    ; SI: [[AND7:%[0-9]+]]:_(s32) = G_AND [[COPY13]], [[C1]]
    ; SI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND7]], [[C]](s32)
    ; SI: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND6]], [[SHL1]]
    ; SI: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>)
    ; SI: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; VI-LABEL: name: test_ashr_v4s16_v4s16
    ; VI: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; VI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; VI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; VI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; VI: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; VI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; VI: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST2]](s32)
    ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; VI: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; VI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; VI: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32)
    ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; VI: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32)
    ; VI: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC]], [[TRUNC4]](s16)
    ; VI: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC1]], [[TRUNC5]](s16)
    ; VI: [[ASHR2:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC2]], [[TRUNC6]](s16)
    ; VI: [[ASHR3:%[0-9]+]]:_(s16) = G_ASHR [[TRUNC3]], [[TRUNC7]](s16)
    ; VI: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16)
    ; VI: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16)
    ; VI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; VI: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; VI: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR2]](s16)
    ; VI: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR3]](s16)
    ; VI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[ZEXT3]], [[C]](s32)
    ; VI: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL1]]
    ; VI: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>)
    ; VI: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; GFX9-LABEL: name: test_ashr_v4s16_v4s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX9: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[UV]], [[UV2]](<2 x s16>)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[UV1]], [[UV3]](<2 x s16>)
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[ASHR]](<2 x s16>), [[ASHR1]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    %0:_(<4 x s16>) = COPY $vgpr0_vgpr1
    %1:_(<4 x s16>) = COPY $vgpr2_vgpr3
    %2:_(<4 x s16>) = G_ASHR %0, %1
    $vgpr0_vgpr1 = COPY %2
...

1038---
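# A 128-bit ashr with a variable amount (zero-extended from s32) is expanded
# into two 64-bit halves: compares against 64 and 0 pick between the sub-64
# funnel path (LSHR/SHL/OR for the low half) and the >=64 path, which shifts
# the high half by (amount - 64) and sign-fills the top with hi >> 63.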
name: test_ashr_s128_s128
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s128
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C2]]
    ; SI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[COPY1]]
    ; SI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C2]]
    ; SI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[COPY1]](s32)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[COPY1]](s32)
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[SUB1]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; SI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C3]](s32)
    ; SI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[SUB]](s32)
    ; SI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; SI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV]], [[SELECT]]
    ; SI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s128
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C2]]
    ; VI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[COPY1]]
    ; VI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C2]]
    ; VI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[COPY1]](s32)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[COPY1]](s32)
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[SUB1]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; VI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C3]](s32)
    ; VI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[SUB]](s32)
    ; VI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; VI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV]], [[SELECT]]
    ; VI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s128
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C2]]
    ; GFX9: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[COPY1]]
    ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C2]]
    ; GFX9: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[COPY1]](s32)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[COPY1]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[SUB1]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C3]](s32)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[SUB]](s32)
    ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; GFX9: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV]], [[SELECT]]
    ; GFX9: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = COPY $vgpr4
    %2:_(s128) = G_ZEXT %1
    %3:_(s128) = G_ASHR %0, %2
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %3
...

---
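# Same expansion as test_ashr_s128_s128, but with the shift amount passed
# directly as an s32 instead of being zero-extended to s128.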
name: test_ashr_s128_s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s32
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C]]
    ; SI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C]], [[COPY1]]
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; SI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C]]
    ; SI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C1]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[COPY1]](s32)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[COPY1]](s32)
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[SUB1]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; SI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C2]](s32)
    ; SI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[SUB]](s32)
    ; SI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; SI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV]], [[SELECT]]
    ; SI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s32
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C]]
    ; VI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C]], [[COPY1]]
    ; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; VI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C]]
    ; VI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C1]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[COPY1]](s32)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[COPY1]](s32)
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[SUB1]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; VI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C2]](s32)
    ; VI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[SUB]](s32)
    ; VI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; VI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV]], [[SELECT]]
    ; VI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C]]
    ; GFX9: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C]], [[COPY1]]
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C]]
    ; GFX9: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C1]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[COPY1]](s32)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[COPY1]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[SUB1]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C2]](s32)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[SUB]](s32)
    ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; GFX9: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV]], [[SELECT]]
    ; GFX9: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = COPY $vgpr4
    %2:_(s128) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %2
...

---
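# A constant shift amount of 0 folds away entirely; only the unmerge and
# re-merge of the source value remain.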
name: test_ashr_s128_s32_0
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s32_0
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[UV]](s64), [[UV1]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s32_0
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[UV]](s64), [[UV1]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s32_0
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[UV]](s64), [[UV1]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = G_CONSTANT i32 0
    %3:_(s128) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %3
...

---
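# A constant amount below 64 takes the short-shift path directly: the low half
# becomes (lo >> 23) | (hi << 41) and the high half is hi >> 23.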
name: test_ashr_s128_s32_23
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s32_23
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 23
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 41
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s32_23
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 23
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 41
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s32_23
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 23
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 41
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = G_CONSTANT i32 23
    %3:_(s128) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %3
...

---
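# Shift amounts 31, 32 and 33 (this test and the two that follow) probe the
# 32-bit boundary; all of them stay on the constant sub-64 expansion.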
name: test_ashr_s128_s32_31
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s32_31
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 33
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s32_31
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 33
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s32_31
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 33
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = G_CONSTANT i32 31
    %3:_(s128) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %3
...

---
name: test_ashr_s128_s32_32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s32_32
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s32_32
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s32_32
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = G_CONSTANT i32 32
    %3:_(s128) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %3
...

---
name: test_ashr_s128_s32_33
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s32_33
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 33
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s32_33
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 33
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s32_33
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 33
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV]], [[C]](s32)
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV1]], [[C1]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[OR]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = G_CONSTANT i32 33
    %3:_(s128) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %3
...

---
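# A constant amount of 64 or more needs no funnel: 127 - 64 = 63, so both
# result halves come from shifting the high half right by 63 (sign fill).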
name: test_ashr_s128_s32_127
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4

    ; SI-LABEL: name: test_ashr_s128_s32_127
    ; SI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; SI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; SI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[ASHR]](s64), [[ASHR1]](s64)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; VI-LABEL: name: test_ashr_s128_s32_127
    ; VI: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; VI: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; VI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[ASHR]](s64), [[ASHR1]](s64)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX9-LABEL: name: test_ashr_s128_s32_127
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](s128)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[ASHR]](s64), [[ASHR1]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s32) = G_CONSTANT i32 127
    %3:_(s128) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %3
...

---
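# The 256-bit shift is first split into 128-bit halves, and each 128-bit piece
# is expanded again into 64-bit operations, so the compare/select cascade
# appears twice (against 128 and against 64).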
name: test_ashr_s256_s256
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7, $vgpr8

    ; SI-LABEL: name: test_ashr_s256_s256
    ; SI: [[COPY:%[0-9]+]]:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr8
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; SI: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](s256)
    ; SI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C2]]
    ; SI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[COPY1]]
    ; SI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C2]]
    ; SI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; SI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; SI: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; SI: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C3]]
    ; SI: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[COPY1]]
    ; SI: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C3]]
    ; SI: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[COPY1]](s32)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV2]], [[COPY1]](s32)
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV3]], [[SUB3]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; SI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[C4]](s32)
    ; SI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[SUB2]](s32)
    ; SI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[OR]], [[ASHR2]]
    ; SI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP3]](s1), [[UV2]], [[SELECT]]
    ; SI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[ASHR]], [[ASHR1]]
    ; SI: [[UV4:%[0-9]+]]:_(s64), [[UV5:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; SI: [[SUB4:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C3]]
    ; SI: [[SUB5:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[COPY1]]
    ; SI: [[ICMP4:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C3]]
    ; SI: [[ICMP5:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; SI: [[LSHR1:%[0-9]+]]:_(s64) = G_LSHR [[UV5]], [[COPY1]](s32)
    ; SI: [[LSHR2:%[0-9]+]]:_(s64) = G_LSHR [[UV4]], [[COPY1]](s32)
    ; SI: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[UV5]], [[SUB5]](s32)
    ; SI: [[OR1:%[0-9]+]]:_(s64) = G_OR [[LSHR2]], [[SHL1]]
    ; SI: [[LSHR3:%[0-9]+]]:_(s64) = G_LSHR [[UV5]], [[SUB4]](s32)
    ; SI: [[SELECT3:%[0-9]+]]:_(s64) = G_SELECT [[ICMP4]](s1), [[OR1]], [[LSHR3]]
    ; SI: [[SELECT4:%[0-9]+]]:_(s64) = G_SELECT [[ICMP5]](s1), [[UV4]], [[SELECT3]]
    ; SI: [[SELECT5:%[0-9]+]]:_(s64) = G_SELECT [[ICMP4]](s1), [[LSHR1]], [[C1]]
    ; SI: [[UV6:%[0-9]+]]:_(s64), [[UV7:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; SI: [[SUB6:%[0-9]+]]:_(s32) = G_SUB [[SUB1]], [[C3]]
    ; SI: [[SUB7:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SUB1]]
    ; SI: [[ICMP6:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[SUB1]](s32), [[C3]]
    ; SI: [[ICMP7:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[SUB1]](s32), [[C]]
    ; SI: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[UV6]], [[SUB1]](s32)
    ; SI: [[LSHR4:%[0-9]+]]:_(s64) = G_LSHR [[UV6]], [[SUB7]](s32)
    ; SI: [[SHL3:%[0-9]+]]:_(s64) = G_SHL [[UV7]], [[SUB1]](s32)
    ; SI: [[OR2:%[0-9]+]]:_(s64) = G_OR [[LSHR4]], [[SHL3]]
    ; SI: [[SHL4:%[0-9]+]]:_(s64) = G_SHL [[UV6]], [[SUB6]](s32)
    ; SI: [[SELECT6:%[0-9]+]]:_(s64) = G_SELECT [[ICMP6]](s1), [[SHL2]], [[C1]]
    ; SI: [[SELECT7:%[0-9]+]]:_(s64) = G_SELECT [[ICMP6]](s1), [[OR2]], [[SHL4]]
    ; SI: [[SELECT8:%[0-9]+]]:_(s64) = G_SELECT [[ICMP7]](s1), [[UV7]], [[SELECT7]]
    ; SI: [[OR3:%[0-9]+]]:_(s64) = G_OR [[SELECT4]], [[SELECT6]]
    ; SI: [[OR4:%[0-9]+]]:_(s64) = G_OR [[SELECT5]], [[SELECT8]]
    ; SI: [[UV8:%[0-9]+]]:_(s64), [[UV9:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; SI: [[ASHR3:%[0-9]+]]:_(s64) = G_ASHR [[UV9]], [[C4]](s32)
    ; SI: [[ASHR4:%[0-9]+]]:_(s64) = G_ASHR [[UV9]], [[C4]](s32)
    ; SI: [[UV10:%[0-9]+]]:_(s64), [[UV11:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; SI: [[SUB8:%[0-9]+]]:_(s32) = G_SUB [[SUB]], [[C3]]
    ; SI: [[SUB9:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SUB]]
    ; SI: [[ICMP8:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[SUB]](s32), [[C3]]
    ; SI: [[ICMP9:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[SUB]](s32), [[C]]
    ; SI: [[ASHR5:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[SUB]](s32)
    ; SI: [[LSHR5:%[0-9]+]]:_(s64) = G_LSHR [[UV10]], [[SUB]](s32)
    ; SI: [[SHL5:%[0-9]+]]:_(s64) = G_SHL [[UV11]], [[SUB9]](s32)
    ; SI: [[OR5:%[0-9]+]]:_(s64) = G_OR [[LSHR5]], [[SHL5]]
    ; SI: [[ASHR6:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[C4]](s32)
    ; SI: [[ASHR7:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[SUB8]](s32)
    ; SI: [[SELECT9:%[0-9]+]]:_(s64) = G_SELECT [[ICMP8]](s1), [[OR5]], [[ASHR7]]
    ; SI: [[SELECT10:%[0-9]+]]:_(s64) = G_SELECT [[ICMP9]](s1), [[UV10]], [[SELECT9]]
    ; SI: [[SELECT11:%[0-9]+]]:_(s64) = G_SELECT [[ICMP8]](s1), [[ASHR5]], [[ASHR6]]
    ; SI: [[SELECT12:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR3]], [[SELECT10]]
    ; SI: [[SELECT13:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR4]], [[SELECT11]]
    ; SI: [[UV12:%[0-9]+]]:_(s64), [[UV13:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; SI: [[SELECT14:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV12]], [[SELECT12]]
    ; SI: [[SELECT15:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV13]], [[SELECT13]]
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT14]](s64), [[SELECT15]](s64)
    ; SI: [[SELECT16:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[SELECT1]], [[ASHR3]]
    ; SI: [[SELECT17:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[SELECT2]], [[ASHR4]]
    ; SI: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT16]](s64), [[SELECT17]](s64)
    ; SI: [[MV2:%[0-9]+]]:_(s256) = G_MERGE_VALUES [[MV]](s128), [[MV1]](s128)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[MV2]](s256)
    ; VI-LABEL: name: test_ashr_s256_s256
    ; VI: [[COPY:%[0-9]+]]:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr8
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; VI: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](s256)
    ; VI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C2]]
    ; VI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[COPY1]]
    ; VI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C2]]
    ; VI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; VI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; VI: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; VI: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C3]]
    ; VI: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[COPY1]]
    ; VI: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C3]]
    ; VI: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[COPY1]](s32)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV2]], [[COPY1]](s32)
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV3]], [[SUB3]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; VI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[C4]](s32)
    ; VI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[SUB2]](s32)
    ; VI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[OR]], [[ASHR2]]
    ; VI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP3]](s1), [[UV2]], [[SELECT]]
    ; VI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[ASHR]], [[ASHR1]]
    ; VI: [[UV4:%[0-9]+]]:_(s64), [[UV5:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; VI: [[SUB4:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C3]]
    ; VI: [[SUB5:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[COPY1]]
    ; VI: [[ICMP4:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C3]]
    ; VI: [[ICMP5:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; VI: [[LSHR1:%[0-9]+]]:_(s64) = G_LSHR [[UV5]], [[COPY1]](s32)
    ; VI: [[LSHR2:%[0-9]+]]:_(s64) = G_LSHR [[UV4]], [[COPY1]](s32)
    ; VI: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[UV5]], [[SUB5]](s32)
    ; VI: [[OR1:%[0-9]+]]:_(s64) = G_OR [[LSHR2]], [[SHL1]]
    ; VI: [[LSHR3:%[0-9]+]]:_(s64) = G_LSHR [[UV5]], [[SUB4]](s32)
    ; VI: [[SELECT3:%[0-9]+]]:_(s64) = G_SELECT [[ICMP4]](s1), [[OR1]], [[LSHR3]]
    ; VI: [[SELECT4:%[0-9]+]]:_(s64) = G_SELECT [[ICMP5]](s1), [[UV4]], [[SELECT3]]
    ; VI: [[SELECT5:%[0-9]+]]:_(s64) = G_SELECT [[ICMP4]](s1), [[LSHR1]], [[C1]]
    ; VI: [[UV6:%[0-9]+]]:_(s64), [[UV7:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; VI: [[SUB6:%[0-9]+]]:_(s32) = G_SUB [[SUB1]], [[C3]]
    ; VI: [[SUB7:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SUB1]]
    ; VI: [[ICMP6:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[SUB1]](s32), [[C3]]
    ; VI: [[ICMP7:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[SUB1]](s32), [[C]]
    ; VI: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[UV6]], [[SUB1]](s32)
    ; VI: [[LSHR4:%[0-9]+]]:_(s64) = G_LSHR [[UV6]], [[SUB7]](s32)
    ; VI: [[SHL3:%[0-9]+]]:_(s64) = G_SHL [[UV7]], [[SUB1]](s32)
    ; VI: [[OR2:%[0-9]+]]:_(s64) = G_OR [[LSHR4]], [[SHL3]]
    ; VI: [[SHL4:%[0-9]+]]:_(s64) = G_SHL [[UV6]], [[SUB6]](s32)
    ; VI: [[SELECT6:%[0-9]+]]:_(s64) = G_SELECT [[ICMP6]](s1), [[SHL2]], [[C1]]
    ; VI: [[SELECT7:%[0-9]+]]:_(s64) = G_SELECT [[ICMP6]](s1), [[OR2]], [[SHL4]]
    ; VI: [[SELECT8:%[0-9]+]]:_(s64) = G_SELECT [[ICMP7]](s1), [[UV7]], [[SELECT7]]
    ; VI: [[OR3:%[0-9]+]]:_(s64) = G_OR [[SELECT4]], [[SELECT6]]
    ; VI: [[OR4:%[0-9]+]]:_(s64) = G_OR [[SELECT5]], [[SELECT8]]
    ; VI: [[UV8:%[0-9]+]]:_(s64), [[UV9:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; VI: [[ASHR3:%[0-9]+]]:_(s64) = G_ASHR [[UV9]], [[C4]](s32)
    ; VI: [[ASHR4:%[0-9]+]]:_(s64) = G_ASHR [[UV9]], [[C4]](s32)
    ; VI: [[UV10:%[0-9]+]]:_(s64), [[UV11:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; VI: [[SUB8:%[0-9]+]]:_(s32) = G_SUB [[SUB]], [[C3]]
    ; VI: [[SUB9:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SUB]]
    ; VI: [[ICMP8:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[SUB]](s32), [[C3]]
    ; VI: [[ICMP9:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[SUB]](s32), [[C]]
    ; VI: [[ASHR5:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[SUB]](s32)
    ; VI: [[LSHR5:%[0-9]+]]:_(s64) = G_LSHR [[UV10]], [[SUB]](s32)
    ; VI: [[SHL5:%[0-9]+]]:_(s64) = G_SHL [[UV11]], [[SUB9]](s32)
    ; VI: [[OR5:%[0-9]+]]:_(s64) = G_OR [[LSHR5]], [[SHL5]]
    ; VI: [[ASHR6:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[C4]](s32)
    ; VI: [[ASHR7:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[SUB8]](s32)
    ; VI: [[SELECT9:%[0-9]+]]:_(s64) = G_SELECT [[ICMP8]](s1), [[OR5]], [[ASHR7]]
    ; VI: [[SELECT10:%[0-9]+]]:_(s64) = G_SELECT [[ICMP9]](s1), [[UV10]], [[SELECT9]]
    ; VI: [[SELECT11:%[0-9]+]]:_(s64) = G_SELECT [[ICMP8]](s1), [[ASHR5]], [[ASHR6]]
    ; VI: [[SELECT12:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR3]], [[SELECT10]]
    ; VI: [[SELECT13:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR4]], [[SELECT11]]
    ; VI: [[UV12:%[0-9]+]]:_(s64), [[UV13:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; VI: [[SELECT14:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV12]], [[SELECT12]]
    ; VI: [[SELECT15:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV13]], [[SELECT13]]
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT14]](s64), [[SELECT15]](s64)
    ; VI: [[SELECT16:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[SELECT1]], [[ASHR3]]
    ; VI: [[SELECT17:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[SELECT2]], [[ASHR4]]
    ; VI: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT16]](s64), [[SELECT17]](s64)
    ; VI: [[MV2:%[0-9]+]]:_(s256) = G_MERGE_VALUES [[MV]](s128), [[MV1]](s128)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[MV2]](s256)
    ; GFX9-LABEL: name: test_ashr_s256_s256
    ; GFX9: [[COPY:%[0-9]+]]:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr8
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; GFX9: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](s256)
    ; GFX9: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C2]]
    ; GFX9: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[COPY1]]
    ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C2]]
    ; GFX9: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; GFX9: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; GFX9: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; GFX9: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C3]]
    ; GFX9: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[COPY1]]
    ; GFX9: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C3]]
    ; GFX9: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[COPY1]](s32)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV2]], [[COPY1]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV3]], [[SUB3]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[C4]](s32)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV3]], [[SUB2]](s32)
    ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[OR]], [[ASHR2]]
    ; GFX9: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP3]](s1), [[UV2]], [[SELECT]]
    ; GFX9: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[ASHR]], [[ASHR1]]
    ; GFX9: [[UV4:%[0-9]+]]:_(s64), [[UV5:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; GFX9: [[SUB4:%[0-9]+]]:_(s32) = G_SUB [[COPY1]], [[C3]]
    ; GFX9: [[SUB5:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[COPY1]]
    ; GFX9: [[ICMP4:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[COPY1]](s32), [[C3]]
    ; GFX9: [[ICMP5:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY1]](s32), [[C]]
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s64) = G_LSHR [[UV5]], [[COPY1]](s32)
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s64) = G_LSHR [[UV4]], [[COPY1]](s32)
    ; GFX9: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[UV5]], [[SUB5]](s32)
    ; GFX9: [[OR1:%[0-9]+]]:_(s64) = G_OR [[LSHR2]], [[SHL1]]
    ; GFX9: [[LSHR3:%[0-9]+]]:_(s64) = G_LSHR [[UV5]], [[SUB4]](s32)
    ; GFX9: [[SELECT3:%[0-9]+]]:_(s64) = G_SELECT [[ICMP4]](s1), [[OR1]], [[LSHR3]]
    ; GFX9: [[SELECT4:%[0-9]+]]:_(s64) = G_SELECT [[ICMP5]](s1), [[UV4]], [[SELECT3]]
    ; GFX9: [[SELECT5:%[0-9]+]]:_(s64) = G_SELECT [[ICMP4]](s1), [[LSHR1]], [[C1]]
    ; GFX9: [[UV6:%[0-9]+]]:_(s64), [[UV7:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; GFX9: [[SUB6:%[0-9]+]]:_(s32) = G_SUB [[SUB1]], [[C3]]
    ; GFX9: [[SUB7:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SUB1]]
    ; GFX9: [[ICMP6:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[SUB1]](s32), [[C3]]
    ; GFX9: [[ICMP7:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[SUB1]](s32), [[C]]
    ; GFX9: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[UV6]], [[SUB1]](s32)
    ; GFX9: [[LSHR4:%[0-9]+]]:_(s64) = G_LSHR [[UV6]], [[SUB7]](s32)
    ; GFX9: [[SHL3:%[0-9]+]]:_(s64) = G_SHL [[UV7]], [[SUB1]](s32)
    ; GFX9: [[OR2:%[0-9]+]]:_(s64) = G_OR [[LSHR4]], [[SHL3]]
    ; GFX9: [[SHL4:%[0-9]+]]:_(s64) = G_SHL [[UV6]], [[SUB6]](s32)
    ; GFX9: [[SELECT6:%[0-9]+]]:_(s64) = G_SELECT [[ICMP6]](s1), [[SHL2]], [[C1]]
    ; GFX9: [[SELECT7:%[0-9]+]]:_(s64) = G_SELECT [[ICMP6]](s1), [[OR2]], [[SHL4]]
    ; GFX9: [[SELECT8:%[0-9]+]]:_(s64) = G_SELECT [[ICMP7]](s1), [[UV7]], [[SELECT7]]
    ; GFX9: [[OR3:%[0-9]+]]:_(s64) = G_OR [[SELECT4]], [[SELECT6]]
    ; GFX9: [[OR4:%[0-9]+]]:_(s64) = G_OR [[SELECT5]], [[SELECT8]]
    ; GFX9: [[UV8:%[0-9]+]]:_(s64), [[UV9:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; GFX9: [[ASHR3:%[0-9]+]]:_(s64) = G_ASHR [[UV9]], [[C4]](s32)
    ; GFX9: [[ASHR4:%[0-9]+]]:_(s64) = G_ASHR [[UV9]], [[C4]](s32)
    ; GFX9: [[UV10:%[0-9]+]]:_(s64), [[UV11:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; GFX9: [[SUB8:%[0-9]+]]:_(s32) = G_SUB [[SUB]], [[C3]]
    ; GFX9: [[SUB9:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SUB]]
    ; GFX9: [[ICMP8:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[SUB]](s32), [[C3]]
    ; GFX9: [[ICMP9:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[SUB]](s32), [[C]]
    ; GFX9: [[ASHR5:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[SUB]](s32)
    ; GFX9: [[LSHR5:%[0-9]+]]:_(s64) = G_LSHR [[UV10]], [[SUB]](s32)
    ; GFX9: [[SHL5:%[0-9]+]]:_(s64) = G_SHL [[UV11]], [[SUB9]](s32)
    ; GFX9: [[OR5:%[0-9]+]]:_(s64) = G_OR [[LSHR5]], [[SHL5]]
    ; GFX9: [[ASHR6:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[C4]](s32)
    ; GFX9: [[ASHR7:%[0-9]+]]:_(s64) = G_ASHR [[UV11]], [[SUB8]](s32)
    ; GFX9: [[SELECT9:%[0-9]+]]:_(s64) = G_SELECT [[ICMP8]](s1), [[OR5]], [[ASHR7]]
    ; GFX9: [[SELECT10:%[0-9]+]]:_(s64) = G_SELECT [[ICMP9]](s1), [[UV10]], [[SELECT9]]
    ; GFX9: [[SELECT11:%[0-9]+]]:_(s64) = G_SELECT [[ICMP8]](s1), [[ASHR5]], [[ASHR6]]
    ; GFX9: [[SELECT12:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR3]], [[SELECT10]]
    ; GFX9: [[SELECT13:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR4]], [[SELECT11]]
    ; GFX9: [[UV12:%[0-9]+]]:_(s64), [[UV13:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; GFX9: [[SELECT14:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV12]], [[SELECT12]]
    ; GFX9: [[SELECT15:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV13]], [[SELECT13]]
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT14]](s64), [[SELECT15]](s64)
    ; GFX9: [[SELECT16:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[SELECT1]], [[ASHR3]]
    ; GFX9: [[SELECT17:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[SELECT2]], [[ASHR4]]
    ; GFX9: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT16]](s64), [[SELECT17]](s64)
    ; GFX9: [[MV2:%[0-9]+]]:_(s256) = G_MERGE_VALUES [[MV]](s128), [[MV1]](s128)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[MV2]](s256)
    %0:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    %1:_(s32) = COPY $vgpr8
    %2:_(s256) = G_ZEXT %1
    %3:_(s256) = G_ASHR %0, %2
    $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY %3
...

---
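# <2 x s128> is scalarized: each s128 element is shifted by the matching s32
# amount and expanded like the scalar s128-by-s32 case, then the results are
# recombined with G_BUILD_VECTOR.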
name: test_ashr_v2s128_v2s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7, $vgpr4_vgpr5

    ; SI-LABEL: name: test_ashr_v2s128_v2s32
    ; SI: [[COPY:%[0-9]+]]:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; SI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr4_vgpr5
    ; SI: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](<2 x s128>)
    ; SI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; SI: [[UV4:%[0-9]+]]:_(s64), [[UV5:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; SI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[UV2]], [[C]]
    ; SI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C]], [[UV2]]
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; SI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[UV2]](s32), [[C]]
    ; SI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[UV2]](s32), [[C1]]
    ; SI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[UV2]](s32)
    ; SI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV4]], [[UV2]](s32)
    ; SI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV5]], [[SUB1]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; SI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[C2]](s32)
    ; SI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[SUB]](s32)
    ; SI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; SI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV4]], [[SELECT]]
    ; SI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; SI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; SI: [[UV6:%[0-9]+]]:_(s64), [[UV7:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; SI: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[UV3]], [[C]]
    ; SI: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C]], [[UV3]]
    ; SI: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[UV3]](s32), [[C]]
    ; SI: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[UV3]](s32), [[C1]]
    ; SI: [[ASHR3:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[UV3]](s32)
    ; SI: [[LSHR1:%[0-9]+]]:_(s64) = G_LSHR [[UV6]], [[UV3]](s32)
    ; SI: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[UV7]], [[SUB3]](s32)
    ; SI: [[OR1:%[0-9]+]]:_(s64) = G_OR [[LSHR1]], [[SHL1]]
    ; SI: [[ASHR4:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[C2]](s32)
    ; SI: [[ASHR5:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[SUB2]](s32)
    ; SI: [[SELECT3:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[OR1]], [[ASHR5]]
    ; SI: [[SELECT4:%[0-9]+]]:_(s64) = G_SELECT [[ICMP3]](s1), [[UV6]], [[SELECT3]]
    ; SI: [[SELECT5:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[ASHR3]], [[ASHR4]]
    ; SI: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT4]](s64), [[SELECT5]](s64)
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s128>) = G_BUILD_VECTOR [[MV]](s128), [[MV1]](s128)
    ; SI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<2 x s128>)
    ; VI-LABEL: name: test_ashr_v2s128_v2s32
    ; VI: [[COPY:%[0-9]+]]:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; VI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr4_vgpr5
    ; VI: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](<2 x s128>)
    ; VI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; VI: [[UV4:%[0-9]+]]:_(s64), [[UV5:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; VI: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[UV2]], [[C]]
    ; VI: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C]], [[UV2]]
    ; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; VI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[UV2]](s32), [[C]]
    ; VI: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[UV2]](s32), [[C1]]
    ; VI: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[UV2]](s32)
    ; VI: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV4]], [[UV2]](s32)
    ; VI: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV5]], [[SUB1]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; VI: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[C2]](s32)
    ; VI: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[SUB]](s32)
    ; VI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; VI: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV4]], [[SELECT]]
    ; VI: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; VI: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; VI: [[UV6:%[0-9]+]]:_(s64), [[UV7:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; VI: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[UV3]], [[C]]
    ; VI: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C]], [[UV3]]
    ; VI: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[UV3]](s32), [[C]]
    ; VI: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[UV3]](s32), [[C1]]
    ; VI: [[ASHR3:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[UV3]](s32)
    ; VI: [[LSHR1:%[0-9]+]]:_(s64) = G_LSHR [[UV6]], [[UV3]](s32)
    ; VI: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[UV7]], [[SUB3]](s32)
    ; VI: [[OR1:%[0-9]+]]:_(s64) = G_OR [[LSHR1]], [[SHL1]]
    ; VI: [[ASHR4:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[C2]](s32)
    ; VI: [[ASHR5:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[SUB2]](s32)
    ; VI: [[SELECT3:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[OR1]], [[ASHR5]]
    ; VI: [[SELECT4:%[0-9]+]]:_(s64) = G_SELECT [[ICMP3]](s1), [[UV6]], [[SELECT3]]
    ; VI: [[SELECT5:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[ASHR3]], [[ASHR4]]
    ; VI: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT4]](s64), [[SELECT5]](s64)
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s128>) = G_BUILD_VECTOR [[MV]](s128), [[MV1]](s128)
    ; VI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<2 x s128>)
    ; GFX9-LABEL: name: test_ashr_v2s128_v2s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr4_vgpr5
    ; GFX9: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](<2 x s128>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
    ; GFX9: [[UV4:%[0-9]+]]:_(s64), [[UV5:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV]](s128)
    ; GFX9: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[UV2]], [[C]]
    ; GFX9: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C]], [[UV2]]
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[UV2]](s32), [[C]]
    ; GFX9: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[UV2]](s32), [[C1]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[UV2]](s32)
    ; GFX9: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[UV4]], [[UV2]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[UV5]], [[SUB1]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[C2]](s32)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[UV5]], [[SUB]](s32)
    ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[OR]], [[ASHR2]]
    ; GFX9: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[UV4]], [[SELECT]]
    ; GFX9: [[SELECT2:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[ASHR]], [[ASHR1]]
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT1]](s64), [[SELECT2]](s64)
    ; GFX9: [[UV6:%[0-9]+]]:_(s64), [[UV7:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[UV1]](s128)
    ; GFX9: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[UV3]], [[C]]
    ; GFX9: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C]], [[UV3]]
    ; GFX9: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[UV3]](s32), [[C]]
    ; GFX9: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[UV3]](s32), [[C1]]
    ; GFX9: [[ASHR3:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[UV3]](s32)
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s64) = G_LSHR [[UV6]], [[UV3]](s32)
    ; GFX9: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[UV7]], [[SUB3]](s32)
    ; GFX9: [[OR1:%[0-9]+]]:_(s64) = G_OR [[LSHR1]], [[SHL1]]
    ; GFX9: [[ASHR4:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[C2]](s32)
    ; GFX9: [[ASHR5:%[0-9]+]]:_(s64) = G_ASHR [[UV7]], [[SUB2]](s32)
    ; GFX9: [[SELECT3:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[OR1]], [[ASHR5]]
    ; GFX9: [[SELECT4:%[0-9]+]]:_(s64) = G_SELECT [[ICMP3]](s1), [[UV6]], [[SELECT3]]
    ; GFX9: [[SELECT5:%[0-9]+]]:_(s64) = G_SELECT [[ICMP2]](s1), [[ASHR3]], [[ASHR4]]
    ; GFX9: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SELECT4]](s64), [[SELECT5]](s64)
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s128>) = G_BUILD_VECTOR [[MV]](s128), [[MV1]](s128)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<2 x s128>)
    %0:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    %1:_(<2 x s32>) = COPY $vgpr4_vgpr5
    %2:_(<2 x s128>) = G_ASHR %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY %2
...