# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -global-isel-abort=0 -march=amdgcn -mcpu=tahiti -run-pass=legalizer %s -o - | FileCheck -check-prefix=GFX6 %s
# RUN: llc -global-isel-abort=0 -march=amdgcn -mcpu=fiji -run-pass=legalizer %s -o - | FileCheck -check-prefix=GFX8 %s
# RUN: llc -global-isel-abort=0 -march=amdgcn -mcpu=gfx900 -run-pass=legalizer %s -o - | FileCheck -check-prefix=GFX9 %s
# RUN: llc -global-isel-abort=0 -march=amdgcn -mcpu=gfx1010 -run-pass=legalizer %s -o - | FileCheck -check-prefix=GFX9 %s

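# As the checks below show, s7 G_SADDSAT is widened: GFX6 shifts both operands
# up by 25 bits into an s32, clamps the addend with smin/smax against the
# distance to INT32_MAX/INT32_MIN, adds, and shifts the result back down. GFX8
# performs the same clamp-based expansion at s16 with a shift of 9; GFX9 also
# widens to s16 but keeps a native s16 G_SADDSAT.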
---
name: saddsat_s7
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: saddsat_s7
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 25
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SHL]], [[C3]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SHL]], [[C3]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ADD]], [[C]](s32)
    ; GFX6: $vgpr0 = COPY [[ASHR]](s32)
    ; GFX8-LABEL: name: saddsat_s7
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 9
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 32767
    ; GFX8: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 -32768
    ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s16) = G_SMAX [[SHL]], [[C3]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s16) = G_SMIN [[SHL]], [[C3]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s16) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s16) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s16) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[ADD]], [[C]](s16)
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: saddsat_s7
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 9
    ; GFX9: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX9: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16)
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(s16) = G_SADDSAT [[SHL]], [[SHL1]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SADDSAT]], [[C]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s7) = G_TRUNC %0
    %3:_(s7) = G_TRUNC %1
    %4:_(s7) = G_SADDSAT %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

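# Same expansion as saddsat_s7, with shift amounts of 24 (GFX6, s32) and 8
# (GFX8/GFX9, s16).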
---
name: saddsat_s8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: saddsat_s8
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SHL]], [[C3]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SHL]], [[C3]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ADD]], [[C]](s32)
    ; GFX6: $vgpr0 = COPY [[ASHR]](s32)
    ; GFX8-LABEL: name: saddsat_s8
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 32767
    ; GFX8: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 -32768
    ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s16) = G_SMAX [[SHL]], [[C3]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s16) = G_SMIN [[SHL]], [[C3]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s16) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s16) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s16) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[ADD]], [[C]](s16)
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: saddsat_s8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX9: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX9: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16)
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(s16) = G_SADDSAT [[SHL]], [[SHL1]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SADDSAT]], [[C]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ASHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s8) = G_TRUNC %0
    %3:_(s8) = G_TRUNC %1
    %4:_(s8) = G_SADDSAT %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

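# As the checks below show, <2 x s8> is split into scalars on GFX6/GFX8, each
# element going through the shift + smin/smax clamp expansion before the two
# results are repacked into an s16. GFX9 instead builds a <2 x s16> vector,
# shifts both lanes by 8, uses the packed G_SADDSAT, and repacks the result.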
---
name: saddsat_v2s8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: saddsat_v2s8
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
    ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32)
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C1]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SHL]], [[C4]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SHL]], [[C4]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ADD]], [[C1]](s32)
    ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[LSHR]], [[C1]](s32)
    ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[LSHR1]], [[C1]](s32)
    ; GFX6: [[SMAX2:%[0-9]+]]:_(s32) = G_SMAX [[SHL2]], [[C4]]
    ; GFX6: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMAX2]]
    ; GFX6: [[SMIN2:%[0-9]+]]:_(s32) = G_SMIN [[SHL2]], [[C4]]
    ; GFX6: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C3]], [[SMIN2]]
    ; GFX6: [[SMAX3:%[0-9]+]]:_(s32) = G_SMAX [[SUB3]], [[SHL3]]
    ; GFX6: [[SMIN3:%[0-9]+]]:_(s32) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[SMIN3]]
    ; GFX6: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ADD1]], [[C1]](s32)
    ; GFX6: [[C5:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[ASHR]](s32)
    ; GFX6: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C5]]
    ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX6: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ASHR1]], [[C6]]
    ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[COPY2]](s32)
    ; GFX6: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[SHL4]](s32)
    ; GFX6: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND]], [[TRUNC1]]
    ; GFX6: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16)
    ; GFX6: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX8-LABEL: name: saddsat_v2s8
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C1]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C1]](s16)
    ; GFX8: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 32767
    ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 -32768
    ; GFX8: [[C4:%[0-9]+]]:_(s16) = G_CONSTANT i16 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s16) = G_SMAX [[SHL]], [[C4]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s16) = G_SMIN [[SHL]], [[C4]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s16) = G_SUB [[C3]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s16) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s16) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s16) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[ADD]], [[C1]](s16)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX8: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[TRUNC2]], [[C1]](s16)
    ; GFX8: [[SHL3:%[0-9]+]]:_(s16) = G_SHL [[TRUNC3]], [[C1]](s16)
    ; GFX8: [[SMAX2:%[0-9]+]]:_(s16) = G_SMAX [[SHL2]], [[C4]]
    ; GFX8: [[SUB2:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMAX2]]
    ; GFX8: [[SMIN2:%[0-9]+]]:_(s16) = G_SMIN [[SHL2]], [[C4]]
    ; GFX8: [[SUB3:%[0-9]+]]:_(s16) = G_SUB [[C3]], [[SMIN2]]
    ; GFX8: [[SMAX3:%[0-9]+]]:_(s16) = G_SMAX [[SUB3]], [[SHL3]]
    ; GFX8: [[SMIN3:%[0-9]+]]:_(s16) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX8: [[ADD1:%[0-9]+]]:_(s16) = G_ADD [[SHL2]], [[SMIN3]]
    ; GFX8: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[ADD1]], [[C1]](s16)
    ; GFX8: [[C5:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX8: [[AND:%[0-9]+]]:_(s16) = G_AND [[ASHR]], [[C5]]
    ; GFX8: [[AND1:%[0-9]+]]:_(s16) = G_AND [[ASHR1]], [[C5]]
    ; GFX8: [[SHL4:%[0-9]+]]:_(s16) = G_SHL [[AND1]], [[C1]](s16)
    ; GFX8: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND]], [[SHL4]]
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16)
    ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: saddsat_v2s8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY]](s32), [[LSHR]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY1]](s32), [[LSHR1]](s32)
    ; GFX9: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY2]](s32), [[COPY2]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(<2 x s16>) = G_SHL [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[SHL1:%[0-9]+]]:_(<2 x s16>) = G_SHL [[BUILD_VECTOR_TRUNC1]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(<2 x s16>) = G_SADDSAT [[SHL]], [[SHL1]]
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SADDSAT]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[ASHR]](<2 x s16>)
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C1]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; GFX9: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C3]]
    ; GFX9: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C3]]
    ; GFX9: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[AND1]], [[C2]](s16)
    ; GFX9: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND]], [[SHL2]]
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s16) = G_TRUNC %0
    %3:_(s16) = G_TRUNC %1
    %4:_(<2 x s8>) = G_BITCAST %2
    %5:_(<2 x s8>) = G_BITCAST %3
    %6:_(<2 x s8>) = G_SADDSAT %4, %5
    %7:_(s16) = G_BITCAST %6
    %8:_(s32) = G_ANYEXT %7
    $vgpr0 = COPY %8
...

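# s16 G_SADDSAT: GFX6 widens to s32 and expands with the smin/smax clamp, GFX8
# expands directly at s16, and GFX9 keeps the s16 G_SADDSAT as is.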
---
name: saddsat_s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: saddsat_s16
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SHL]], [[C3]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SHL]], [[C3]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ADD]], [[C]](s32)
    ; GFX6: $vgpr0 = COPY [[ASHR]](s32)
    ; GFX8-LABEL: name: saddsat_s16
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 32767
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 -32768
    ; GFX8: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC]], [[C2]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s16) = G_SUB [[C]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC]], [[C2]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s16) = G_SMAX [[SUB1]], [[TRUNC1]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s16) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s16) = G_ADD [[TRUNC]], [[SMIN1]]
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ADD]](s16)
    ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: saddsat_s16
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(s16) = G_SADDSAT [[TRUNC]], [[TRUNC1]]
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SADDSAT]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s16) = G_TRUNC %0
    %3:_(s16) = G_TRUNC %1
    %4:_(s16) = G_SADDSAT %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

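# <2 x s16> G_SADDSAT: GFX6 and GFX8 unpack the two lanes (to s32 and s16
# respectively), expand each with the clamp sequence, and repack; GFX9 keeps
# the packed <2 x s16> G_SADDSAT.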
---
name: saddsat_v2s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: saddsat_v2s16
    ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>)
    ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[BITCAST]], [[C]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[BITCAST1]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SHL]], [[C3]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SHL]], [[C3]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ADD]], [[C]](s32)
    ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[LSHR]], [[C]](s32)
    ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[LSHR1]], [[C]](s32)
    ; GFX6: [[SMAX2:%[0-9]+]]:_(s32) = G_SMAX [[SHL2]], [[C3]]
    ; GFX6: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX2]]
    ; GFX6: [[SMIN2:%[0-9]+]]:_(s32) = G_SMIN [[SHL2]], [[C3]]
    ; GFX6: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN2]]
    ; GFX6: [[SMAX3:%[0-9]+]]:_(s32) = G_SMAX [[SUB3]], [[SHL3]]
    ; GFX6: [[SMIN3:%[0-9]+]]:_(s32) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[SMIN3]]
    ; GFX6: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ADD1]], [[C]](s32)
    ; GFX6: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[ASHR]], [[C4]]
    ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ASHR1]], [[C4]]
    ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
    ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL4]]
    ; GFX6: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX6: $vgpr0 = COPY [[BITCAST2]](<2 x s16>)
    ; GFX8-LABEL: name: saddsat_v2s16
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 32767
    ; GFX8: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 -32768
    ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC]], [[C3]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC]], [[C3]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s16) = G_SMAX [[SUB1]], [[TRUNC2]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s16) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s16) = G_ADD [[TRUNC]], [[SMIN1]]
    ; GFX8: [[SMAX2:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC1]], [[C3]]
    ; GFX8: [[SUB2:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX2]]
    ; GFX8: [[SMIN2:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC1]], [[C3]]
    ; GFX8: [[SUB3:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN2]]
    ; GFX8: [[SMAX3:%[0-9]+]]:_(s16) = G_SMAX [[SUB3]], [[TRUNC3]]
    ; GFX8: [[SMIN3:%[0-9]+]]:_(s16) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX8: [[ADD1:%[0-9]+]]:_(s16) = G_ADD [[TRUNC1]], [[SMIN3]]
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ADD]](s16)
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ADD1]](s16)
    ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; GFX8: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX8: $vgpr0 = COPY [[BITCAST2]](<2 x s16>)
    ; GFX9-LABEL: name: saddsat_v2s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(<2 x s16>) = G_SADDSAT [[COPY]], [[COPY1]]
    ; GFX9: $vgpr0 = COPY [[SADDSAT]](<2 x s16>)
    %0:_(<2 x s16>) = COPY $vgpr0
    %1:_(<2 x s16>) = COPY $vgpr1
    %2:_(<2 x s16>) = G_SADDSAT %0, %1
    $vgpr0 = COPY %2
...

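# <3 x s16> G_SADDSAT: GFX6 and GFX8 expand the three elements as scalars and
# repack them together with undef padding; GFX9 lowers it to two <2 x s16>
# G_SADDSATs, the second padded with an undef lane.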
---
name: saddsat_v3s16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2

    ; GFX6-LABEL: name: saddsat_v3s16
    ; GFX6: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX6: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX6: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX6: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX6: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV5]](<2 x s16>)
    ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[BITCAST]], [[C]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[LSHR1]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SHL]], [[C3]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SHL]], [[C3]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ADD]], [[C]](s32)
    ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[LSHR]], [[C]](s32)
    ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[BITCAST3]], [[C]](s32)
    ; GFX6: [[SMAX2:%[0-9]+]]:_(s32) = G_SMAX [[SHL2]], [[C3]]
    ; GFX6: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX2]]
    ; GFX6: [[SMIN2:%[0-9]+]]:_(s32) = G_SMIN [[SHL2]], [[C3]]
    ; GFX6: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN2]]
    ; GFX6: [[SMAX3:%[0-9]+]]:_(s32) = G_SMAX [[SUB3]], [[SHL3]]
    ; GFX6: [[SMIN3:%[0-9]+]]:_(s32) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[SMIN3]]
    ; GFX6: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ADD1]], [[C]](s32)
    ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[BITCAST1]], [[C]](s32)
    ; GFX6: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[LSHR2]], [[C]](s32)
    ; GFX6: [[SMAX4:%[0-9]+]]:_(s32) = G_SMAX [[SHL4]], [[C3]]
    ; GFX6: [[SUB4:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX4]]
    ; GFX6: [[SMIN4:%[0-9]+]]:_(s32) = G_SMIN [[SHL4]], [[C3]]
    ; GFX6: [[SUB5:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN4]]
    ; GFX6: [[SMAX5:%[0-9]+]]:_(s32) = G_SMAX [[SUB5]], [[SHL5]]
    ; GFX6: [[SMIN5:%[0-9]+]]:_(s32) = G_SMIN [[SMAX5]], [[SUB4]]
    ; GFX6: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[SHL4]], [[SMIN5]]
    ; GFX6: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[ADD2]], [[C]](s32)
    ; GFX6: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX6: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF]](<4 x s16>)
    ; GFX6: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>)
    ; GFX6: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32)
    ; GFX6: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>)
    ; GFX6: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[ASHR]], [[C4]]
    ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ASHR1]], [[C4]]
    ; GFX6: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
    ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL6]]
    ; GFX6: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX6: [[AND2:%[0-9]+]]:_(s32) = G_AND [[ASHR2]], [[C4]]
    ; GFX6: [[AND3:%[0-9]+]]:_(s32) = G_AND [[BITCAST4]], [[C4]]
    ; GFX6: [[SHL7:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32)
    ; GFX6: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL7]]
    ; GFX6: [[BITCAST7:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX6: [[AND4:%[0-9]+]]:_(s32) = G_AND [[LSHR3]], [[C4]]
    ; GFX6: [[AND5:%[0-9]+]]:_(s32) = G_AND [[BITCAST5]], [[C4]]
    ; GFX6: [[SHL8:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C]](s32)
    ; GFX6: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL8]]
    ; GFX6: [[BITCAST8:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32)
    ; GFX6: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST6]](<2 x s16>), [[BITCAST7]](<2 x s16>), [[BITCAST8]](<2 x s16>)
    ; GFX6: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    ; GFX8-LABEL: name: saddsat_v3s16
    ; GFX8: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX8: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; GFX8: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX8: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX8: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV5]](<2 x s16>)
    ; GFX8: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32)
    ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX8: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 32767
    ; GFX8: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 -32768
    ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC]], [[C3]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC]], [[C3]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s16) = G_SMAX [[SUB1]], [[TRUNC3]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s16) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s16) = G_ADD [[TRUNC]], [[SMIN1]]
    ; GFX8: [[SMAX2:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC1]], [[C3]]
    ; GFX8: [[SUB2:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX2]]
    ; GFX8: [[SMIN2:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC1]], [[C3]]
    ; GFX8: [[SUB3:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN2]]
    ; GFX8: [[SMAX3:%[0-9]+]]:_(s16) = G_SMAX [[SUB3]], [[TRUNC4]]
    ; GFX8: [[SMIN3:%[0-9]+]]:_(s16) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX8: [[ADD1:%[0-9]+]]:_(s16) = G_ADD [[TRUNC1]], [[SMIN3]]
    ; GFX8: [[SMAX4:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC2]], [[C3]]
    ; GFX8: [[SUB4:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX4]]
    ; GFX8: [[SMIN4:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC2]], [[C3]]
    ; GFX8: [[SUB5:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN4]]
    ; GFX8: [[SMAX5:%[0-9]+]]:_(s16) = G_SMAX [[SUB5]], [[TRUNC5]]
    ; GFX8: [[SMIN5:%[0-9]+]]:_(s16) = G_SMIN [[SMAX5]], [[SUB4]]
    ; GFX8: [[ADD2:%[0-9]+]]:_(s16) = G_ADD [[TRUNC2]], [[SMIN5]]
    ; GFX8: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX8: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF]](<4 x s16>)
    ; GFX8: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>)
    ; GFX8: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32)
    ; GFX8: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>)
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ADD]](s16)
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ADD1]](s16)
    ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; GFX8: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[ADD2]](s16)
    ; GFX8: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX8: [[AND:%[0-9]+]]:_(s32) = G_AND [[BITCAST4]], [[C4]]
    ; GFX8: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND]], [[C]](s32)
    ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL1]]
    ; GFX8: [[BITCAST7:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX8: [[AND1:%[0-9]+]]:_(s32) = G_AND [[LSHR3]], [[C4]]
    ; GFX8: [[AND2:%[0-9]+]]:_(s32) = G_AND [[BITCAST5]], [[C4]]
    ; GFX8: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
    ; GFX8: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND1]], [[SHL2]]
    ; GFX8: [[BITCAST8:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32)
    ; GFX8: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST6]](<2 x s16>), [[BITCAST7]](<2 x s16>), [[BITCAST8]](<2 x s16>)
    ; GFX8: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    ; GFX9-LABEL: name: saddsat_v3s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX9: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[BITCAST]](s32), [[LSHR]](s32)
    ; GFX9: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[BITCAST1]](s32), [[DEF]](s32)
    ; GFX9: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX9: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX9: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV5]](<2 x s16>)
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[LSHR1]](s32), [[BITCAST3]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC3:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[LSHR2]](s32), [[DEF]](s32)
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(<2 x s16>) = G_SADDSAT [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC2]]
    ; GFX9: [[SADDSAT1:%[0-9]+]]:_(<2 x s16>) = G_SADDSAT [[BUILD_VECTOR_TRUNC1]], [[BUILD_VECTOR_TRUNC3]]
    ; GFX9: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[SADDSAT]](<2 x s16>)
    ; GFX9: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32)
    ; GFX9: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[SADDSAT1]](<2 x s16>)
    ; GFX9: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF1]](<4 x s16>)
    ; GFX9: [[BITCAST6:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>)
    ; GFX9: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST6]], [[C]](s32)
    ; GFX9: [[BITCAST7:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>)
    ; GFX9: [[BUILD_VECTOR_TRUNC4:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[BITCAST4]](s32), [[LSHR3]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC5:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[BITCAST5]](s32), [[BITCAST6]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC6:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[LSHR4]](s32), [[BITCAST7]](s32)
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR_TRUNC4]](<2 x s16>), [[BUILD_VECTOR_TRUNC5]](<2 x s16>), [[BUILD_VECTOR_TRUNC6]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    %0:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(<3 x s16>), %2:_(<3 x s16>) = G_UNMERGE_VALUES %0
    %3:_(<3 x s16>) = G_SADDSAT %1, %2
    %4:_(<3 x s16>) = G_IMPLICIT_DEF
    %5:_(<6 x s16>) = G_CONCAT_VECTORS %3, %4
    $vgpr0_vgpr1_vgpr2 = COPY %5
...

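# <4 x s16> G_SADDSAT: four scalar clamp expansions on GFX6/GFX8, two packed
# <2 x s16> G_SADDSATs on GFX9.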
---
name: saddsat_v4s16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; GFX6-LABEL: name: saddsat_v4s16
    ; GFX6: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX6: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; GFX6: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX6: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; GFX6: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX6: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; GFX6: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[BITCAST]], [[C]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[BITCAST2]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SHL]], [[C3]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SHL]], [[C3]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[SHL1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[SMIN1]]
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ADD]], [[C]](s32)
    ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[LSHR]], [[C]](s32)
    ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[LSHR2]], [[C]](s32)
    ; GFX6: [[SMAX2:%[0-9]+]]:_(s32) = G_SMAX [[SHL2]], [[C3]]
    ; GFX6: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX2]]
    ; GFX6: [[SMIN2:%[0-9]+]]:_(s32) = G_SMIN [[SHL2]], [[C3]]
    ; GFX6: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN2]]
    ; GFX6: [[SMAX3:%[0-9]+]]:_(s32) = G_SMAX [[SUB3]], [[SHL3]]
    ; GFX6: [[SMIN3:%[0-9]+]]:_(s32) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[SMIN3]]
    ; GFX6: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ADD1]], [[C]](s32)
    ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[BITCAST1]], [[C]](s32)
    ; GFX6: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[BITCAST3]], [[C]](s32)
    ; GFX6: [[SMAX4:%[0-9]+]]:_(s32) = G_SMAX [[SHL4]], [[C3]]
    ; GFX6: [[SUB4:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX4]]
    ; GFX6: [[SMIN4:%[0-9]+]]:_(s32) = G_SMIN [[SHL4]], [[C3]]
    ; GFX6: [[SUB5:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN4]]
    ; GFX6: [[SMAX5:%[0-9]+]]:_(s32) = G_SMAX [[SUB5]], [[SHL5]]
    ; GFX6: [[SMIN5:%[0-9]+]]:_(s32) = G_SMIN [[SMAX5]], [[SUB4]]
    ; GFX6: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[SHL4]], [[SMIN5]]
    ; GFX6: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[ADD2]], [[C]](s32)
    ; GFX6: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[LSHR1]], [[C]](s32)
    ; GFX6: [[SHL7:%[0-9]+]]:_(s32) = G_SHL [[LSHR3]], [[C]](s32)
    ; GFX6: [[SMAX6:%[0-9]+]]:_(s32) = G_SMAX [[SHL6]], [[C3]]
    ; GFX6: [[SUB6:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMAX6]]
    ; GFX6: [[SMIN6:%[0-9]+]]:_(s32) = G_SMIN [[SHL6]], [[C3]]
    ; GFX6: [[SUB7:%[0-9]+]]:_(s32) = G_SUB [[C2]], [[SMIN6]]
    ; GFX6: [[SMAX7:%[0-9]+]]:_(s32) = G_SMAX [[SUB7]], [[SHL7]]
    ; GFX6: [[SMIN7:%[0-9]+]]:_(s32) = G_SMIN [[SMAX7]], [[SUB6]]
    ; GFX6: [[ADD3:%[0-9]+]]:_(s32) = G_ADD [[SHL6]], [[SMIN7]]
    ; GFX6: [[ASHR3:%[0-9]+]]:_(s32) = G_ASHR [[ADD3]], [[C]](s32)
    ; GFX6: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[ASHR]], [[C4]]
    ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ASHR1]], [[C4]]
    ; GFX6: [[SHL8:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
    ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL8]]
    ; GFX6: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX6: [[AND2:%[0-9]+]]:_(s32) = G_AND [[ASHR2]], [[C4]]
    ; GFX6: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ASHR3]], [[C4]]
    ; GFX6: [[SHL9:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32)
    ; GFX6: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL9]]
    ; GFX6: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX6: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>)
    ; GFX6: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; GFX8-LABEL: name: saddsat_v4s16
    ; GFX8: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX8: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; GFX8: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX8: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; GFX8: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; GFX8: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST2]](s32)
    ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX8: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; GFX8: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; GFX8: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32)
    ; GFX8: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX8: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 32767
    ; GFX8: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 -32768
    ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC]], [[C3]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC]], [[C3]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s16) = G_SMAX [[SUB1]], [[TRUNC4]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s16) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s16) = G_ADD [[TRUNC]], [[SMIN1]]
    ; GFX8: [[SMAX2:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC1]], [[C3]]
    ; GFX8: [[SUB2:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX2]]
    ; GFX8: [[SMIN2:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC1]], [[C3]]
    ; GFX8: [[SUB3:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN2]]
    ; GFX8: [[SMAX3:%[0-9]+]]:_(s16) = G_SMAX [[SUB3]], [[TRUNC5]]
    ; GFX8: [[SMIN3:%[0-9]+]]:_(s16) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX8: [[ADD1:%[0-9]+]]:_(s16) = G_ADD [[TRUNC1]], [[SMIN3]]
    ; GFX8: [[SMAX4:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC2]], [[C3]]
    ; GFX8: [[SUB4:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX4]]
    ; GFX8: [[SMIN4:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC2]], [[C3]]
    ; GFX8: [[SUB5:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN4]]
    ; GFX8: [[SMAX5:%[0-9]+]]:_(s16) = G_SMAX [[SUB5]], [[TRUNC6]]
    ; GFX8: [[SMIN5:%[0-9]+]]:_(s16) = G_SMIN [[SMAX5]], [[SUB4]]
    ; GFX8: [[ADD2:%[0-9]+]]:_(s16) = G_ADD [[TRUNC2]], [[SMIN5]]
    ; GFX8: [[SMAX6:%[0-9]+]]:_(s16) = G_SMAX [[TRUNC3]], [[C3]]
    ; GFX8: [[SUB6:%[0-9]+]]:_(s16) = G_SUB [[C1]], [[SMAX6]]
    ; GFX8: [[SMIN6:%[0-9]+]]:_(s16) = G_SMIN [[TRUNC3]], [[C3]]
    ; GFX8: [[SUB7:%[0-9]+]]:_(s16) = G_SUB [[C2]], [[SMIN6]]
    ; GFX8: [[SMAX7:%[0-9]+]]:_(s16) = G_SMAX [[SUB7]], [[TRUNC7]]
    ; GFX8: [[SMIN7:%[0-9]+]]:_(s16) = G_SMIN [[SMAX7]], [[SUB6]]
    ; GFX8: [[ADD3:%[0-9]+]]:_(s16) = G_ADD [[TRUNC3]], [[SMIN7]]
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ADD]](s16)
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ADD1]](s16)
    ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; GFX8: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[ADD2]](s16)
    ; GFX8: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[ADD3]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[ZEXT3]], [[C]](s32)
    ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL1]]
    ; GFX8: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX8: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>)
    ; GFX8: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; GFX9-LABEL: name: saddsat_v4s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX9: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(<2 x s16>) = G_SADDSAT [[UV]], [[UV2]]
    ; GFX9: [[SADDSAT1:%[0-9]+]]:_(<2 x s16>) = G_SADDSAT [[UV1]], [[UV3]]
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[SADDSAT]](<2 x s16>), [[SADDSAT1]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    %0:_(<4 x s16>) = COPY $vgpr0_vgpr1
    %1:_(<4 x s16>) = COPY $vgpr2_vgpr3
    %2:_(<4 x s16>) = G_SADDSAT %0, %1
    $vgpr0_vgpr1 = COPY %2
...

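# s32 G_SADDSAT: GFX6 and GFX8 expand to the smin/smax clamp around a plain
# G_ADD; GFX9 keeps the s32 G_SADDSAT.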
---
name: saddsat_s32
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: saddsat_s32
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[COPY]], [[C2]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[COPY]], [[C2]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[COPY1]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[SMIN1]]
    ; GFX6: $vgpr0 = COPY [[ADD]](s32)
    ; GFX8-LABEL: name: saddsat_s32
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX8: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[COPY]], [[C2]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[COPY]], [[C2]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[COPY1]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[SMIN1]]
    ; GFX8: $vgpr0 = COPY [[ADD]](s32)
    ; GFX9-LABEL: name: saddsat_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(s32) = G_SADDSAT [[COPY]], [[COPY1]]
    ; GFX9: $vgpr0 = COPY [[SADDSAT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s32) = G_SADDSAT %0, %1
    $vgpr0 = COPY %2
...

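# <2 x s32> G_SADDSAT is scalarized; each element then follows the s32 case
# above.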
---
name: saddsat_v2s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; GFX6-LABEL: name: saddsat_v2s32
    ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX6: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX6: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX6: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[UV]], [[C2]]
    ; GFX6: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SMAX]]
    ; GFX6: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[UV]], [[C2]]
    ; GFX6: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMIN]]
    ; GFX6: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[UV2]]
    ; GFX6: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[UV]], [[SMIN1]]
    ; GFX6: [[SMAX2:%[0-9]+]]:_(s32) = G_SMAX [[UV1]], [[C2]]
    ; GFX6: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SMAX2]]
    ; GFX6: [[SMIN2:%[0-9]+]]:_(s32) = G_SMIN [[UV1]], [[C2]]
    ; GFX6: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMIN2]]
    ; GFX6: [[SMAX3:%[0-9]+]]:_(s32) = G_SMAX [[SUB3]], [[UV3]]
    ; GFX6: [[SMIN3:%[0-9]+]]:_(s32) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[UV1]], [[SMIN3]]
    ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[ADD]](s32), [[ADD1]](s32)
    ; GFX6: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX8-LABEL: name: saddsat_v2s32
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX8: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2147483647
    ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648
    ; GFX8: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; GFX8: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[UV]], [[C2]]
    ; GFX8: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SMAX]]
    ; GFX8: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[UV]], [[C2]]
    ; GFX8: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMIN]]
    ; GFX8: [[SMAX1:%[0-9]+]]:_(s32) = G_SMAX [[SUB1]], [[UV2]]
    ; GFX8: [[SMIN1:%[0-9]+]]:_(s32) = G_SMIN [[SMAX1]], [[SUB]]
    ; GFX8: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[UV]], [[SMIN1]]
    ; GFX8: [[SMAX2:%[0-9]+]]:_(s32) = G_SMAX [[UV1]], [[C2]]
    ; GFX8: [[SUB2:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SMAX2]]
    ; GFX8: [[SMIN2:%[0-9]+]]:_(s32) = G_SMIN [[UV1]], [[C2]]
    ; GFX8: [[SUB3:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[SMIN2]]
    ; GFX8: [[SMAX3:%[0-9]+]]:_(s32) = G_SMAX [[SUB3]], [[UV3]]
    ; GFX8: [[SMIN3:%[0-9]+]]:_(s32) = G_SMIN [[SMAX3]], [[SUB2]]
    ; GFX8: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[UV1]], [[SMIN3]]
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[ADD]](s32), [[ADD1]](s32)
    ; GFX8: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX9-LABEL: name: saddsat_v2s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[SADDSAT:%[0-9]+]]:_(s32) = G_SADDSAT [[UV]], [[UV2]]
    ; GFX9: [[SADDSAT1:%[0-9]+]]:_(s32) = G_SADDSAT [[UV1]], [[UV3]]
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SADDSAT]](s32), [[SADDSAT1]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
    %1:_(<2 x s32>) = COPY $vgpr2_vgpr3
    %2:_(<2 x s32>) = G_SADDSAT %0, %1
    $vgpr0_vgpr1 = COPY %2
...

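# s64 G_SADDSAT is expanded the same way on all subtargets: a 32-bit carry
# chain (G_UADDO/G_UADDE) computes the sum, signed compares plus an xor detect
# overflow, and a select picks either the sum or the saturation value formed
# from the sign (ashr by 63) plus 0x8000000000000000.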
863---
864name: saddsat_s64
865body: |
866  bb.0:
867    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3
868
869    ; GFX6-LABEL: name: saddsat_s64
870    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
871    ; GFX6: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
872    ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
873    ; GFX6: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64)
874    ; GFX6: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV]], [[UV2]]
875    ; GFX6: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV1]], [[UV3]], [[UADDO1]]
876    ; GFX6: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32)
877    ; GFX6: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
878    ; GFX6: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV]](s64), [[COPY]]
879    ; GFX6: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[COPY1]](s64), [[C]]
880    ; GFX6: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP1]], [[ICMP]]
881    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
882    ; GFX6: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[MV]], [[C1]](s32)
883    ; GFX6: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 -9223372036854775808
884    ; GFX6: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR]](s64)
885    ; GFX6: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
886    ; GFX6: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]]
887    ; GFX6: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO3]]
888    ; GFX6: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32)
889    ; GFX6: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[XOR]](s1), [[MV1]], [[MV]]
890    ; GFX6: $vgpr0_vgpr1 = COPY [[SELECT]](s64)
891    ; GFX8-LABEL: name: saddsat_s64
892    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
893    ; GFX8: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
894    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
895    ; GFX8: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64)
896    ; GFX8: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV]], [[UV2]]
897    ; GFX8: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV1]], [[UV3]], [[UADDO1]]
898    ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32)
899    ; GFX8: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
900    ; GFX8: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV]](s64), [[COPY]]
901    ; GFX8: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[COPY1]](s64), [[C]]
902    ; GFX8: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP1]], [[ICMP]]
903    ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
904    ; GFX8: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[MV]], [[C1]](s32)
905    ; GFX8: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 -9223372036854775808
906    ; GFX8: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR]](s64)
907    ; GFX8: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
908    ; GFX8: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]]
909    ; GFX8: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO3]]
910    ; GFX8: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32)
911    ; GFX8: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[XOR]](s1), [[MV1]], [[MV]]
912    ; GFX8: $vgpr0_vgpr1 = COPY [[SELECT]](s64)
    ; GFX9-LABEL: name: saddsat_s64
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64)
    ; GFX9: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV]], [[UV2]]
    ; GFX9: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV1]], [[UV3]], [[UADDO1]]
    ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV]](s64), [[COPY]]
    ; GFX9: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[COPY1]](s64), [[C]]
    ; GFX9: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP1]], [[ICMP]]
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[MV]], [[C1]](s32)
    ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 -9223372036854775808
    ; GFX9: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR]](s64)
    ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
    ; GFX9: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]]
    ; GFX9: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO3]]
    ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32)
    ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[XOR]](s1), [[MV1]], [[MV]]
    ; GFX9: $vgpr0_vgpr1 = COPY [[SELECT]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = COPY $vgpr2_vgpr3
    %2:_(s64) = G_SADDSAT %0, %1
    $vgpr0_vgpr1 = COPY %2
...

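# Note on the saddsat_s64 checks above: there is no native 64-bit saturating
# add, so the legalizer expands G_SADDSAT s64 into a 32-bit carry chain
# (G_UADDO/G_UADDE), detects signed overflow, and selects a saturated value.
# A rough C sketch of that expansion (for readability only; the helper name
# saddsat64 is made up here and is not part of the generated checks):
#
#   #include <stdbool.h>
#   #include <stdint.h>
#
#   static int64_t saddsat64(int64_t a, int64_t b) {
#     uint64_t sum = (uint64_t)a + (uint64_t)b;      // wrapping add: G_UADDO + G_UADDE + G_MERGE_VALUES
#     bool ov = ((int64_t)sum < a) != (b < 0);       // signed overflow: two G_ICMP slt + G_XOR
#     uint64_t sgn = 0 - (sum >> 63);                // 0 or ~0, mirroring the G_ASHR by 63
#     uint64_t sat = sgn + 0x8000000000000000ull;    // INT64_MAX on positive, INT64_MIN on negative overflow
#     return ov ? (int64_t)sat : (int64_t)sum;       // G_SELECT on the overflow bit
#   }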
---
name: saddsat_v2s64
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4_vgpr5_vgpr6_vgpr7

    ; GFX6-LABEL: name: saddsat_v2s64
    ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX6: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX6: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; GFX6: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>)
    ; GFX6: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64)
    ; GFX6: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64)
    ; GFX6: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]]
    ; GFX6: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO1]]
    ; GFX6: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32)
    ; GFX6: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; GFX6: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV]](s64), [[UV]]
    ; GFX6: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[UV2]](s64), [[C]]
    ; GFX6: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP1]], [[ICMP]]
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX6: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[MV]], [[C1]](s32)
    ; GFX6: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 -9223372036854775808
    ; GFX6: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR]](s64)
    ; GFX6: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
    ; GFX6: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV8]], [[UV10]]
    ; GFX6: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV9]], [[UV11]], [[UADDO3]]
    ; GFX6: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32)
    ; GFX6: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[XOR]](s1), [[MV1]], [[MV]]
    ; GFX6: [[UV12:%[0-9]+]]:_(s32), [[UV13:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64)
    ; GFX6: [[UV14:%[0-9]+]]:_(s32), [[UV15:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64)
    ; GFX6: [[UADDO4:%[0-9]+]]:_(s32), [[UADDO5:%[0-9]+]]:_(s1) = G_UADDO [[UV12]], [[UV14]]
    ; GFX6: [[UADDE4:%[0-9]+]]:_(s32), [[UADDE5:%[0-9]+]]:_(s1) = G_UADDE [[UV13]], [[UV15]], [[UADDO5]]
    ; GFX6: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO4]](s32), [[UADDE4]](s32)
    ; GFX6: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV2]](s64), [[UV1]]
    ; GFX6: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[UV3]](s64), [[C]]
    ; GFX6: [[XOR1:%[0-9]+]]:_(s1) = G_XOR [[ICMP3]], [[ICMP2]]
    ; GFX6: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[MV2]], [[C1]](s32)
    ; GFX6: [[UV16:%[0-9]+]]:_(s32), [[UV17:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR1]](s64)
    ; GFX6: [[UV18:%[0-9]+]]:_(s32), [[UV19:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
    ; GFX6: [[UADDO6:%[0-9]+]]:_(s32), [[UADDO7:%[0-9]+]]:_(s1) = G_UADDO [[UV16]], [[UV18]]
    ; GFX6: [[UADDE6:%[0-9]+]]:_(s32), [[UADDE7:%[0-9]+]]:_(s1) = G_UADDE [[UV17]], [[UV19]], [[UADDO7]]
    ; GFX6: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO6]](s32), [[UADDE6]](s32)
    ; GFX6: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[XOR1]](s1), [[MV3]], [[MV2]]
    ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[SELECT]](s64), [[SELECT1]](s64)
    ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    ; GFX8-LABEL: name: saddsat_v2s64
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX8: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; GFX8: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>)
    ; GFX8: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64)
    ; GFX8: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64)
    ; GFX8: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]]
    ; GFX8: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO1]]
    ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; GFX8: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV]](s64), [[UV]]
    ; GFX8: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[UV2]](s64), [[C]]
    ; GFX8: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP1]], [[ICMP]]
    ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX8: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[MV]], [[C1]](s32)
    ; GFX8: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 -9223372036854775808
    ; GFX8: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR]](s64)
    ; GFX8: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
    ; GFX8: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV8]], [[UV10]]
    ; GFX8: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV9]], [[UV11]], [[UADDO3]]
    ; GFX8: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32)
    ; GFX8: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[XOR]](s1), [[MV1]], [[MV]]
    ; GFX8: [[UV12:%[0-9]+]]:_(s32), [[UV13:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64)
    ; GFX8: [[UV14:%[0-9]+]]:_(s32), [[UV15:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64)
    ; GFX8: [[UADDO4:%[0-9]+]]:_(s32), [[UADDO5:%[0-9]+]]:_(s1) = G_UADDO [[UV12]], [[UV14]]
    ; GFX8: [[UADDE4:%[0-9]+]]:_(s32), [[UADDE5:%[0-9]+]]:_(s1) = G_UADDE [[UV13]], [[UV15]], [[UADDO5]]
    ; GFX8: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO4]](s32), [[UADDE4]](s32)
    ; GFX8: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV2]](s64), [[UV1]]
    ; GFX8: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[UV3]](s64), [[C]]
    ; GFX8: [[XOR1:%[0-9]+]]:_(s1) = G_XOR [[ICMP3]], [[ICMP2]]
    ; GFX8: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[MV2]], [[C1]](s32)
    ; GFX8: [[UV16:%[0-9]+]]:_(s32), [[UV17:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR1]](s64)
    ; GFX8: [[UV18:%[0-9]+]]:_(s32), [[UV19:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
    ; GFX8: [[UADDO6:%[0-9]+]]:_(s32), [[UADDO7:%[0-9]+]]:_(s1) = G_UADDO [[UV16]], [[UV18]]
    ; GFX8: [[UADDE6:%[0-9]+]]:_(s32), [[UADDE7:%[0-9]+]]:_(s1) = G_UADDE [[UV17]], [[UV19]], [[UADDO7]]
    ; GFX8: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO6]](s32), [[UADDE6]](s32)
    ; GFX8: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[XOR1]](s1), [[MV3]], [[MV2]]
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[SELECT]](s64), [[SELECT1]](s64)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    ; GFX9-LABEL: name: saddsat_v2s64
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>)
    ; GFX9: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64)
    ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64)
    ; GFX9: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]]
    ; GFX9: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO1]]
    ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV]](s64), [[UV]]
    ; GFX9: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[UV2]](s64), [[C]]
    ; GFX9: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP1]], [[ICMP]]
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[MV]], [[C1]](s32)
    ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 -9223372036854775808
    ; GFX9: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR]](s64)
    ; GFX9: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
    ; GFX9: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV8]], [[UV10]]
    ; GFX9: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV9]], [[UV11]], [[UADDO3]]
    ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32)
    ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[XOR]](s1), [[MV1]], [[MV]]
    ; GFX9: [[UV12:%[0-9]+]]:_(s32), [[UV13:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64)
    ; GFX9: [[UV14:%[0-9]+]]:_(s32), [[UV15:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64)
    ; GFX9: [[UADDO4:%[0-9]+]]:_(s32), [[UADDO5:%[0-9]+]]:_(s1) = G_UADDO [[UV12]], [[UV14]]
    ; GFX9: [[UADDE4:%[0-9]+]]:_(s32), [[UADDE5:%[0-9]+]]:_(s1) = G_UADDE [[UV13]], [[UV15]], [[UADDO5]]
    ; GFX9: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO4]](s32), [[UADDE4]](s32)
    ; GFX9: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[MV2]](s64), [[UV1]]
    ; GFX9: [[ICMP3:%[0-9]+]]:_(s1) = G_ICMP intpred(slt), [[UV3]](s64), [[C]]
    ; GFX9: [[XOR1:%[0-9]+]]:_(s1) = G_XOR [[ICMP3]], [[ICMP2]]
    ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[MV2]], [[C1]](s32)
    ; GFX9: [[UV16:%[0-9]+]]:_(s32), [[UV17:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[ASHR1]](s64)
    ; GFX9: [[UV18:%[0-9]+]]:_(s32), [[UV19:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[C2]](s64)
    ; GFX9: [[UADDO6:%[0-9]+]]:_(s32), [[UADDO7:%[0-9]+]]:_(s1) = G_UADDO [[UV16]], [[UV18]]
    ; GFX9: [[UADDE6:%[0-9]+]]:_(s32), [[UADDE7:%[0-9]+]]:_(s1) = G_UADDE [[UV17]], [[UV19]], [[UADDO7]]
    ; GFX9: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO6]](s32), [[UADDE6]](s32)
    ; GFX9: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[XOR1]](s1), [[MV3]], [[MV2]]
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[SELECT]](s64), [[SELECT1]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    %0:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7
    %2:_(<2 x s64>) = G_SADDSAT %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %2
...

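# Note on saddsat_v2s64: the <2 x s64> case above is legalized by splitting
# the vector operands with G_UNMERGE_VALUES, applying the same s64 expansion
# to each element, and rebuilding the result with G_BUILD_VECTOR. A sketch
# reusing the hypothetical saddsat64 helper from the earlier comment:
#
#   typedef struct { int64_t v[2]; } v2i64;
#
#   static v2i64 saddsat_v2i64(v2i64 a, v2i64 b) {
#     v2i64 r;
#     for (int i = 0; i < 2; ++i)
#       r.v[i] = saddsat64(a.v[i], b.v[i]);          // per-element expansion, then G_BUILD_VECTOR
#     return r;
#   }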