# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=fiji -O0 -run-pass=legalizer %s -o - | FileCheck %s --check-prefix=GFX8
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=gfx906 -O0 -run-pass=legalizer %s -o - | FileCheck %s --check-prefix=GFX9

---
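# s32 G_UMULH is legal on both targets, so the checks show it passing through unchanged.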
name: test_umulh_s32
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX8-LABEL: name: test_umulh_s32
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[COPY]], [[COPY1]]
    ; GFX8: $vgpr0 = COPY [[UMULH]](s32)
    ; GFX9-LABEL: name: test_umulh_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[COPY]], [[COPY1]]
    ; GFX9: $vgpr0 = COPY [[UMULH]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s32) = G_UMULH %0, %1
    $vgpr0 = COPY %2
...

---
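# <2 x s32> is scalarized: the operands are unmerged, each lane gets its own
# s32 G_UMULH, and the results are rebuilt with G_BUILD_VECTOR.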
name: test_umulh_v2s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; GFX8-LABEL: name: test_umulh_v2s32
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX8: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX8: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[UV]], [[UV2]]
    ; GFX8: [[UMULH1:%[0-9]+]]:_(s32) = G_UMULH [[UV1]], [[UV3]]
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[UMULH]](s32), [[UMULH1]](s32)
    ; GFX8: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX9-LABEL: name: test_umulh_v2s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[UV]], [[UV2]]
    ; GFX9: [[UMULH1:%[0-9]+]]:_(s32) = G_UMULH [[UV1]], [[UV3]]
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[UMULH]](s32), [[UMULH1]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
    %1:_(<2 x s32>) = COPY $vgpr2_vgpr3
    %2:_(<2 x s32>) = G_UMULH %0, %1
    $vgpr0_vgpr1 = COPY %2
...

---
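# s64 is narrowed to s32 halves: the cross and high partial products are
# computed with G_MUL/G_UMULH, the carries are accumulated through
# G_UADDO/G_ZEXT/G_ADD, and the two halves of the high result are remerged.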
name: test_umulh_s64
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; GFX8-LABEL: name: test_umulh_s64
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
    ; GFX8: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64)
    ; GFX8: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[UV1]], [[UV2]]
    ; GFX8: [[MUL1:%[0-9]+]]:_(s32) = G_MUL [[UV]], [[UV3]]
    ; GFX8: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[UV]], [[UV2]]
    ; GFX8: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[MUL]], [[MUL1]]
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO1]](s1)
    ; GFX8: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UADDO]], [[UMULH]]
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO3]](s1)
    ; GFX8: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[ZEXT]], [[ZEXT1]]
    ; GFX8: [[MUL2:%[0-9]+]]:_(s32) = G_MUL [[UV1]], [[UV3]]
    ; GFX8: [[UMULH1:%[0-9]+]]:_(s32) = G_UMULH [[UV1]], [[UV2]]
    ; GFX8: [[UMULH2:%[0-9]+]]:_(s32) = G_UMULH [[UV]], [[UV3]]
    ; GFX8: [[UADDO4:%[0-9]+]]:_(s32), [[UADDO5:%[0-9]+]]:_(s1) = G_UADDO [[MUL2]], [[UMULH1]]
    ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO5]](s1)
    ; GFX8: [[UADDO6:%[0-9]+]]:_(s32), [[UADDO7:%[0-9]+]]:_(s1) = G_UADDO [[UADDO4]], [[UMULH2]]
    ; GFX8: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO7]](s1)
    ; GFX8: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[ZEXT2]], [[ZEXT3]]
    ; GFX8: [[UADDO8:%[0-9]+]]:_(s32), [[UADDO9:%[0-9]+]]:_(s1) = G_UADDO [[UADDO6]], [[ADD]]
    ; GFX8: [[ZEXT4:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO9]](s1)
    ; GFX8: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[ADD1]], [[ZEXT4]]
    ; GFX8: [[UMULH3:%[0-9]+]]:_(s32) = G_UMULH [[UV1]], [[UV3]]
    ; GFX8: [[ADD3:%[0-9]+]]:_(s32) = G_ADD [[UMULH3]], [[ADD2]]
    ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO8]](s32), [[ADD3]](s32)
    ; GFX8: $vgpr0_vgpr1 = COPY [[MV]](s64)
    ; GFX9-LABEL: name: test_umulh_s64
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64)
    ; GFX9: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[UV1]], [[UV2]]
    ; GFX9: [[MUL1:%[0-9]+]]:_(s32) = G_MUL [[UV]], [[UV3]]
    ; GFX9: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[UV]], [[UV2]]
    ; GFX9: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[MUL]], [[MUL1]]
    ; GFX9: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO1]](s1)
    ; GFX9: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UADDO]], [[UMULH]]
    ; GFX9: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO3]](s1)
    ; GFX9: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[ZEXT]], [[ZEXT1]]
    ; GFX9: [[MUL2:%[0-9]+]]:_(s32) = G_MUL [[UV1]], [[UV3]]
    ; GFX9: [[UMULH1:%[0-9]+]]:_(s32) = G_UMULH [[UV1]], [[UV2]]
    ; GFX9: [[UMULH2:%[0-9]+]]:_(s32) = G_UMULH [[UV]], [[UV3]]
    ; GFX9: [[UADDO4:%[0-9]+]]:_(s32), [[UADDO5:%[0-9]+]]:_(s1) = G_UADDO [[MUL2]], [[UMULH1]]
    ; GFX9: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO5]](s1)
    ; GFX9: [[UADDO6:%[0-9]+]]:_(s32), [[UADDO7:%[0-9]+]]:_(s1) = G_UADDO [[UADDO4]], [[UMULH2]]
    ; GFX9: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO7]](s1)
    ; GFX9: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[ZEXT2]], [[ZEXT3]]
    ; GFX9: [[UADDO8:%[0-9]+]]:_(s32), [[UADDO9:%[0-9]+]]:_(s1) = G_UADDO [[UADDO6]], [[ADD]]
    ; GFX9: [[ZEXT4:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO9]](s1)
    ; GFX9: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[ADD1]], [[ZEXT4]]
    ; GFX9: [[UMULH3:%[0-9]+]]:_(s32) = G_UMULH [[UV1]], [[UV3]]
    ; GFX9: [[ADD3:%[0-9]+]]:_(s32) = G_ADD [[UMULH3]], [[ADD2]]
    ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO8]](s32), [[ADD3]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[MV]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = COPY $vgpr2_vgpr3
    %2:_(s64) = G_UMULH %0, %1
    $vgpr0_vgpr1 = COPY %2
...

---
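# <2 x s64> is first scalarized to two s64 elements, each of which is then
# narrowed with the same s32 partial-product expansion as the scalar s64 case.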
name: test_umulh_v2s64
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4_vgpr5_vgpr6_vgpr7

    ; GFX8-LABEL: name: test_umulh_v2s64
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX8: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; GFX8: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>)
    ; GFX8: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64)
    ; GFX8: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64)
    ; GFX8: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[UV5]], [[UV6]]
    ; GFX8: [[MUL1:%[0-9]+]]:_(s32) = G_MUL [[UV4]], [[UV7]]
    ; GFX8: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[UV4]], [[UV6]]
    ; GFX8: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[MUL]], [[MUL1]]
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO1]](s1)
    ; GFX8: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UADDO]], [[UMULH]]
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO3]](s1)
    ; GFX8: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[ZEXT]], [[ZEXT1]]
    ; GFX8: [[MUL2:%[0-9]+]]:_(s32) = G_MUL [[UV5]], [[UV7]]
    ; GFX8: [[UMULH1:%[0-9]+]]:_(s32) = G_UMULH [[UV5]], [[UV6]]
    ; GFX8: [[UMULH2:%[0-9]+]]:_(s32) = G_UMULH [[UV4]], [[UV7]]
    ; GFX8: [[UADDO4:%[0-9]+]]:_(s32), [[UADDO5:%[0-9]+]]:_(s1) = G_UADDO [[MUL2]], [[UMULH1]]
    ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO5]](s1)
    ; GFX8: [[UADDO6:%[0-9]+]]:_(s32), [[UADDO7:%[0-9]+]]:_(s1) = G_UADDO [[UADDO4]], [[UMULH2]]
    ; GFX8: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO7]](s1)
    ; GFX8: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[ZEXT2]], [[ZEXT3]]
    ; GFX8: [[UADDO8:%[0-9]+]]:_(s32), [[UADDO9:%[0-9]+]]:_(s1) = G_UADDO [[UADDO6]], [[ADD]]
    ; GFX8: [[ZEXT4:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO9]](s1)
    ; GFX8: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[ADD1]], [[ZEXT4]]
    ; GFX8: [[UMULH3:%[0-9]+]]:_(s32) = G_UMULH [[UV5]], [[UV7]]
    ; GFX8: [[ADD3:%[0-9]+]]:_(s32) = G_ADD [[UMULH3]], [[ADD2]]
    ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO8]](s32), [[ADD3]](s32)
    ; GFX8: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64)
    ; GFX8: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64)
    ; GFX8: [[MUL3:%[0-9]+]]:_(s32) = G_MUL [[UV9]], [[UV10]]
    ; GFX8: [[MUL4:%[0-9]+]]:_(s32) = G_MUL [[UV8]], [[UV11]]
    ; GFX8: [[UMULH4:%[0-9]+]]:_(s32) = G_UMULH [[UV8]], [[UV10]]
    ; GFX8: [[UADDO10:%[0-9]+]]:_(s32), [[UADDO11:%[0-9]+]]:_(s1) = G_UADDO [[MUL3]], [[MUL4]]
    ; GFX8: [[ZEXT5:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO11]](s1)
    ; GFX8: [[UADDO12:%[0-9]+]]:_(s32), [[UADDO13:%[0-9]+]]:_(s1) = G_UADDO [[UADDO10]], [[UMULH4]]
    ; GFX8: [[ZEXT6:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO13]](s1)
    ; GFX8: [[ADD4:%[0-9]+]]:_(s32) = G_ADD [[ZEXT5]], [[ZEXT6]]
    ; GFX8: [[MUL5:%[0-9]+]]:_(s32) = G_MUL [[UV9]], [[UV11]]
    ; GFX8: [[UMULH5:%[0-9]+]]:_(s32) = G_UMULH [[UV9]], [[UV10]]
    ; GFX8: [[UMULH6:%[0-9]+]]:_(s32) = G_UMULH [[UV8]], [[UV11]]
    ; GFX8: [[UADDO14:%[0-9]+]]:_(s32), [[UADDO15:%[0-9]+]]:_(s1) = G_UADDO [[MUL5]], [[UMULH5]]
    ; GFX8: [[ZEXT7:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO15]](s1)
    ; GFX8: [[UADDO16:%[0-9]+]]:_(s32), [[UADDO17:%[0-9]+]]:_(s1) = G_UADDO [[UADDO14]], [[UMULH6]]
    ; GFX8: [[ZEXT8:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO17]](s1)
    ; GFX8: [[ADD5:%[0-9]+]]:_(s32) = G_ADD [[ZEXT7]], [[ZEXT8]]
    ; GFX8: [[UADDO18:%[0-9]+]]:_(s32), [[UADDO19:%[0-9]+]]:_(s1) = G_UADDO [[UADDO16]], [[ADD4]]
    ; GFX8: [[ZEXT9:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO19]](s1)
    ; GFX8: [[ADD6:%[0-9]+]]:_(s32) = G_ADD [[ADD5]], [[ZEXT9]]
    ; GFX8: [[UMULH7:%[0-9]+]]:_(s32) = G_UMULH [[UV9]], [[UV11]]
    ; GFX8: [[ADD7:%[0-9]+]]:_(s32) = G_ADD [[UMULH7]], [[ADD6]]
    ; GFX8: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO18]](s32), [[ADD7]](s32)
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    ; GFX9-LABEL: name: test_umulh_v2s64
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>)
    ; GFX9: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64)
    ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64)
    ; GFX9: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[UV5]], [[UV6]]
    ; GFX9: [[MUL1:%[0-9]+]]:_(s32) = G_MUL [[UV4]], [[UV7]]
    ; GFX9: [[UMULH:%[0-9]+]]:_(s32) = G_UMULH [[UV4]], [[UV6]]
    ; GFX9: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[MUL]], [[MUL1]]
    ; GFX9: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO1]](s1)
    ; GFX9: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UADDO]], [[UMULH]]
    ; GFX9: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO3]](s1)
    ; GFX9: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[ZEXT]], [[ZEXT1]]
    ; GFX9: [[MUL2:%[0-9]+]]:_(s32) = G_MUL [[UV5]], [[UV7]]
    ; GFX9: [[UMULH1:%[0-9]+]]:_(s32) = G_UMULH [[UV5]], [[UV6]]
    ; GFX9: [[UMULH2:%[0-9]+]]:_(s32) = G_UMULH [[UV4]], [[UV7]]
    ; GFX9: [[UADDO4:%[0-9]+]]:_(s32), [[UADDO5:%[0-9]+]]:_(s1) = G_UADDO [[MUL2]], [[UMULH1]]
    ; GFX9: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO5]](s1)
    ; GFX9: [[UADDO6:%[0-9]+]]:_(s32), [[UADDO7:%[0-9]+]]:_(s1) = G_UADDO [[UADDO4]], [[UMULH2]]
    ; GFX9: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO7]](s1)
    ; GFX9: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[ZEXT2]], [[ZEXT3]]
    ; GFX9: [[UADDO8:%[0-9]+]]:_(s32), [[UADDO9:%[0-9]+]]:_(s1) = G_UADDO [[UADDO6]], [[ADD]]
    ; GFX9: [[ZEXT4:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO9]](s1)
    ; GFX9: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[ADD1]], [[ZEXT4]]
    ; GFX9: [[UMULH3:%[0-9]+]]:_(s32) = G_UMULH [[UV5]], [[UV7]]
    ; GFX9: [[ADD3:%[0-9]+]]:_(s32) = G_ADD [[UMULH3]], [[ADD2]]
    ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO8]](s32), [[ADD3]](s32)
    ; GFX9: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64)
    ; GFX9: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64)
    ; GFX9: [[MUL3:%[0-9]+]]:_(s32) = G_MUL [[UV9]], [[UV10]]
    ; GFX9: [[MUL4:%[0-9]+]]:_(s32) = G_MUL [[UV8]], [[UV11]]
    ; GFX9: [[UMULH4:%[0-9]+]]:_(s32) = G_UMULH [[UV8]], [[UV10]]
    ; GFX9: [[UADDO10:%[0-9]+]]:_(s32), [[UADDO11:%[0-9]+]]:_(s1) = G_UADDO [[MUL3]], [[MUL4]]
    ; GFX9: [[ZEXT5:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO11]](s1)
    ; GFX9: [[UADDO12:%[0-9]+]]:_(s32), [[UADDO13:%[0-9]+]]:_(s1) = G_UADDO [[UADDO10]], [[UMULH4]]
    ; GFX9: [[ZEXT6:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO13]](s1)
    ; GFX9: [[ADD4:%[0-9]+]]:_(s32) = G_ADD [[ZEXT5]], [[ZEXT6]]
    ; GFX9: [[MUL5:%[0-9]+]]:_(s32) = G_MUL [[UV9]], [[UV11]]
    ; GFX9: [[UMULH5:%[0-9]+]]:_(s32) = G_UMULH [[UV9]], [[UV10]]
    ; GFX9: [[UMULH6:%[0-9]+]]:_(s32) = G_UMULH [[UV8]], [[UV11]]
    ; GFX9: [[UADDO14:%[0-9]+]]:_(s32), [[UADDO15:%[0-9]+]]:_(s1) = G_UADDO [[MUL5]], [[UMULH5]]
    ; GFX9: [[ZEXT7:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO15]](s1)
    ; GFX9: [[UADDO16:%[0-9]+]]:_(s32), [[UADDO17:%[0-9]+]]:_(s1) = G_UADDO [[UADDO14]], [[UMULH6]]
    ; GFX9: [[ZEXT8:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO17]](s1)
    ; GFX9: [[ADD5:%[0-9]+]]:_(s32) = G_ADD [[ZEXT7]], [[ZEXT8]]
    ; GFX9: [[UADDO18:%[0-9]+]]:_(s32), [[UADDO19:%[0-9]+]]:_(s1) = G_UADDO [[UADDO16]], [[ADD4]]
    ; GFX9: [[ZEXT9:%[0-9]+]]:_(s32) = G_ZEXT [[UADDO19]](s1)
    ; GFX9: [[ADD6:%[0-9]+]]:_(s32) = G_ADD [[ADD5]], [[ZEXT9]]
    ; GFX9: [[UMULH7:%[0-9]+]]:_(s32) = G_UMULH [[UV9]], [[UV11]]
    ; GFX9: [[ADD7:%[0-9]+]]:_(s32) = G_ADD [[UMULH7]], [[ADD6]]
    ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO18]](s32), [[ADD7]](s32)
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>)
    %0:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7
    %2:_(<2 x s64>) = G_UMULH %0, %1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %2
...

---
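# s16 is widened to s32: both operands are zero-extended (masked with 0xffff),
# multiplied, and the high half is recovered by shifting right by 16.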
name: test_umulh_s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX8-LABEL: name: test_umulh_s16
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX8: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
    ; GFX8: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C]]
    ; GFX8: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[AND]], [[AND1]]
    ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[MUL]], [[C1]](s32)
    ; GFX8: [[AND2:%[0-9]+]]:_(s32) = G_AND [[LSHR]], [[C]]
    ; GFX8: $vgpr0 = COPY [[AND2]](s32)
    ; GFX9-LABEL: name: test_umulh_s16
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
    ; GFX9: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C]]
    ; GFX9: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[AND]], [[AND1]]
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[MUL]], [[C1]](s32)
    ; GFX9: [[AND2:%[0-9]+]]:_(s32) = G_AND [[LSHR]], [[C]]
    ; GFX9: $vgpr0 = COPY [[AND2]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s16) = G_TRUNC %0
    %3:_(s16) = G_TRUNC %1
    %4:_(s16) = G_UMULH %2, %3
    %5:_(s32) = G_ZEXT %4
    $vgpr0 = COPY %5
...

---
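# s8 is widened to s16: the operands are masked to 8 bits, multiplied as s16,
# and the high byte is extracted with an 8-bit shift before the final zext.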
name: test_umulh_s8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX8-LABEL: name: test_umulh_s8
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX8: [[MUL:%[0-9]+]]:_(s16) = G_MUL [[AND]], [[AND1]]
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX8: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[MUL]], [[C1]](s16)
    ; GFX8: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR]](s16)
    ; GFX8: [[AND2:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
    ; GFX8: $vgpr0 = COPY [[AND2]](s32)
    ; GFX9-LABEL: name: test_umulh_s8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX9: [[MUL:%[0-9]+]]:_(s16) = G_MUL [[AND]], [[AND1]]
    ; GFX9: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX9: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[MUL]], [[C1]](s16)
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR]](s16)
    ; GFX9: [[AND2:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
    ; GFX9: $vgpr0 = COPY [[AND2]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s8) = G_TRUNC %0
    %3:_(s8) = G_TRUNC %1
    %4:_(s8) = G_UMULH %2, %3
    %5:_(s32) = G_ZEXT %4
    $vgpr0 = COPY %5
...

---
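# <2 x s16> lowers each element through a masked s32 multiply and 16-bit shift;
# GFX8 repacks the halves with shift/or before the bitcast, while GFX9 uses
# G_BUILD_VECTOR_TRUNC. Both then unpack and zero-extend to <2 x s32>.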
name: test_umulh_v2s16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3
    ; GFX8-LABEL: name: test_umulh_v2s16
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX8: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX8: [[AND:%[0-9]+]]:_(s32) = G_AND [[UV]], [[C]]
    ; GFX8: [[AND1:%[0-9]+]]:_(s32) = G_AND [[UV2]], [[C]]
    ; GFX8: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[AND]], [[AND1]]
    ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[MUL]], [[C1]](s32)
    ; GFX8: [[AND2:%[0-9]+]]:_(s32) = G_AND [[UV1]], [[C]]
    ; GFX8: [[AND3:%[0-9]+]]:_(s32) = G_AND [[UV3]], [[C]]
    ; GFX8: [[MUL1:%[0-9]+]]:_(s32) = G_MUL [[AND2]], [[AND3]]
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[MUL1]], [[C1]](s32)
    ; GFX8: [[AND4:%[0-9]+]]:_(s32) = G_AND [[LSHR]], [[C]]
    ; GFX8: [[AND5:%[0-9]+]]:_(s32) = G_AND [[LSHR1]], [[C]]
    ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C1]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL]]
    ; GFX8: [[BITCAST:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[BITCAST]](<2 x s16>)
    ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C1]](s32)
    ; GFX8: [[AND6:%[0-9]+]]:_(s32) = G_AND [[BITCAST1]], [[C]]
    ; GFX8: [[AND7:%[0-9]+]]:_(s32) = G_AND [[LSHR2]], [[C]]
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[AND6]](s32), [[AND7]](s32)
    ; GFX8: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX9-LABEL: name: test_umulh_v2s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[UV]], [[C]]
    ; GFX9: [[AND1:%[0-9]+]]:_(s32) = G_AND [[UV2]], [[C]]
    ; GFX9: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[AND]], [[AND1]]
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[MUL]], [[C1]](s32)
    ; GFX9: [[AND2:%[0-9]+]]:_(s32) = G_AND [[UV1]], [[C]]
    ; GFX9: [[AND3:%[0-9]+]]:_(s32) = G_AND [[UV3]], [[C]]
    ; GFX9: [[MUL1:%[0-9]+]]:_(s32) = G_MUL [[AND2]], [[AND3]]
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[MUL1]], [[C1]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[LSHR]](s32), [[LSHR1]](s32)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C1]](s32)
    ; GFX9: [[AND4:%[0-9]+]]:_(s32) = G_AND [[BITCAST]], [[C]]
    ; GFX9: [[AND5:%[0-9]+]]:_(s32) = G_AND [[LSHR2]], [[C]]
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[AND4]](s32), [[AND5]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
    %1:_(<2 x s32>) = COPY $vgpr2_vgpr3
    %2:_(<2 x s16>) = G_TRUNC %0
    %3:_(<2 x s16>) = G_TRUNC %1
    %4:_(<2 x s16>) = G_UMULH %2, %3
    %5:_(<2 x s32>) = G_ZEXT %4
    $vgpr0_vgpr1 = COPY %5
...

---
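# <3 x s8> exercises padding with G_IMPLICIT_DEF: GFX8 does three scalar s16
# multiplies, while GFX9 packs the widened elements into <2 x s16> vectors and
# multiplies those. Both then repack the result bytes for the anyext output.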
name: test_umulh_v3s8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5
    ; GFX8-LABEL: name: test_umulh_v3s8
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; GFX8: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
    ; GFX8: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; GFX8: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY3]](s32)
    ; GFX8: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX8: [[MUL:%[0-9]+]]:_(s16) = G_MUL [[AND]], [[AND1]]
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX8: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[MUL]], [[C1]](s16)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[AND2:%[0-9]+]]:_(s16) = G_AND [[TRUNC2]], [[C]]
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[COPY4]](s32)
    ; GFX8: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C]]
    ; GFX8: [[MUL1:%[0-9]+]]:_(s16) = G_MUL [[AND2]], [[AND3]]
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[MUL1]], [[C1]](s16)
    ; GFX8: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
    ; GFX8: [[AND4:%[0-9]+]]:_(s16) = G_AND [[TRUNC4]], [[C]]
    ; GFX8: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[COPY5]](s32)
    ; GFX8: [[AND5:%[0-9]+]]:_(s16) = G_AND [[TRUNC5]], [[C]]
    ; GFX8: [[MUL2:%[0-9]+]]:_(s16) = G_MUL [[AND4]], [[AND5]]
    ; GFX8: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[MUL2]], [[C1]](s16)
    ; GFX8: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; GFX8: [[AND6:%[0-9]+]]:_(s16) = G_AND [[LSHR]], [[C]]
    ; GFX8: [[AND7:%[0-9]+]]:_(s16) = G_AND [[LSHR1]], [[C]]
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[AND7]], [[C1]](s16)
    ; GFX8: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND6]], [[SHL]]
    ; GFX8: [[AND8:%[0-9]+]]:_(s16) = G_AND [[LSHR2]], [[C]]
    ; GFX8: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[DEF]](s32)
    ; GFX8: [[AND9:%[0-9]+]]:_(s16) = G_AND [[TRUNC6]], [[C]]
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[AND9]], [[C1]](s16)
    ; GFX8: [[OR1:%[0-9]+]]:_(s16) = G_OR [[AND8]], [[SHL1]]
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[OR]](s16)
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[OR1]](s16)
    ; GFX8: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C2]](s32)
    ; GFX8: [[OR2:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL2]]
    ; GFX8: $vgpr0 = COPY [[OR2]](s32)
    ; GFX9-LABEL: name: test_umulh_v3s8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
    ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
    ; GFX9: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
    ; GFX9: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[DEF]](s32)
    ; GFX9: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[COPY3]](s32)
    ; GFX9: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[COPY4]](s32)
    ; GFX9: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[COPY5]](s32)
    ; GFX9: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[DEF]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; GFX9: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[AND]](s16)
    ; GFX9: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[AND1]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT]](s32), [[ANYEXT1]](s32)
    ; GFX9: [[AND2:%[0-9]+]]:_(s16) = G_AND [[TRUNC4]], [[C]]
    ; GFX9: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC5]], [[C]]
    ; GFX9: [[ANYEXT2:%[0-9]+]]:_(s32) = G_ANYEXT [[AND2]](s16)
    ; GFX9: [[ANYEXT3:%[0-9]+]]:_(s32) = G_ANYEXT [[AND3]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT2]](s32), [[ANYEXT3]](s32)
    ; GFX9: [[MUL:%[0-9]+]]:_(<2 x s16>) = G_MUL [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC1]]
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY6]](s32), [[C1]](s32)
    ; GFX9: [[LSHR:%[0-9]+]]:_(<2 x s16>) = G_LSHR [[MUL]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[AND4:%[0-9]+]]:_(s16) = G_AND [[TRUNC2]], [[C]]
    ; GFX9: [[AND5:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C]]
    ; GFX9: [[ANYEXT4:%[0-9]+]]:_(s32) = G_ANYEXT [[AND4]](s16)
    ; GFX9: [[ANYEXT5:%[0-9]+]]:_(s32) = G_ANYEXT [[AND5]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC3:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT4]](s32), [[ANYEXT5]](s32)
    ; GFX9: [[AND6:%[0-9]+]]:_(s16) = G_AND [[TRUNC6]], [[C]]
    ; GFX9: [[AND7:%[0-9]+]]:_(s16) = G_AND [[TRUNC7]], [[C]]
    ; GFX9: [[ANYEXT6:%[0-9]+]]:_(s32) = G_ANYEXT [[AND6]](s16)
    ; GFX9: [[ANYEXT7:%[0-9]+]]:_(s32) = G_ANYEXT [[AND7]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC4:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT6]](s32), [[ANYEXT7]](s32)
    ; GFX9: [[MUL1:%[0-9]+]]:_(<2 x s16>) = G_MUL [[BUILD_VECTOR_TRUNC3]], [[BUILD_VECTOR_TRUNC4]]
    ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[COPY8:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC5:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY7]](s32), [[COPY8]](s32)
    ; GFX9: [[LSHR1:%[0-9]+]]:_(<2 x s16>) = G_LSHR [[MUL1]], [[BUILD_VECTOR_TRUNC5]](<2 x s16>)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[LSHR]](<2 x s16>)
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C2]](s32)
    ; GFX9: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[LSHR1]](<2 x s16>)
    ; GFX9: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; GFX9: [[TRUNC8:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX9: [[AND8:%[0-9]+]]:_(s16) = G_AND [[TRUNC8]], [[C]]
    ; GFX9: [[TRUNC9:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; GFX9: [[AND9:%[0-9]+]]:_(s16) = G_AND [[TRUNC9]], [[C]]
    ; GFX9: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX9: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[AND9]], [[C3]](s16)
    ; GFX9: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND8]], [[SHL]]
    ; GFX9: [[TRUNC10:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; GFX9: [[AND10:%[0-9]+]]:_(s16) = G_AND [[TRUNC10]], [[C]]
    ; GFX9: [[TRUNC11:%[0-9]+]]:_(s16) = G_TRUNC [[DEF1]](s32)
    ; GFX9: [[AND11:%[0-9]+]]:_(s16) = G_AND [[TRUNC11]], [[C]]
    ; GFX9: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[AND11]], [[C3]](s16)
    ; GFX9: [[OR1:%[0-9]+]]:_(s16) = G_OR [[AND10]], [[SHL1]]
    ; GFX9: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[OR]](s16)
    ; GFX9: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[OR1]](s16)
    ; GFX9: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C2]](s32)
    ; GFX9: [[OR2:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL2]]
    ; GFX9: $vgpr0 = COPY [[OR2]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s32) = COPY $vgpr2
    %3:_(s32) = COPY $vgpr3
    %4:_(s32) = COPY $vgpr4
    %20:_(s32) = COPY $vgpr5
    %5:_(s8) = G_TRUNC %0
    %6:_(s8) = G_TRUNC %1
    %7:_(s8) = G_TRUNC %2
    %8:_(s8) = G_TRUNC %3
    %9:_(s8) = G_TRUNC %4
    %10:_(s8) = G_TRUNC %20
    %11:_(<3 x s8>) = G_BUILD_VECTOR %5, %6, %7
    %12:_(<3 x s8>) = G_BUILD_VECTOR %8, %9, %10
    %13:_(<3 x s8>) = G_UMULH %11, %12
    %14:_(s8), %15:_(s8), %16:_(s8) = G_UNMERGE_VALUES %13
    %17:_(s24) = G_MERGE_VALUES %14, %15, %16
    %18:_(s32) = G_ANYEXT %17
    $vgpr0 = COPY %18
...

---
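# <2 x s8> follows the same pattern without padding: GFX8 uses two scalar s16
# multiplies, GFX9 a single packed <2 x s16> multiply, and the two result
# bytes are packed back into an s16.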
name: test_umulh_v2s8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3
    ; GFX8-LABEL: name: test_umulh_v2s8
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; GFX8: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
    ; GFX8: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX8: [[MUL:%[0-9]+]]:_(s16) = G_MUL [[AND]], [[AND1]]
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX8: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[MUL]], [[C1]](s16)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[AND2:%[0-9]+]]:_(s16) = G_AND [[TRUNC2]], [[C]]
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[COPY3]](s32)
    ; GFX8: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C]]
    ; GFX8: [[MUL1:%[0-9]+]]:_(s16) = G_MUL [[AND2]], [[AND3]]
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[MUL1]], [[C1]](s16)
    ; GFX8: [[AND4:%[0-9]+]]:_(s16) = G_AND [[LSHR]], [[C]]
    ; GFX8: [[AND5:%[0-9]+]]:_(s16) = G_AND [[LSHR1]], [[C]]
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[AND5]], [[C1]](s16)
    ; GFX8: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND4]], [[SHL]]
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16)
    ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_umulh_v2s8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
    ; GFX9: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[COPY3]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; GFX9: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[AND]](s16)
    ; GFX9: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[AND1]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT]](s32), [[ANYEXT1]](s32)
    ; GFX9: [[AND2:%[0-9]+]]:_(s16) = G_AND [[TRUNC2]], [[C]]
    ; GFX9: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C]]
    ; GFX9: [[ANYEXT2:%[0-9]+]]:_(s32) = G_ANYEXT [[AND2]](s16)
    ; GFX9: [[ANYEXT3:%[0-9]+]]:_(s32) = G_ANYEXT [[AND3]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT2]](s32), [[ANYEXT3]](s32)
    ; GFX9: [[MUL:%[0-9]+]]:_(<2 x s16>) = G_MUL [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC1]]
    ; GFX9: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[C2]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY4]](s32), [[C2]](s32)
    ; GFX9: [[LSHR:%[0-9]+]]:_(<2 x s16>) = G_LSHR [[MUL]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[LSHR]](<2 x s16>)
    ; GFX9: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX9: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C3]](s32)
    ; GFX9: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX9: [[AND4:%[0-9]+]]:_(s16) = G_AND [[TRUNC4]], [[C]]
    ; GFX9: [[AND5:%[0-9]+]]:_(s16) = G_AND [[TRUNC5]], [[C]]
    ; GFX9: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[AND5]], [[C1]](s16)
    ; GFX9: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND4]], [[SHL]]
    ; GFX9: [[ANYEXT4:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT4]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s32) = COPY $vgpr2
    %3:_(s32) = COPY $vgpr3
    %5:_(s8) = G_TRUNC %0
    %6:_(s8) = G_TRUNC %1
    %7:_(s8) = G_TRUNC %2
    %8:_(s8) = G_TRUNC %3
    %11:_(<2 x s8>) = G_BUILD_VECTOR %5, %6
    %12:_(<2 x s8>) = G_BUILD_VECTOR %7, %8
    %13:_(<2 x s8>) = G_UMULH %11, %12
    %14:_(s8), %15:_(s8) = G_UNMERGE_VALUES %13
    %17:_(s16) = G_MERGE_VALUES %14, %15
    %18:_(s32) = G_ANYEXT %17
    $vgpr0 = COPY %18
...

---
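# <4 x s8> unpacks the bytes from each s32 source with shifts, multiplies them
# (four scalar s16 multiplies on GFX8, two packed <2 x s16> multiplies on
# GFX9), and reassembles the four high bytes into an s32 with shift/or.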
name: test_umulh_v4s8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1
    ; GFX8-LABEL: name: test_umulh_v4s8
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
    ; GFX8: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
    ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
    ; GFX8: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32)
    ; GFX8: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C1]](s32)
    ; GFX8: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C2]](s32)
    ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C3]]
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C3]]
    ; GFX8: [[MUL:%[0-9]+]]:_(s16) = G_MUL [[AND]], [[AND1]]
    ; GFX8: [[C4:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
    ; GFX8: [[LSHR6:%[0-9]+]]:_(s16) = G_LSHR [[MUL]], [[C4]](s16)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[AND2:%[0-9]+]]:_(s16) = G_AND [[TRUNC2]], [[C3]]
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32)
    ; GFX8: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C3]]
    ; GFX8: [[MUL1:%[0-9]+]]:_(s16) = G_MUL [[AND2]], [[AND3]]
    ; GFX8: [[LSHR7:%[0-9]+]]:_(s16) = G_LSHR [[MUL1]], [[C4]](s16)
    ; GFX8: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX8: [[AND4:%[0-9]+]]:_(s16) = G_AND [[TRUNC4]], [[C3]]
    ; GFX8: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR4]](s32)
    ; GFX8: [[AND5:%[0-9]+]]:_(s16) = G_AND [[TRUNC5]], [[C3]]
    ; GFX8: [[MUL2:%[0-9]+]]:_(s16) = G_MUL [[AND4]], [[AND5]]
    ; GFX8: [[LSHR8:%[0-9]+]]:_(s16) = G_LSHR [[MUL2]], [[C4]](s16)
    ; GFX8: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; GFX8: [[AND6:%[0-9]+]]:_(s16) = G_AND [[TRUNC6]], [[C3]]
    ; GFX8: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR5]](s32)
    ; GFX8: [[AND7:%[0-9]+]]:_(s16) = G_AND [[TRUNC7]], [[C3]]
    ; GFX8: [[MUL3:%[0-9]+]]:_(s16) = G_MUL [[AND6]], [[AND7]]
    ; GFX8: [[LSHR9:%[0-9]+]]:_(s16) = G_LSHR [[MUL3]], [[C4]](s16)
    ; GFX8: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR6]](s16)
    ; GFX8: [[AND8:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C5]]
    ; GFX8: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR7]](s16)
    ; GFX8: [[AND9:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C5]]
    ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND9]], [[C]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND8]], [[SHL]]
    ; GFX8: [[ANYEXT2:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR8]](s16)
    ; GFX8: [[AND10:%[0-9]+]]:_(s32) = G_AND [[ANYEXT2]], [[C5]]
    ; GFX8: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND10]], [[C1]](s32)
    ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
    ; GFX8: [[ANYEXT3:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR9]](s16)
    ; GFX8: [[AND11:%[0-9]+]]:_(s32) = G_AND [[ANYEXT3]], [[C5]]
    ; GFX8: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND11]], [[C2]](s32)
    ; GFX8: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
    ; GFX8: $vgpr0 = COPY [[OR2]](s32)
    ; GFX9-LABEL: name: test_umulh_v4s8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
    ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
    ; GFX9: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32)
    ; GFX9: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C1]](s32)
    ; GFX9: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C2]](s32)
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX9: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; GFX9: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32)
    ; GFX9: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR4]](s32)
    ; GFX9: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR5]](s32)
    ; GFX9: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C3]]
    ; GFX9: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C3]]
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[AND]](s16)
    ; GFX9: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[AND1]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT]](s32), [[ANYEXT1]](s32)
    ; GFX9: [[AND2:%[0-9]+]]:_(s16) = G_AND [[TRUNC4]], [[C3]]
    ; GFX9: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC5]], [[C3]]
    ; GFX9: [[ANYEXT2:%[0-9]+]]:_(s32) = G_ANYEXT [[AND2]](s16)
    ; GFX9: [[ANYEXT3:%[0-9]+]]:_(s32) = G_ANYEXT [[AND3]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT2]](s32), [[ANYEXT3]](s32)
    ; GFX9: [[MUL:%[0-9]+]]:_(<2 x s16>) = G_MUL [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC1]]
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY2]](s32), [[COPY3]](s32)
    ; GFX9: [[LSHR6:%[0-9]+]]:_(<2 x s16>) = G_LSHR [[MUL]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[AND4:%[0-9]+]]:_(s16) = G_AND [[TRUNC2]], [[C3]]
    ; GFX9: [[AND5:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C3]]
    ; GFX9: [[ANYEXT4:%[0-9]+]]:_(s32) = G_ANYEXT [[AND4]](s16)
    ; GFX9: [[ANYEXT5:%[0-9]+]]:_(s32) = G_ANYEXT [[AND5]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC3:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT4]](s32), [[ANYEXT5]](s32)
    ; GFX9: [[AND6:%[0-9]+]]:_(s16) = G_AND [[TRUNC6]], [[C3]]
    ; GFX9: [[AND7:%[0-9]+]]:_(s16) = G_AND [[TRUNC7]], [[C3]]
    ; GFX9: [[ANYEXT6:%[0-9]+]]:_(s32) = G_ANYEXT [[AND6]](s16)
    ; GFX9: [[ANYEXT7:%[0-9]+]]:_(s32) = G_ANYEXT [[AND7]](s16)
    ; GFX9: [[BUILD_VECTOR_TRUNC4:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[ANYEXT6]](s32), [[ANYEXT7]](s32)
    ; GFX9: [[MUL1:%[0-9]+]]:_(<2 x s16>) = G_MUL [[BUILD_VECTOR_TRUNC3]], [[BUILD_VECTOR_TRUNC4]]
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC5:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY4]](s32), [[COPY5]](s32)
    ; GFX9: [[LSHR7:%[0-9]+]]:_(<2 x s16>) = G_LSHR [[MUL1]], [[BUILD_VECTOR_TRUNC5]](<2 x s16>)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[LSHR6]](<2 x s16>)
    ; GFX9: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C1]](s32)
    ; GFX9: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[LSHR7]](<2 x s16>)
    ; GFX9: [[LSHR9:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C1]](s32)
    ; GFX9: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; GFX9: [[AND8:%[0-9]+]]:_(s32) = G_AND [[BITCAST]], [[C4]]
    ; GFX9: [[AND9:%[0-9]+]]:_(s32) = G_AND [[LSHR8]], [[C4]]
    ; GFX9: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND9]], [[C]](s32)
    ; GFX9: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND8]], [[SHL]]
    ; GFX9: [[AND10:%[0-9]+]]:_(s32) = G_AND [[BITCAST1]], [[C4]]
    ; GFX9: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND10]], [[C1]](s32)
    ; GFX9: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
    ; GFX9: [[AND11:%[0-9]+]]:_(s32) = G_AND [[LSHR9]], [[C4]]
    ; GFX9: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND11]], [[C2]](s32)
    ; GFX9: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
    ; GFX9: $vgpr0 = COPY [[OR2]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s8), %3:_(s8), %4:_(s8), %5:_(s8) = G_UNMERGE_VALUES %0
    %6:_(s8), %7:_(s8), %8:_(s8), %9:_(s8) = G_UNMERGE_VALUES %1
    %10:_(<4 x s8>) = G_BUILD_VECTOR %2:_(s8), %3:_(s8), %4:_(s8), %5:_(s8)
    %11:_(<4 x s8>) = G_BUILD_VECTOR %6:_(s8), %7:_(s8), %8:_(s8), %9:_(s8)
    %12:_(<4 x s8>) = G_UMULH %10:_, %11:_
    %13:_(s8), %14:_(s8), %15:_(s8), %16:_(s8) = G_UNMERGE_VALUES %12:_(<4 x s8>)
    %17:_(s32) = G_MERGE_VALUES %13, %14, %15, %16
    $vgpr0 = COPY %17
...