1# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2# RUN: llc -march=amdgcn -mcpu=tahiti -run-pass=instruction-select -verify-machineinstrs %s -o -  | FileCheck -check-prefix=GCN %s
3
4---
# Select @llvm.amdgcn.ldexp (f32) with an SGPR mantissa and a VGPR exponent;
# expects V_LDEXP_F32_e64 with the SGPR source allowed directly in src0.
5name: ldexp_s32_vsv
6legalized: true
7regBankSelected: true
8tracksRegLiveness: true
9
10body: |
11  bb.0:
12    liveins: $sgpr0, $vgpr0
13    ; GCN-LABEL: name: ldexp_s32_vsv
14    ; GCN: liveins: $sgpr0, $vgpr0
15    ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $sgpr0
16    ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
17    ; GCN: %2:vgpr_32 = nofpexcept V_LDEXP_F32_e64 0, [[COPY]], 0, [[COPY1]], 0, 0, implicit $mode, implicit $exec
18    ; GCN: S_ENDPGM 0, implicit %2
19    %0:sgpr(s32) = COPY $sgpr0
20    %1:vgpr(s32) = COPY $vgpr0
21    %2:vgpr(s32) = G_INTRINSIC intrinsic(@llvm.amdgcn.ldexp), %0, %1
22    S_ENDPGM 0, implicit %2
23...
24
25---
# Select @llvm.amdgcn.ldexp (f32) with a VGPR mantissa and an SGPR exponent;
# expects V_LDEXP_F32_e64 with the SGPR source allowed in src1.
26name: ldexp_s32_vvs
27legalized: true
28regBankSelected: true
29tracksRegLiveness: true
30
31body: |
32  bb.0:
33    liveins: $sgpr0, $vgpr0
34    ; GCN-LABEL: name: ldexp_s32_vvs
35    ; GCN: liveins: $sgpr0, $vgpr0
36    ; GCN: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
37    ; GCN: [[COPY1:%[0-9]+]]:sreg_32 = COPY $sgpr0
38    ; GCN: %2:vgpr_32 = nofpexcept V_LDEXP_F32_e64 0, [[COPY]], 0, [[COPY1]], 0, 0, implicit $mode, implicit $exec
39    ; GCN: S_ENDPGM 0, implicit %2
40    %0:vgpr(s32) = COPY $vgpr0
41    %1:sgpr(s32) = COPY $sgpr0
42    %2:vgpr(s32) = G_INTRINSIC intrinsic(@llvm.amdgcn.ldexp), %0, %1
43    S_ENDPGM 0, implicit %2
44...
45
46---
# Select @llvm.amdgcn.ldexp (f32) with both operands in VGPRs;
# expects V_LDEXP_F32_e64 consuming both VGPR copies directly.
47name: ldexp_s32_vvv
48legalized: true
49regBankSelected: true
50tracksRegLiveness: true
51
52body: |
53  bb.0:
54    liveins: $vgpr0, $vgpr1
55    ; GCN-LABEL: name: ldexp_s32_vvv
56    ; GCN: liveins: $vgpr0, $vgpr1
57    ; GCN: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
58    ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1
59    ; GCN: %2:vgpr_32 = nofpexcept V_LDEXP_F32_e64 0, [[COPY]], 0, [[COPY1]], 0, 0, implicit $mode, implicit $exec
60    ; GCN: S_ENDPGM 0, implicit %2
61    %0:vgpr(s32) = COPY $vgpr0
62    %1:vgpr(s32) = COPY $vgpr1
63    %2:vgpr(s32) = G_INTRINSIC intrinsic(@llvm.amdgcn.ldexp), %0, %1
64    S_ENDPGM 0, implicit %2
65...
66
67---
# Select @llvm.amdgcn.ldexp (f64) with an SGPR 64-bit mantissa and a VGPR
# exponent; expects V_LDEXP_F64_e64 with the sreg_64 source in src0.
68name: ldexp_s64_vsv
69legalized: true
70regBankSelected: true
71tracksRegLiveness: true
72
73body: |
74  bb.0:
75    liveins: $sgpr0_sgpr1, $vgpr0
76    ; GCN-LABEL: name: ldexp_s64_vsv
77    ; GCN: liveins: $sgpr0_sgpr1, $vgpr0
78    ; GCN: [[COPY:%[0-9]+]]:sreg_64 = COPY $sgpr0_sgpr1
79    ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
80    ; GCN: %2:vreg_64 = nofpexcept V_LDEXP_F64_e64 0, [[COPY]], 0, [[COPY1]], 0, 0, implicit $mode, implicit $exec
81    ; GCN: S_ENDPGM 0, implicit %2
82    %0:sgpr(s64) = COPY $sgpr0_sgpr1
83    %1:vgpr(s32) = COPY $vgpr0
84    %2:vgpr(s64) = G_INTRINSIC intrinsic(@llvm.amdgcn.ldexp), %0, %1
85    S_ENDPGM 0, implicit %2
86...
87
88---
# Select @llvm.amdgcn.ldexp (f64) with a VGPR 64-bit mantissa and an SGPR
# exponent; expects V_LDEXP_F64_e64 with the sreg_32 source in src1.
# Fix: liveins previously declared $sgpr0_sgpr1, $vgpr0 while the body reads
# $vgpr0_vgpr1 and $sgpr0; with tracksRegLiveness + -verify-machineinstrs the
# read of the undeclared $vgpr1 is invalid, so declare the registers actually
# used (matching the sibling s64 tests). The CHECK liveins line is updated to
# match, since instruction selection does not change block live-ins.
89name: ldexp_s64_vvs
90legalized: true
91regBankSelected: true
92tracksRegLiveness: true
93
94body: |
95  bb.0:
96    liveins: $sgpr0, $vgpr0_vgpr1
97    ; GCN-LABEL: name: ldexp_s64_vvs
98    ; GCN: liveins: $sgpr0, $vgpr0_vgpr1
99    ; GCN: [[COPY:%[0-9]+]]:vreg_64 = COPY $vgpr0_vgpr1
100    ; GCN: [[COPY1:%[0-9]+]]:sreg_32 = COPY $sgpr0
101    ; GCN: %2:vreg_64 = nofpexcept V_LDEXP_F64_e64 0, [[COPY]], 0, [[COPY1]], 0, 0, implicit $mode, implicit $exec
102    ; GCN: S_ENDPGM 0, implicit %2
103    %0:vgpr(s64) = COPY $vgpr0_vgpr1
104    %1:sgpr(s32) = COPY $sgpr0
105    %2:vgpr(s64) = G_INTRINSIC intrinsic(@llvm.amdgcn.ldexp), %0, %1
106    S_ENDPGM 0, implicit %2
107...
108
109---
# Select @llvm.amdgcn.ldexp (f64) with both operands in VGPRs;
# expects V_LDEXP_F64_e64 consuming the vreg_64 and vgpr_32 copies directly.
110name: ldexp_s64_vvv
111legalized: true
112regBankSelected: true
113tracksRegLiveness: true
114
115body: |
116  bb.0:
117    liveins: $vgpr0_vgpr1, $vgpr2
118    ; GCN-LABEL: name: ldexp_s64_vvv
119    ; GCN: liveins: $vgpr0_vgpr1, $vgpr2
120    ; GCN: [[COPY:%[0-9]+]]:vreg_64 = COPY $vgpr0_vgpr1
121    ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr2
122    ; GCN: %2:vreg_64 = nofpexcept V_LDEXP_F64_e64 0, [[COPY]], 0, [[COPY1]], 0, 0, implicit $mode, implicit $exec
123    ; GCN: S_ENDPGM 0, implicit %2
124    %0:vgpr(s64) = COPY $vgpr0_vgpr1
125    %1:vgpr(s32) = COPY $vgpr2
126    %2:vgpr(s64) = G_INTRINSIC intrinsic(@llvm.amdgcn.ldexp), %0, %1
127    S_ENDPGM 0, implicit %2
128...
129