/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

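/* Each test below checks the code generated for svabd on signed
   64-bit elements: the merging (_m), zeroing (_z) and "don't care"
   (_x) predication forms, with a vector, scalar (x0) or immediate
   (#1) second operand, and with the result register tied to the
   first input, tied to the second input, or untied.  */
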
/*
** abd_s64_m_tied1:
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (abd_s64_m_tied1, svint64_t,
		z0 = svabd_s64_m (p0, z0, z1),
		z0 = svabd_m (p0, z0, z1))

/*
** abd_s64_m_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (abd_s64_m_tied2, svint64_t,
		z0 = svabd_s64_m (p0, z1, z0),
		z0 = svabd_m (p0, z1, z0))

/*
** abd_s64_m_untied:
**	movprfx	z0, z1
**	sabd	z0\.d, p0/m, z0\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (abd_s64_m_untied, svint64_t,
		z0 = svabd_s64_m (p0, z1, z2),
		z0 = svabd_m (p0, z1, z2))

/*
** abd_x0_s64_m_tied1:
**	mov	(z[0-9]+\.d), x0
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (abd_x0_s64_m_tied1, svint64_t, int64_t,
		 z0 = svabd_n_s64_m (p0, z0, x0),
		 z0 = svabd_m (p0, z0, x0))

/*
** abd_x0_s64_m_untied:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0, z1
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (abd_x0_s64_m_untied, svint64_t, int64_t,
		 z0 = svabd_n_s64_m (p0, z1, x0),
		 z0 = svabd_m (p0, z1, x0))

/*
** abd_1_s64_m_tied1:
**	mov	(z[0-9]+\.d), #1
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (abd_1_s64_m_tied1, svint64_t,
		z0 = svabd_n_s64_m (p0, z0, 1),
		z0 = svabd_m (p0, z0, 1))

/*
** abd_1_s64_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.d), #1
**	movprfx	z0, z1
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (abd_1_s64_m_untied, svint64_t,
		z0 = svabd_n_s64_m (p0, z1, 1),
		z0 = svabd_m (p0, z1, 1))

/*
** abd_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (abd_s64_z_tied1, svint64_t,
		z0 = svabd_s64_z (p0, z0, z1),
		z0 = svabd_z (p0, z0, z1))

/*
** abd_s64_z_tied2:
**	movprfx	z0\.d, p0/z, z0\.d
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (abd_s64_z_tied2, svint64_t,
		z0 = svabd_s64_z (p0, z1, z0),
		z0 = svabd_z (p0, z1, z0))

/*
** abd_s64_z_untied:
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	sabd	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0\.d, p0/z, z2\.d
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (abd_s64_z_untied, svint64_t,
		z0 = svabd_s64_z (p0, z1, z2),
		z0 = svabd_z (p0, z1, z2))

/*
** abd_x0_s64_z_tied1:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0\.d, p0/z, z0\.d
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (abd_x0_s64_z_tied1, svint64_t, int64_t,
		 z0 = svabd_n_s64_z (p0, z0, x0),
		 z0 = svabd_z (p0, z0, x0))

/*
** abd_x0_s64_z_untied:
**	mov	(z[0-9]+\.d), x0
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	sabd	z0\.d, p0/m, z0\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_ZX (abd_x0_s64_z_untied, svint64_t, int64_t,
		 z0 = svabd_n_s64_z (p0, z1, x0),
		 z0 = svabd_z (p0, z1, x0))

/*
** abd_1_s64_z_tied1:
**	mov	(z[0-9]+\.d), #1
**	movprfx	z0\.d, p0/z, z0\.d
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (abd_1_s64_z_tied1, svint64_t,
		z0 = svabd_n_s64_z (p0, z0, 1),
		z0 = svabd_z (p0, z0, 1))

/*
** abd_1_s64_z_untied:
**	mov	(z[0-9]+\.d), #1
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	sabd	z0\.d, p0/m, z0\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (abd_1_s64_z_untied, svint64_t,
		z0 = svabd_n_s64_z (p0, z1, 1),
		z0 = svabd_z (p0, z1, 1))

/*
** abd_s64_x_tied1:
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (abd_s64_x_tied1, svint64_t,
		z0 = svabd_s64_x (p0, z0, z1),
		z0 = svabd_x (p0, z0, z1))

/*
** abd_s64_x_tied2:
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (abd_s64_x_tied2, svint64_t,
		z0 = svabd_s64_x (p0, z1, z0),
		z0 = svabd_x (p0, z1, z0))

/*
** abd_s64_x_untied:
** (
**	movprfx	z0, z1
**	sabd	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0, z2
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (abd_s64_x_untied, svint64_t,
		z0 = svabd_s64_x (p0, z1, z2),
		z0 = svabd_x (p0, z1, z2))

/*
** abd_x0_s64_x_tied1:
**	mov	(z[0-9]+\.d), x0
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (abd_x0_s64_x_tied1, svint64_t, int64_t,
		 z0 = svabd_n_s64_x (p0, z0, x0),
		 z0 = svabd_x (p0, z0, x0))

/*
** abd_x0_s64_x_untied:
**	mov	z0\.d, x0
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_ZX (abd_x0_s64_x_untied, svint64_t, int64_t,
		 z0 = svabd_n_s64_x (p0, z1, x0),
		 z0 = svabd_x (p0, z1, x0))

/*
** abd_1_s64_x_tied1:
**	mov	(z[0-9]+\.d), #1
**	sabd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (abd_1_s64_x_tied1, svint64_t,
		z0 = svabd_n_s64_x (p0, z0, 1),
		z0 = svabd_x (p0, z0, 1))

/*
** abd_1_s64_x_untied:
**	mov	z0\.d, #1
**	sabd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (abd_1_s64_x_untied, svint64_t,
		z0 = svabd_n_s64_x (p0, z1, 1),
		z0 = svabd_x (p0, z1, 1))