/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

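/* Tests for the wide forms of the signed less-than comparison, in which
   8-bit elements of the first operand are compared against 64-bit
   elements of the second.  As the cases below assume, immediates in the
   signed range [-16, 15] are expected to be encoded directly in the
   cmplt instruction; values outside that range, like scalar operands,
   must first be moved into a vector register.  */
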
/*
** cmplt_wide_s8_tied:
**	cmplt	p0\.b, p0/z, z0\.b, z1\.d
**	ret
*/
TEST_COMPARE_DUAL_Z (cmplt_wide_s8_tied, svint8_t, svint64_t,
		     p0 = svcmplt_wide_s8 (p0, z0, z1),
		     p0 = svcmplt_wide (p0, z0, z1))

/*
** cmplt_wide_s8_untied:
**	cmplt	p0\.b, p1/z, z0\.b, z1\.d
**	ret
*/
TEST_COMPARE_DUAL_Z (cmplt_wide_s8_untied, svint8_t, svint64_t,
		     p0 = svcmplt_wide_s8 (p1, z0, z1),
		     p0 = svcmplt_wide (p1, z0, z1))

/*
** cmplt_wide_x0_s8:
**	mov	(z[0-9]+\.d), x0
**	cmplt	p0\.b, p1/z, z0\.b, \1
**	ret
*/
TEST_COMPARE_ZX (cmplt_wide_x0_s8, svint8_t, int64_t,
		 p0 = svcmplt_wide_n_s8 (p1, z0, x0),
		 p0 = svcmplt_wide (p1, z0, x0))

/*
** cmplt_wide_0_s8:
**	cmplt	p0\.b, p1/z, z0\.b, #0
**	ret
*/
TEST_COMPARE_Z (cmplt_wide_0_s8, svint8_t,
		p0 = svcmplt_wide_n_s8 (p1, z0, 0),
		p0 = svcmplt_wide (p1, z0, 0))

/*
** cmplt_wide_1_s8:
**	cmplt	p0\.b, p1/z, z0\.b, #1
**	ret
*/
TEST_COMPARE_Z (cmplt_wide_1_s8, svint8_t,
		p0 = svcmplt_wide_n_s8 (p1, z0, 1),
		p0 = svcmplt_wide (p1, z0, 1))

/*
** cmplt_wide_15_s8:
**	cmplt	p0\.b, p1/z, z0\.b, #15
**	ret
*/
TEST_COMPARE_Z (cmplt_wide_15_s8, svint8_t,
		p0 = svcmplt_wide_n_s8 (p1, z0, 15),
		p0 = svcmplt_wide (p1, z0, 15))

/*
** cmplt_wide_16_s8:
**	mov	(z[0-9]+\.d), #16
**	cmplt	p0\.b, p1/z, z0\.b, \1
**	ret
*/
TEST_COMPARE_Z (cmplt_wide_16_s8, svint8_t,
		p0 = svcmplt_wide_n_s8 (p1, z0, 16),
		p0 = svcmplt_wide (p1, z0, 16))

/*
** cmplt_wide_m1_s8:
**	cmplt	p0\.b, p1/z, z0\.b, #-1
**	ret
*/
TEST_COMPARE_Z (cmplt_wide_m1_s8, svint8_t,
		p0 = svcmplt_wide_n_s8 (p1, z0, -1),
		p0 = svcmplt_wide (p1, z0, -1))

/*
** cmplt_wide_m16_s8:
**	cmplt	p0\.b, p1/z, z0\.b, #-16
**	ret
*/
TEST_COMPARE_Z (cmplt_wide_m16_s8, svint8_t,
		p0 = svcmplt_wide_n_s8 (p1, z0, -16),
		p0 = svcmplt_wide (p1, z0, -16))

/*
** cmplt_wide_m17_s8:
**	mov	(z[0-9]+\.d), #-17
**	cmplt	p0\.b, p1/z, z0\.b, \1
**	ret
*/
TEST_COMPARE_Z (cmplt_wide_m17_s8, svint8_t,
		p0 = svcmplt_wide_n_s8 (p1, z0, -17),
		p0 = svcmplt_wide (p1, z0, -17))