/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

/* NOTE(review): every "**" comment below is a check-function-bodies
   assembly expectation -- a regex matched line-by-line against the
   compiled function body.  The comments are test data, not prose:
   do not reword, reindent or retabify them.  */

#include "test_sve_acle.h"

/* Vector/vector form with the governing predicate tied to the result.
   Either operand order is accepted: CMPHI with swapped operands and
   CMPLO are equivalent ways of testing unsigned "less than".  */
/*
** cmplt_u8_tied:
** (
**	cmphi	p0\.b, p0/z, z1\.b, z0\.b
** |
**	cmplo	p0\.b, p0/z, z0\.b, z1\.b
** )
**	ret
*/
TEST_COMPARE_Z (cmplt_u8_tied, svuint8_t,
		p0 = svcmplt_u8 (p0, z0, z1),
		p0 = svcmplt (p0, z0, z1))

/* As above, but the governing predicate (p1) is distinct from the
   result predicate (p0).  */
/*
** cmplt_u8_untied:
** (
**	cmphi	p0\.b, p1/z, z1\.b, z0\.b
** |
**	cmplo	p0\.b, p1/z, z0\.b, z1\.b
** )
**	ret
*/
TEST_COMPARE_Z (cmplt_u8_untied, svuint8_t,
		p0 = svcmplt_u8 (p1, z0, z1),
		p0 = svcmplt (p1, z0, z1))

/* Scalar (_n) form with a variable operand: the GPR is broadcast to a
   vector first, then compared as in the vector/vector forms.  */
/*
** cmplt_w0_u8:
**	mov	(z[0-9]+\.b), w0
** (
**	cmphi	p0\.b, p1/z, \1, z0\.b
** |
**	cmplo	p0\.b, p1/z, z0\.b, \1
** )
**	ret
*/
TEST_COMPARE_ZX (cmplt_w0_u8, svuint8_t, uint8_t,
		 p0 = svcmplt_n_u8 (p1, z0, x0),
		 p0 = svcmplt (p1, z0, x0))

/* Constants 0, 1, 15, 16 and 127 are expected to use CMPLO's immediate
   form directly; 128 and -1 (below) do not, and fall back to a vector
   constant instead.  */
/*
** cmplt_0_u8:
**	cmplo	p0\.b, p1/z, z0\.b, #0
**	ret
*/
TEST_COMPARE_Z (cmplt_0_u8, svuint8_t,
		p0 = svcmplt_n_u8 (p1, z0, 0),
		p0 = svcmplt (p1, z0, 0))

/*
** cmplt_1_u8:
**	cmplo	p0\.b, p1/z, z0\.b, #1
**	ret
*/
TEST_COMPARE_Z (cmplt_1_u8, svuint8_t,
		p0 = svcmplt_n_u8 (p1, z0, 1),
		p0 = svcmplt (p1, z0, 1))

/*
** cmplt_15_u8:
**	cmplo	p0\.b, p1/z, z0\.b, #15
**	ret
*/
TEST_COMPARE_Z (cmplt_15_u8, svuint8_t,
		p0 = svcmplt_n_u8 (p1, z0, 15),
		p0 = svcmplt (p1, z0, 15))

/*
** cmplt_16_u8:
**	cmplo	p0\.b, p1/z, z0\.b, #16
**	ret
*/
TEST_COMPARE_Z (cmplt_16_u8, svuint8_t,
		p0 = svcmplt_n_u8 (p1, z0, 16),
		p0 = svcmplt (p1, z0, 16))

/*
** cmplt_127_u8:
**	cmplo	p0\.b, p1/z, z0\.b, #127
**	ret
*/
TEST_COMPARE_Z (cmplt_127_u8, svuint8_t,
		p0 = svcmplt_n_u8 (p1, z0, 127),
		p0 = svcmplt (p1, z0, 127))

/* 128 is outside CMPLO's immediate range (127 above is the largest
   inline immediate in this file), so it is materialised as the vector
   byte constant -128 -- the same bit pattern as unsigned 128.  */
/*
** cmplt_128_u8:
**	mov	(z[0-9]+\.b), #-128
** (
**	cmphi	p0\.b, p1/z, \1, z0\.b
** |
**	cmplo	p0\.b, p1/z, z0\.b, \1
** )
**	ret
*/
TEST_COMPARE_Z (cmplt_128_u8, svuint8_t,
		p0 = svcmplt_n_u8 (p1, z0, 128),
		p0 = svcmplt (p1, z0, 128))

/* -1 (i.e. unsigned 255) likewise has no immediate encoding here and
   is loaded as a vector constant.  */
/*
** cmplt_m1_u8:
**	mov	(z[0-9]+\.b), #-1
** (
**	cmphi	p0\.b, p1/z, \1, z0\.b
** |
**	cmplo	p0\.b, p1/z, z0\.b, \1
** )
**	ret
*/
TEST_COMPARE_Z (cmplt_m1_u8, svuint8_t,
		p0 = svcmplt_n_u8 (p1, z0, -1),
		p0 = svcmplt (p1, z0, -1))