// REQUIRES: aarch64-registered-target
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null
#include <arm_sve.h>

#ifdef SVE_OVERLOADED_FORMS
// A simple used,unused... macro, long enough to represent any SVE builtin.
#define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
#else
#define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
#endif

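// For example, SVE_ACLE_FUNC(svwhilelt_b8,_s32,,) pastes to svwhilelt_b8_s32,
// or to the overloaded name svwhilelt_b8 when SVE_OVERLOADED_FORMS is defined.
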
svbool_t test_svwhilelt_b8_s32(int32_t op1, int32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b8_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.whilelt.nxv16i1.i32(i32 %op1, i32 %op2)
  // CHECK: ret <vscale x 16 x i1> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svwhilelt_b8,_s32,,)(op1, op2);
}

svbool_t test_svwhilelt_b16_s32(int32_t op1, int32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b16_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.whilelt.nxv8i1.i32(i32 %op1, i32 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b16,_s32,,)(op1, op2);
}

svbool_t test_svwhilelt_b32_s32(int32_t op1, int32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b32_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.whilelt.nxv4i1.i32(i32 %op1, i32 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b32,_s32,,)(op1, op2);
}

svbool_t test_svwhilelt_b64_s32(int32_t op1, int32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b64_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.whilelt.nxv2i1.i32(i32 %op1, i32 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b64,_s32,,)(op1, op2);
}

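// Note: with unsigned operands, svwhilelt is expected to lower to the unsigned
// comparison intrinsic llvm.aarch64.sve.whilelo, as the checks below verify.
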
svbool_t test_svwhilelt_b8_u32(uint32_t op1, uint32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b8_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.whilelo.nxv16i1.i32(i32 %op1, i32 %op2)
  // CHECK: ret <vscale x 16 x i1> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svwhilelt_b8,_u32,,)(op1, op2);
}

svbool_t test_svwhilelt_b16_u32(uint32_t op1, uint32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b16_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.whilelo.nxv8i1.i32(i32 %op1, i32 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b16,_u32,,)(op1, op2);
}

svbool_t test_svwhilelt_b32_u32(uint32_t op1, uint32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b32_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.whilelo.nxv4i1.i32(i32 %op1, i32 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b32,_u32,,)(op1, op2);
}

svbool_t test_svwhilelt_b64_u32(uint32_t op1, uint32_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b64_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.whilelo.nxv2i1.i32(i32 %op1, i32 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b64,_u32,,)(op1, op2);
}

svbool_t test_svwhilelt_b8_s64(int64_t op1, int64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b8_s64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.whilelt.nxv16i1.i64(i64 %op1, i64 %op2)
  // CHECK: ret <vscale x 16 x i1> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svwhilelt_b8,_s64,,)(op1, op2);
}

svbool_t test_svwhilelt_b16_s64(int64_t op1, int64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b16_s64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.whilelt.nxv8i1.i64(i64 %op1, i64 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b16,_s64,,)(op1, op2);
}

svbool_t test_svwhilelt_b32_s64(int64_t op1, int64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b32_s64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.whilelt.nxv4i1.i64(i64 %op1, i64 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b32,_s64,,)(op1, op2);
}

svbool_t test_svwhilelt_b64_s64(int64_t op1, int64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b64_s64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.whilelt.nxv2i1.i64(i64 %op1, i64 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b64,_s64,,)(op1, op2);
}

svbool_t test_svwhilelt_b8_u64(uint64_t op1, uint64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b8_u64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.whilelo.nxv16i1.i64(i64 %op1, i64 %op2)
  // CHECK: ret <vscale x 16 x i1> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svwhilelt_b8,_u64,,)(op1, op2);
}

svbool_t test_svwhilelt_b16_u64(uint64_t op1, uint64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b16_u64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.whilelo.nxv8i1.i64(i64 %op1, i64 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b16,_u64,,)(op1, op2);
}

svbool_t test_svwhilelt_b32_u64(uint64_t op1, uint64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b32_u64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.whilelo.nxv4i1.i64(i64 %op1, i64 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b32,_u64,,)(op1, op2);
}

svbool_t test_svwhilelt_b64_u64(uint64_t op1, uint64_t op2)
{
  // CHECK-LABEL: test_svwhilelt_b64_u64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.whilelo.nxv2i1.i64(i64 %op1, i64 %op2)
  // CHECK: %[[CAST:.*]] = call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %[[INTRINSIC]])
  // CHECK: ret <vscale x 16 x i1> %[[CAST]]
  return SVE_ACLE_FUNC(svwhilelt_b64,_u64,,)(op1, op2);
}