// REQUIRES: aarch64-registered-target
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -o - %s >/dev/null
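// Note: the last RUN line is not FileCheck'd; it only verifies that the
// assembly backend can lower the generated IR without errors or warnings.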
#include <arm_sve.h>

#ifdef SVE_OVERLOADED_FORMS
// A simple used,unused... macro, long enough to represent any SVE builtin.
#define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
#else
#define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
#endif

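// For example, SVE_ACLE_FUNC(svdot,_n_s32,,) expands to the overloaded name
// svdot when SVE_OVERLOADED_FORMS is defined, and to svdot_n_s32 otherwise.
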
svint32_t test_svdot_s32(svint32_t op1, svint8_t op2, svint8_t op3)
{
  // CHECK-LABEL: test_svdot_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.nxv4i32(<vscale x 4 x i32> %op1, <vscale x 16 x i8> %op2, <vscale x 16 x i8> %op3)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_s32,,)(op1, op2, op3);
}

svint64_t test_svdot_s64(svint64_t op1, svint16_t op2, svint16_t op3)
{
  // CHECK-LABEL: test_svdot_s64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.nxv2i64(<vscale x 2 x i64> %op1, <vscale x 8 x i16> %op2, <vscale x 8 x i16> %op3)
  // CHECK: ret <vscale x 2 x i64> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_s64,,)(op1, op2, op3);
}

svuint32_t test_svdot_u32(svuint32_t op1, svuint8_t op2, svuint8_t op3)
{
  // CHECK-LABEL: test_svdot_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.nxv4i32(<vscale x 4 x i32> %op1, <vscale x 16 x i8> %op2, <vscale x 16 x i8> %op3)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_u32,,)(op1, op2, op3);
}

svuint64_t test_svdot_u64(svuint64_t op1, svuint16_t op2, svuint16_t op3)
{
  // CHECK-LABEL: test_svdot_u64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.udot.nxv2i64(<vscale x 2 x i64> %op1, <vscale x 8 x i16> %op2, <vscale x 8 x i16> %op3)
  // CHECK: ret <vscale x 2 x i64> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_u64,,)(op1, op2, op3);
}

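// The _n forms take a scalar final operand; codegen splats it across a vector
// with @llvm.aarch64.sve.dup.x before calling the same dot-product intrinsic.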
svint32_t test_svdot_n_s32(svint32_t op1, svint8_t op2, int8_t op3)
{
  // CHECK-LABEL: test_svdot_n_s32
  // CHECK: %[[DUP:.*]] = call <vscale x 16 x i8> @llvm.aarch64.sve.dup.x.nxv16i8(i8 %op3)
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.nxv4i32(<vscale x 4 x i32> %op1, <vscale x 16 x i8> %op2, <vscale x 16 x i8> %[[DUP]])
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_n_s32,,)(op1, op2, op3);
}

svint64_t test_svdot_n_s64(svint64_t op1, svint16_t op2, int16_t op3)
{
  // CHECK-LABEL: test_svdot_n_s64
  // CHECK: %[[DUP:.*]] = call <vscale x 8 x i16> @llvm.aarch64.sve.dup.x.nxv8i16(i16 %op3)
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.nxv2i64(<vscale x 2 x i64> %op1, <vscale x 8 x i16> %op2, <vscale x 8 x i16> %[[DUP]])
  // CHECK: ret <vscale x 2 x i64> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_n_s64,,)(op1, op2, op3);
}

svuint32_t test_svdot_n_u32(svuint32_t op1, svuint8_t op2, uint8_t op3)
{
  // CHECK-LABEL: test_svdot_n_u32
  // CHECK: %[[DUP:.*]] = call <vscale x 16 x i8> @llvm.aarch64.sve.dup.x.nxv16i8(i8 %op3)
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.nxv4i32(<vscale x 4 x i32> %op1, <vscale x 16 x i8> %op2, <vscale x 16 x i8> %[[DUP]])
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_n_u32,,)(op1, op2, op3);
}

svuint64_t test_svdot_n_u64(svuint64_t op1, svuint16_t op2, uint16_t op3)
{
  // CHECK-LABEL: test_svdot_n_u64
  // CHECK: %[[DUP:.*]] = call <vscale x 8 x i16> @llvm.aarch64.sve.dup.x.nxv8i16(i16 %op3)
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.udot.nxv2i64(<vscale x 2 x i64> %op1, <vscale x 8 x i16> %op2, <vscale x 8 x i16> %[[DUP]])
  // CHECK: ret <vscale x 2 x i64> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot,_n_u64,,)(op1, op2, op3);
}

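// The _lane forms take a constant lane index; the tests below exercise
// index 0 and the maximum legal value (3 for the 32-bit forms, 1 for the
// 64-bit forms).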
svint32_t test_svdot_lane_s32(svint32_t op1, svint8_t op2, svint8_t op3)
{
  // CHECK-LABEL: test_svdot_lane_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.lane.nxv4i32(<vscale x 4 x i32> %op1, <vscale x 16 x i8> %op2, <vscale x 16 x i8> %op3, i32 0)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot_lane,_s32,,)(op1, op2, op3, 0);
}

svint32_t test_svdot_lane_s32_1(svint32_t op1, svint8_t op2, svint8_t op3)
{
  // CHECK-LABEL: test_svdot_lane_s32_1
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.lane.nxv4i32(<vscale x 4 x i32> %op1, <vscale x 16 x i8> %op2, <vscale x 16 x i8> %op3, i32 3)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot_lane,_s32,,)(op1, op2, op3, 3);
}

svint64_t test_svdot_lane_s64(svint64_t op1, svint16_t op2, svint16_t op3)
{
  // CHECK-LABEL: test_svdot_lane_s64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.lane.nxv2i64(<vscale x 2 x i64> %op1, <vscale x 8 x i16> %op2, <vscale x 8 x i16> %op3, i32 0)
  // CHECK: ret <vscale x 2 x i64> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot_lane,_s64,,)(op1, op2, op3, 0);
}

svint64_t test_svdot_lane_s64_1(svint64_t op1, svint16_t op2, svint16_t op3)
{
  // CHECK-LABEL: test_svdot_lane_s64_1
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.lane.nxv2i64(<vscale x 2 x i64> %op1, <vscale x 8 x i16> %op2, <vscale x 8 x i16> %op3, i32 1)
  // CHECK: ret <vscale x 2 x i64> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot_lane,_s64,,)(op1, op2, op3, 1);
}

svuint32_t test_svdot_lane_u32(svuint32_t op1, svuint8_t op2, svuint8_t op3)
{
  // CHECK-LABEL: test_svdot_lane_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.lane.nxv4i32(<vscale x 4 x i32> %op1, <vscale x 16 x i8> %op2, <vscale x 16 x i8> %op3, i32 3)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot_lane,_u32,,)(op1, op2, op3, 3);
}

svuint64_t test_svdot_lane_u64(svuint64_t op1, svuint16_t op2, svuint16_t op3)
{
  // CHECK-LABEL: test_svdot_lane_u64
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.udot.lane.nxv2i64(<vscale x 2 x i64> %op1, <vscale x 8 x i16> %op2, <vscale x 8 x i16> %op3, i32 1)
  // CHECK: ret <vscale x 2 x i64> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svdot_lane,_u64,,)(op1, op2, op3, 1);
}