// REQUIRES: aarch64-registered-target
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -o - %s >/dev/null 2>%t
// RUN: FileCheck --check-prefix=ASM --allow-empty %s <%t

// If this check fails, please read test/CodeGen/aarch64-sve-intrinsics/README for instructions on how to resolve it.
// ASM-NOT: warning
#include <arm_sve.h>

#ifdef SVE_OVERLOADED_FORMS
// A simple used,unused... macro, long enough to represent any SVE builtin.
#define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
#else
#define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
#endif
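// For example, SVE_ACLE_FUNC(svqincw,_n_s32,,) expands to the overloaded name
// svqincw when SVE_OVERLOADED_FORMS is defined, and to the explicit name
// svqincw_n_s32 otherwise; both forms must lower to the same intrinsic call.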

int32_t test_svqincw_n_s32(int32_t op)
{
  // CHECK-LABEL: test_svqincw_n_s32
  // CHECK: %[[INTRINSIC:.*]] = call i32 @llvm.aarch64.sve.sqincw.n32(i32 %op, i32 31, i32 1)
  // CHECK: ret i32 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw,_n_s32,,)(op, 1);
}

int32_t test_svqincw_n_s32_1(int32_t op)
{
  // CHECK-LABEL: test_svqincw_n_s32_1
  // CHECK: %[[INTRINSIC:.*]] = call i32 @llvm.aarch64.sve.sqincw.n32(i32 %op, i32 31, i32 16)
  // CHECK: ret i32 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw,_n_s32,,)(op, 16);
}

int64_t test_svqincw_n_s64(int64_t op)
{
  // CHECK-LABEL: test_svqincw_n_s64
  // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.sqincw.n64(i64 %op, i32 31, i32 1)
  // CHECK: ret i64 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw,_n_s64,,)(op, 1);
}

uint32_t test_svqincw_n_u32(uint32_t op)
{
  // CHECK-LABEL: test_svqincw_n_u32
  // CHECK: %[[INTRINSIC:.*]] = call i32 @llvm.aarch64.sve.uqincw.n32(i32 %op, i32 31, i32 16)
  // CHECK: ret i32 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw,_n_u32,,)(op, 16);
}

uint64_t test_svqincw_n_u64(uint64_t op)
{
  // CHECK-LABEL: test_svqincw_n_u64
  // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.uqincw.n64(i64 %op, i32 31, i32 1)
  // CHECK: ret i64 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw,_n_u64,,)(op, 1);
}

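// The _pat variants take an explicit svpattern argument; the CHECK lines below
// expect that pattern lowered to its immediate encoding (e.g. SV_VL4 -> i32 4,
// SV_VL16 -> i32 9), whereas the non-_pat forms above are emitted with
// pattern 31 (all elements).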
int32_t test_svqincw_pat_n_s32(int32_t op)
{
  // CHECK-LABEL: test_svqincw_pat_n_s32
  // CHECK: %[[INTRINSIC:.*]] = call i32 @llvm.aarch64.sve.sqincw.n32(i32 %op, i32 4, i32 16)
  // CHECK: ret i32 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw_pat,_n_s32,,)(op, SV_VL4, 16);
}

int64_t test_svqincw_pat_n_s64(int64_t op)
{
  // CHECK-LABEL: test_svqincw_pat_n_s64
  // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.sqincw.n64(i64 %op, i32 5, i32 1)
  // CHECK: ret i64 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw_pat,_n_s64,,)(op, SV_VL5, 1);
}

uint32_t test_svqincw_pat_n_u32(uint32_t op)
{
  // CHECK-LABEL: test_svqincw_pat_n_u32
  // CHECK: %[[INTRINSIC:.*]] = call i32 @llvm.aarch64.sve.uqincw.n32(i32 %op, i32 6, i32 16)
  // CHECK: ret i32 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw_pat,_n_u32,,)(op, SV_VL6, 16);
}

uint64_t test_svqincw_pat_n_u64(uint64_t op)
{
  // CHECK-LABEL: test_svqincw_pat_n_u64
  // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.uqincw.n64(i64 %op, i32 7, i32 1)
  // CHECK: ret i64 %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw_pat,_n_u64,,)(op, SV_VL7, 1);
}

svint32_t test_svqincw_s32(svint32_t op)
{
  // CHECK-LABEL: test_svqincw_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.sqincw.nxv4i32(<vscale x 4 x i32> %op, i32 31, i32 16)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw,_s32,,)(op, 16);
}

svuint32_t test_svqincw_u32(svuint32_t op)
{
  // CHECK-LABEL: test_svqincw_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.uqincw.nxv4i32(<vscale x 4 x i32> %op, i32 31, i32 1)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw,_u32,,)(op, 1);
}

svint32_t test_svqincw_pat_s32(svint32_t op)
{
  // CHECK-LABEL: test_svqincw_pat_s32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.sqincw.nxv4i32(<vscale x 4 x i32> %op, i32 8, i32 16)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw_pat,_s32,,)(op, SV_VL8, 16);
}

svuint32_t test_svqincw_pat_u32(svuint32_t op)
{
  // CHECK-LABEL: test_svqincw_pat_u32
  // CHECK: %[[INTRINSIC:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.uqincw.nxv4i32(<vscale x 4 x i32> %op, i32 9, i32 1)
  // CHECK: ret <vscale x 4 x i32> %[[INTRINSIC]]
  return SVE_ACLE_FUNC(svqincw_pat,_u32,,)(op, SV_VL16, 1);
}