// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify -verify-ignore-unexpected=error %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify=overload -verify-ignore-unexpected=error %s

#include <arm_sve.h>

#ifdef SVE_OVERLOADED_FORMS
// A simple used,unused... macro, long enough to represent any SVE builtin.
// Overloaded form: only A1 and A3 are pasted (e.g. svstnt1b_scatter_offset).
#define SVE_ACLE_FUNC(A1, A2_UNUSED, A3, A4_UNUSED) A1##A3
#else
// Non-overloaded form: all four fragments are pasted into the full name
// (e.g. svstnt1b_scatter_u32base_offset_s32).
#define SVE_ACLE_FUNC(A1, A2, A3, A4) A1##A2##A3##A4
#endif

// svstnt1b: truncate each 32-bit lane of 'data' to 8 bits and non-temporally
// scatter to the vector of base addresses (implicit scalar offset 0). With
// +sve2 the emitted IR is pattern-matched; with +sve alone the builtin is
// unavailable and -verify expects an implicit-declaration diagnostic.
void test_svstnt1b_scatter_u32base_s32(svbool_t pg, svuint32_t bases, svint32_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u32base_s32
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> [[TRUNC]], <vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u32base_s32'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u32base, , _s32)(pg, bases, data);
}

// 64-bit-element variant: signed data truncated to 8 bits, scattered to a
// vector of 64-bit base addresses (implicit scalar offset 0).
void test_svstnt1b_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u64base_s64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u64base_s64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u64base, , _s64)(pg, bases, data);
}

// Unsigned 32-bit data variant; lowers to the same scalar.offset intrinsic as
// the signed form since truncation to i8 is sign-agnostic.
void test_svstnt1b_scatter_u32base_u32(svbool_t pg, svuint32_t bases, svuint32_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u32base_u32
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> [[TRUNC]], <vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u32base_u32'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u32base, , _u32)(pg, bases, data);
}

// Unsigned 64-bit data variant of the vector-base form (implicit offset 0).
void test_svstnt1b_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u64base_u64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u64base_u64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u64base, , _u64)(pg, bases, data);
}

// Scalar base + vector of signed 64-bit byte offsets: data is truncated to
// 8 bits and scattered to base[offsets[i]] via the stnt1.scatter intrinsic.
void test_svstnt1b_scatter_s64offset_s64(svbool_t pg, int8_t *base, svint64_t offsets, svint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_s64offset_s64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.nxv2i8(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_s64offset_s64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter_, s64, offset, _s64)(pg, base, offsets, data);
}

// Unsigned-data variant of the signed-64-bit-offset form; same intrinsic as
// the signed-data case.
void test_svstnt1b_scatter_s64offset_u64(svbool_t pg, uint8_t *base, svint64_t offsets, svuint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_s64offset_u64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.nxv2i8(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_s64offset_u64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter_, s64, offset, _u64)(pg, base, offsets, data);
}

// Scalar base + vector of unsigned 32-bit byte offsets: lowers to the uxtw
// (zero-extend word) scatter intrinsic.
void test_svstnt1b_scatter_u32offset_s32(svbool_t pg, int8_t *base, svuint32_t offsets, svint32_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u32offset_s32
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> [[TRUNC]], <vscale x 4 x i1> [[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u32offset_s32'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter_, u32, offset, _s32)(pg, base, offsets, data);
}

// Scalar base + vector of unsigned 64-bit byte offsets (no extension needed,
// so the plain stnt1.scatter intrinsic is used).
void test_svstnt1b_scatter_u64offset_s64(svbool_t pg, int8_t *base, svuint64_t offsets, svint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u64offset_s64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.nxv2i8(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u64offset_s64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter_, u64, offset, _s64)(pg, base, offsets, data);
}

// Unsigned-data variant of the unsigned-32-bit-offset (uxtw) form.
void test_svstnt1b_scatter_u32offset_u32(svbool_t pg, uint8_t *base, svuint32_t offsets, svuint32_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u32offset_u32
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> [[TRUNC]], <vscale x 4 x i1> [[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u32offset_u32'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter_, u32, offset, _u32)(pg, base, offsets, data);
}

// Unsigned-data variant of the unsigned-64-bit-offset form.
void test_svstnt1b_scatter_u64offset_u64(svbool_t pg, uint8_t *base, svuint64_t offsets, svuint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u64offset_u64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.nxv2i8(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u64offset_u64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter_, u64, offset, _u64)(pg, base, offsets, data);
}

// Vector of 32-bit base addresses plus an explicit scalar byte offset: the
// scalar offset is passed through as the i64 operand of the intrinsic.
void test_svstnt1b_scatter_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u32base_offset_s32
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> [[TRUNC]], <vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u32base_offset_s32'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u32base, _offset, _s32)(pg, bases, offset, data);
}

// 64-bit-element variant of the vector-base + scalar-offset form.
void test_svstnt1b_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u64base_offset_s64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u64base_offset_s64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u64base, _offset, _s64)(pg, bases, offset, data);
}

// Unsigned-data variant of the 32-bit vector-base + scalar-offset form.
void test_svstnt1b_scatter_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset, svuint32_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u32base_offset_u32
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> [[TRUNC]], <vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u32base_offset_u32'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u32base, _offset, _u32)(pg, bases, offset, data);
}

// Unsigned-data variant of the 64-bit vector-base + scalar-offset form.
void test_svstnt1b_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data) {
  // CHECK-LABEL: test_svstnt1b_scatter_u64base_offset_u64
  // CHECK-DAG: [[TRUNC:%.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> [[TRUNC]], <vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  // overload-warning@+2 {{implicit declaration of function 'svstnt1b_scatter_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svstnt1b_scatter_u64base_offset_u64'}}
  return SVE_ACLE_FUNC(svstnt1b_scatter, _u64base, _offset, _u64)(pg, bases, offset, data);
}
