// REQUIRES: aarch64-registered-target
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -o - %s >/dev/null
#include <arm_sve.h>

#ifdef SVE_OVERLOADED_FORMS
// A simple used,unused... macro, long enough to represent any SVE builtin.
#define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
#else
#define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
#endif
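// For example, SVE_ACLE_FUNC(svget2,_s8,,) expands to the overloaded name
// svget2 when SVE_OVERLOADED_FORMS is defined, and to the type-specific name
// svget2_s8 otherwise; both forms must produce the same IR checked below.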

svint8_t test_svget2_s8(svint8x2_t tuple)
{
  // CHECK-LABEL: test_svget2_s8
  // CHECK: %[[EXT:.*]] = call <vscale x 16 x i8> @llvm.aarch64.sve.tuple.get.nxv16i8.nxv32i8(<vscale x 32 x i8> %tuple, i32 0)
  // CHECK-NEXT: ret <vscale x 16 x i8> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_s8,,)(tuple, 0);
}

svint16_t test_svget2_s16(svint16x2_t tuple)
{
  // CHECK-LABEL: test_svget2_s16
  // CHECK: %[[EXT:.*]] = call <vscale x 8 x i16> @llvm.aarch64.sve.tuple.get.nxv8i16.nxv16i16(<vscale x 16 x i16> %tuple, i32 1)
  // CHECK-NEXT: ret <vscale x 8 x i16> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_s16,,)(tuple, 1);
}

svint32_t test_svget2_s32(svint32x2_t tuple)
{
  // CHECK-LABEL: test_svget2_s32
  // CHECK: %[[EXT:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.tuple.get.nxv4i32.nxv8i32(<vscale x 8 x i32> %tuple, i32 0)
  // CHECK-NEXT: ret <vscale x 4 x i32> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_s32,,)(tuple, 0);
}

svint64_t test_svget2_s64(svint64x2_t tuple)
{
  // CHECK-LABEL: test_svget2_s64
  // CHECK: %[[EXT:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.tuple.get.nxv2i64.nxv4i64(<vscale x 4 x i64> %tuple, i32 1)
  // CHECK-NEXT: ret <vscale x 2 x i64> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_s64,,)(tuple, 1);
}

svuint8_t test_svget2_u8(svuint8x2_t tuple)
{
  // CHECK-LABEL: test_svget2_u8
  // CHECK: %[[EXT:.*]] = call <vscale x 16 x i8> @llvm.aarch64.sve.tuple.get.nxv16i8.nxv32i8(<vscale x 32 x i8> %tuple, i32 0)
  // CHECK-NEXT: ret <vscale x 16 x i8> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_u8,,)(tuple, 0);
}

svuint16_t test_svget2_u16(svuint16x2_t tuple)
{
  // CHECK-LABEL: test_svget2_u16
  // CHECK: %[[EXT:.*]] = call <vscale x 8 x i16> @llvm.aarch64.sve.tuple.get.nxv8i16.nxv16i16(<vscale x 16 x i16> %tuple, i32 1)
  // CHECK-NEXT: ret <vscale x 8 x i16> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_u16,,)(tuple, 1);
}

svuint32_t test_svget2_u32(svuint32x2_t tuple)
{
  // CHECK-LABEL: test_svget2_u32
  // CHECK: %[[EXT:.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.tuple.get.nxv4i32.nxv8i32(<vscale x 8 x i32> %tuple, i32 0)
  // CHECK-NEXT: ret <vscale x 4 x i32> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_u32,,)(tuple, 0);
}

svuint64_t test_svget2_u64(svuint64x2_t tuple)
{
  // CHECK-LABEL: test_svget2_u64
  // CHECK: %[[EXT:.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.tuple.get.nxv2i64.nxv4i64(<vscale x 4 x i64> %tuple, i32 1)
  // CHECK-NEXT: ret <vscale x 2 x i64> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_u64,,)(tuple, 1);
}

svfloat16_t test_svget2_f16(svfloat16x2_t tuple)
{
  // CHECK-LABEL: test_svget2_f16
  // CHECK: %[[EXT:.*]] = call <vscale x 8 x half> @llvm.aarch64.sve.tuple.get.nxv8f16.nxv16f16(<vscale x 16 x half> %tuple, i32 0)
  // CHECK-NEXT: ret <vscale x 8 x half> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_f16,,)(tuple, 0);
}

svfloat32_t test_svget2_f32(svfloat32x2_t tuple)
{
  // CHECK-LABEL: test_svget2_f32
  // CHECK: %[[EXT:.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.tuple.get.nxv4f32.nxv8f32(<vscale x 8 x float> %tuple, i32 1)
  // CHECK-NEXT: ret <vscale x 4 x float> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_f32,,)(tuple, 1);
}

svfloat64_t test_svget2_f64(svfloat64x2_t tuple)
{
  // CHECK-LABEL: test_svget2_f64
  // CHECK: %[[EXT:.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.tuple.get.nxv2f64.nxv4f64(<vscale x 4 x double> %tuple, i32 0)
  // CHECK-NEXT: ret <vscale x 2 x double> %[[EXT]]
  return SVE_ACLE_FUNC(svget2,_f64,,)(tuple, 0);
}