// RUN: %clang_cc1 -triple arm-none-linux-gnueabi -target-feature +neon \
// RUN:  -target-feature +sha2 -target-feature +aes \
// RUN:  -target-cpu cortex-a57 -emit-llvm -O1 -o - %s | FileCheck %s

// RUN: %clang_cc1 -triple arm64-none-linux-gnu -target-feature +neon \
// RUN:   -target-feature +sha2 -target-feature +aes \
// RUN:   -emit-llvm -O1 -o - %s | FileCheck %s
// RUN: not %clang_cc1 -triple arm64-none-linux-gnu -target-feature +neon \
// RUN:   -S -O3 -o - %s 2>&1 | FileCheck --check-prefix=CHECK-NO-CRYPTO %s

// Test new aarch64 intrinsics and types

#include <arm_neon.h>

// AES single-round encryption; also checks the diagnostic when crypto is absent.
uint8x16_t test_vaeseq_u8(uint8x16_t data, uint8x16_t key) {
  // CHECK-LABEL: @test_vaeseq_u8
  // CHECK-NO-CRYPTO: warning: implicit declaration of function 'vaeseq_u8' is invalid in C99
  return vaeseq_u8(data, key);
  // CHECK: call <16 x i8> @llvm.{{arm.neon|aarch64.crypto}}.aese(<16 x i8> %data, <16 x i8> %key)
}
21 
// AES single-round decryption.
uint8x16_t test_vaesdq_u8(uint8x16_t data, uint8x16_t key) {
  // CHECK-LABEL: @test_vaesdq_u8
  return vaesdq_u8(data, key);
  // CHECK: call <16 x i8> @llvm.{{arm.neon|aarch64.crypto}}.aesd(<16 x i8> %data, <16 x i8> %key)
}
27 
// AES MixColumns.
uint8x16_t test_vaesmcq_u8(uint8x16_t data) {
  // CHECK-LABEL: @test_vaesmcq_u8
  return vaesmcq_u8(data);
  // CHECK: call <16 x i8> @llvm.{{arm.neon|aarch64.crypto}}.aesmc(<16 x i8> %data)
}
33 
// AES inverse MixColumns.
uint8x16_t test_vaesimcq_u8(uint8x16_t data) {
  // CHECK-LABEL: @test_vaesimcq_u8
  return vaesimcq_u8(data);
  // CHECK: call <16 x i8> @llvm.{{arm.neon|aarch64.crypto}}.aesimc(<16 x i8> %data)
}
39 
// SHA1 fixed rotate of the E element.
uint32_t test_vsha1h_u32(uint32_t hash_e) {
  // CHECK-LABEL: @test_vsha1h_u32
  return vsha1h_u32(hash_e);
  // CHECK: call i32 @llvm.{{arm.neon|aarch64.crypto}}.sha1h(i32 %hash_e)
}
45 
// SHA1 schedule update 1.
uint32x4_t test_vsha1su1q_u32(uint32x4_t w0_3, uint32x4_t w12_15) {
  // CHECK-LABEL: @test_vsha1su1q_u32
  return vsha1su1q_u32(w0_3, w12_15);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha1su1(<4 x i32> %w0_3, <4 x i32> %w12_15)
}
51 
// SHA256 schedule update 0.
uint32x4_t test_vsha256su0q_u32(uint32x4_t w0_3, uint32x4_t w4_7) {
  // CHECK-LABEL: @test_vsha256su0q_u32
  return vsha256su0q_u32(w0_3, w4_7);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha256su0(<4 x i32> %w0_3, <4 x i32> %w4_7)
}
57 
// SHA1 hash update (choose).
uint32x4_t test_vsha1cq_u32(uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk) {
  // CHECK-LABEL: @test_vsha1cq_u32
  return vsha1cq_u32(hash_abcd, hash_e, wk);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha1c(<4 x i32> %hash_abcd, i32 %hash_e, <4 x i32> %wk)
}
63 
// SHA1 hash update (parity).
uint32x4_t test_vsha1pq_u32(uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk) {
  // CHECK-LABEL: @test_vsha1pq_u32
  return vsha1pq_u32(hash_abcd, hash_e, wk);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha1p(<4 x i32> %hash_abcd, i32 %hash_e, <4 x i32> %wk)
}
69 
// SHA1 hash update (majority).
uint32x4_t test_vsha1mq_u32(uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk) {
  // CHECK-LABEL: @test_vsha1mq_u32
  return vsha1mq_u32(hash_abcd, hash_e, wk);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha1m(<4 x i32> %hash_abcd, i32 %hash_e, <4 x i32> %wk)
}
75 
// SHA1 schedule update 0.
uint32x4_t test_vsha1su0q_u32(uint32x4_t w0_3, uint32x4_t w4_7, uint32x4_t w8_11) {
  // CHECK-LABEL: @test_vsha1su0q_u32
  return vsha1su0q_u32(w0_3, w4_7, w8_11);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha1su0(<4 x i32> %w0_3, <4 x i32> %w4_7, <4 x i32> %w8_11)
}
81 
// SHA256 hash update (part 1).
uint32x4_t test_vsha256hq_u32(uint32x4_t hash_abcd, uint32x4_t hash_efgh, uint32x4_t wk) {
  // CHECK-LABEL: @test_vsha256hq_u32
  return vsha256hq_u32(hash_abcd, hash_efgh, wk);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha256h(<4 x i32> %hash_abcd, <4 x i32> %hash_efgh, <4 x i32> %wk)
}
87 
// SHA256 hash update (part 2).
uint32x4_t test_vsha256h2q_u32(uint32x4_t hash_efgh, uint32x4_t hash_abcd, uint32x4_t wk) {
  // CHECK-LABEL: @test_vsha256h2q_u32
  return vsha256h2q_u32(hash_efgh, hash_abcd, wk);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha256h2(<4 x i32> %hash_efgh, <4 x i32> %hash_abcd, <4 x i32> %wk)
}
93 
// SHA256 schedule update 1.
uint32x4_t test_vsha256su1q_u32(uint32x4_t w0_3, uint32x4_t w8_11, uint32x4_t w12_15) {
  // CHECK-LABEL: @test_vsha256su1q_u32
  return vsha256su1q_u32(w0_3, w8_11, w12_15);
  // CHECK: call <4 x i32> @llvm.{{arm.neon|aarch64.crypto}}.sha256su1(<4 x i32> %w0_3, <4 x i32> %w8_11, <4 x i32> %w12_15)
}