/*
 * AArch64 specific clmul acceleration.
 * SPDX-License-Identifier: GPL-2.0-or-later
 */

#ifndef AARCH64_HOST_CRYPTO_CLMUL_H
#define AARCH64_HOST_CRYPTO_CLMUL_H

#include "host/cpuinfo.h"
#include <arm_neon.h>

/*
 * 64x64->128 pmull is available with FEAT_PMULL.
 * Both FEAT_AES and FEAT_PMULL are covered under the same macro.
 */
#ifdef __ARM_FEATURE_AES
/* Crypto extension guaranteed at compile time: always accelerated. */
# define HAVE_CLMUL_ACCEL true
#else
/* Otherwise decide at runtime from the cached cpuinfo feature bits. */
# define HAVE_CLMUL_ACCEL likely(cpuinfo & CPUINFO_PMULL)
#endif
#if !defined(__ARM_FEATURE_AES) && defined(CONFIG_ARM_AES_BUILTIN)
/*
 * The compiler provides the vmull_p64 builtin but the baseline ISA
 * does not include the crypto extension: enable it for just this
 * function via a per-function target attribute.
 */
# define ATTR_CLMUL_ACCEL __attribute__((target("+crypto")))
#else
# define ATTR_CLMUL_ACCEL
#endif

/*
 * clmul_64_accel: carry-less (polynomial, GF(2)) 64x64->128-bit
 * multiply of @n by @m using the AArch64 PMULL instruction.
 * Callers are presumably expected to have checked HAVE_CLMUL_ACCEL
 * first — verify against call sites.
 */
static inline Int128 ATTR_CLMUL_ACCEL
clmul_64_accel(uint64_t n, uint64_t m)
{
    /* Type-pun the poly128_t result into Int128 via a union. */
    union { poly128_t v; Int128 s; } u;

#ifdef CONFIG_ARM_AES_BUILTIN
    u.v = vmull_p64((poly64_t)n, (poly64_t)m);
#else
    /*
     * Builtin unavailable: emit PMULL directly.  The .arch_extension
     * directive lets the assembler accept the instruction even though
     * the build's baseline ISA lacks it; the "w" constraints place the
     * operands in SIMD registers, and .1d selects the low 64-bit lane
     * of each source while .1q names the full 128-bit destination.
     */
    asm(".arch_extension aes\n\t"
        "pmull %0.1q, %1.1d, %2.1d" : "=w"(u.v) : "w"(n), "w"(m));
#endif
    return u.s;
}

#endif /* AARCH64_HOST_CRYPTO_CLMUL_H */