/*
 * AES round fragments, generic version
 * SPDX-License-Identifier: GPL-2.0-or-later
 *
 * Copyright (C) 2023 Linaro, Ltd.
 */

#ifndef CRYPTO_AES_ROUND_H
#define CRYPTO_AES_ROUND_H

/*
 * Hosts with acceleration will usually need a 16-byte vector type.
 * NOTE(review): relies on the GCC/Clang vector_size extension; assumes
 * uint8_t et al. are already in scope (e.g. via an osdep-style header
 * included before this one) -- TODO confirm against the including files.
 */
typedef uint8_t AESStateVec __attribute__((vector_size(16)));

/*
 * One 128-bit AES state, viewable as 16 bytes, four 32-bit words,
 * two 64-bit doublewords, or the host vector type above.
 */
typedef union {
    uint8_t b[16];
    uint32_t w[4];
    uint64_t d[2];
    AESStateVec v;
} AESState;

/*
 * The host header supplies HAVE_AES_ACCEL and the aes*_accel entry
 * points used by the inline dispatchers below; it is included after
 * AESState so it can use that type.
 */
#include "host/crypto/aes-round.h"

/*
 * Perform MixColumns.
2504e1f30eSRichard Henderson */ 2604e1f30eSRichard Henderson 2704e1f30eSRichard Henderson void aesenc_MC_gen(AESState *ret, const AESState *st); 2804e1f30eSRichard Henderson void aesenc_MC_genrev(AESState *ret, const AESState *st); 2904e1f30eSRichard Henderson 3004e1f30eSRichard Henderson static inline void aesenc_MC(AESState *r, const AESState *st, bool be) 3104e1f30eSRichard Henderson { 3204e1f30eSRichard Henderson if (HAVE_AES_ACCEL) { 3304e1f30eSRichard Henderson aesenc_MC_accel(r, st, be); 3404e1f30eSRichard Henderson } else if (HOST_BIG_ENDIAN == be) { 3504e1f30eSRichard Henderson aesenc_MC_gen(r, st); 3604e1f30eSRichard Henderson } else { 3704e1f30eSRichard Henderson aesenc_MC_genrev(r, st); 3804e1f30eSRichard Henderson } 3904e1f30eSRichard Henderson } 4004e1f30eSRichard Henderson 4104e1f30eSRichard Henderson /* 426b0a96ceSRichard Henderson * Perform SubBytes + ShiftRows + AddRoundKey. 436b0a96ceSRichard Henderson */ 446b0a96ceSRichard Henderson 456b0a96ceSRichard Henderson void aesenc_SB_SR_AK_gen(AESState *ret, const AESState *st, 466b0a96ceSRichard Henderson const AESState *rk); 476b0a96ceSRichard Henderson void aesenc_SB_SR_AK_genrev(AESState *ret, const AESState *st, 486b0a96ceSRichard Henderson const AESState *rk); 496b0a96ceSRichard Henderson 506b0a96ceSRichard Henderson static inline void aesenc_SB_SR_AK(AESState *r, const AESState *st, 516b0a96ceSRichard Henderson const AESState *rk, bool be) 526b0a96ceSRichard Henderson { 536b0a96ceSRichard Henderson if (HAVE_AES_ACCEL) { 546b0a96ceSRichard Henderson aesenc_SB_SR_AK_accel(r, st, rk, be); 556b0a96ceSRichard Henderson } else if (HOST_BIG_ENDIAN == be) { 566b0a96ceSRichard Henderson aesenc_SB_SR_AK_gen(r, st, rk); 576b0a96ceSRichard Henderson } else { 586b0a96ceSRichard Henderson aesenc_SB_SR_AK_genrev(r, st, rk); 596b0a96ceSRichard Henderson } 606b0a96ceSRichard Henderson } 616b0a96ceSRichard Henderson 62192fa849SRichard Henderson /* 63*7c58cb97SRichard Henderson * Perform SubBytes + ShiftRows + 
MixColumns + AddRoundKey. 64*7c58cb97SRichard Henderson */ 65*7c58cb97SRichard Henderson 66*7c58cb97SRichard Henderson void aesenc_SB_SR_MC_AK_gen(AESState *ret, const AESState *st, 67*7c58cb97SRichard Henderson const AESState *rk); 68*7c58cb97SRichard Henderson void aesenc_SB_SR_MC_AK_genrev(AESState *ret, const AESState *st, 69*7c58cb97SRichard Henderson const AESState *rk); 70*7c58cb97SRichard Henderson 71*7c58cb97SRichard Henderson static inline void aesenc_SB_SR_MC_AK(AESState *r, const AESState *st, 72*7c58cb97SRichard Henderson const AESState *rk, bool be) 73*7c58cb97SRichard Henderson { 74*7c58cb97SRichard Henderson if (HAVE_AES_ACCEL) { 75*7c58cb97SRichard Henderson aesenc_SB_SR_MC_AK_accel(r, st, rk, be); 76*7c58cb97SRichard Henderson } else if (HOST_BIG_ENDIAN == be) { 77*7c58cb97SRichard Henderson aesenc_SB_SR_MC_AK_gen(r, st, rk); 78*7c58cb97SRichard Henderson } else { 79*7c58cb97SRichard Henderson aesenc_SB_SR_MC_AK_genrev(r, st, rk); 80*7c58cb97SRichard Henderson } 81*7c58cb97SRichard Henderson } 82*7c58cb97SRichard Henderson 83*7c58cb97SRichard Henderson /* 845b41deb3SRichard Henderson * Perform InvMixColumns. 
855b41deb3SRichard Henderson */ 865b41deb3SRichard Henderson 875b41deb3SRichard Henderson void aesdec_IMC_gen(AESState *ret, const AESState *st); 885b41deb3SRichard Henderson void aesdec_IMC_genrev(AESState *ret, const AESState *st); 895b41deb3SRichard Henderson 905b41deb3SRichard Henderson static inline void aesdec_IMC(AESState *r, const AESState *st, bool be) 915b41deb3SRichard Henderson { 925b41deb3SRichard Henderson if (HAVE_AES_ACCEL) { 935b41deb3SRichard Henderson aesdec_IMC_accel(r, st, be); 945b41deb3SRichard Henderson } else if (HOST_BIG_ENDIAN == be) { 955b41deb3SRichard Henderson aesdec_IMC_gen(r, st); 965b41deb3SRichard Henderson } else { 975b41deb3SRichard Henderson aesdec_IMC_genrev(r, st); 985b41deb3SRichard Henderson } 995b41deb3SRichard Henderson } 1005b41deb3SRichard Henderson 1015b41deb3SRichard Henderson /* 102192fa849SRichard Henderson * Perform InvSubBytes + InvShiftRows + AddRoundKey. 103192fa849SRichard Henderson */ 104192fa849SRichard Henderson 105192fa849SRichard Henderson void aesdec_ISB_ISR_AK_gen(AESState *ret, const AESState *st, 106192fa849SRichard Henderson const AESState *rk); 107192fa849SRichard Henderson void aesdec_ISB_ISR_AK_genrev(AESState *ret, const AESState *st, 108192fa849SRichard Henderson const AESState *rk); 109192fa849SRichard Henderson 110192fa849SRichard Henderson static inline void aesdec_ISB_ISR_AK(AESState *r, const AESState *st, 111192fa849SRichard Henderson const AESState *rk, bool be) 112192fa849SRichard Henderson { 113192fa849SRichard Henderson if (HAVE_AES_ACCEL) { 114192fa849SRichard Henderson aesdec_ISB_ISR_AK_accel(r, st, rk, be); 115192fa849SRichard Henderson } else if (HOST_BIG_ENDIAN == be) { 116192fa849SRichard Henderson aesdec_ISB_ISR_AK_gen(r, st, rk); 117192fa849SRichard Henderson } else { 118192fa849SRichard Henderson aesdec_ISB_ISR_AK_genrev(r, st, rk); 119192fa849SRichard Henderson } 120192fa849SRichard Henderson } 121192fa849SRichard Henderson 1226b0a96ceSRichard Henderson #endif /* 
CRYPTO_AES_ROUND_H */ 123