xref: /qemu/include/crypto/aes-round.h (revision 28e91474)
16b0a96ceSRichard Henderson /*
26b0a96ceSRichard Henderson  * AES round fragments, generic version
36b0a96ceSRichard Henderson  * SPDX-License-Identifier: GPL-2.0-or-later
46b0a96ceSRichard Henderson  *
56b0a96ceSRichard Henderson  * Copyright (C) 2023 Linaro, Ltd.
66b0a96ceSRichard Henderson  */
76b0a96ceSRichard Henderson 
86b0a96ceSRichard Henderson #ifndef CRYPTO_AES_ROUND_H
96b0a96ceSRichard Henderson #define CRYPTO_AES_ROUND_H
106b0a96ceSRichard Henderson 
/* Hosts with acceleration will usually need a 16-byte vector type. */
typedef uint8_t AESStateVec __attribute__((vector_size(16)));

/*
 * One 128-bit AES state (or round-key) block, accessible as bytes,
 * 32-bit words, 64-bit doublewords, or the host vector type.
 * All views alias the same 16 bytes of storage.
 */
typedef union {
    uint8_t b[16];
    uint32_t w[4];
    uint64_t d[2];
    AESStateVec v;
} AESState;
206b0a96ceSRichard Henderson 
216b0a96ceSRichard Henderson #include "host/crypto/aes-round.h"
226b0a96ceSRichard Henderson 
236b0a96ceSRichard Henderson /*
2404e1f30eSRichard Henderson  * Perform MixColumns.
2504e1f30eSRichard Henderson  */
2604e1f30eSRichard Henderson 
2704e1f30eSRichard Henderson void aesenc_MC_gen(AESState *ret, const AESState *st);
2804e1f30eSRichard Henderson void aesenc_MC_genrev(AESState *ret, const AESState *st);
2904e1f30eSRichard Henderson 
aesenc_MC(AESState * r,const AESState * st,bool be)3004e1f30eSRichard Henderson static inline void aesenc_MC(AESState *r, const AESState *st, bool be)
3104e1f30eSRichard Henderson {
3204e1f30eSRichard Henderson     if (HAVE_AES_ACCEL) {
3304e1f30eSRichard Henderson         aesenc_MC_accel(r, st, be);
3404e1f30eSRichard Henderson     } else if (HOST_BIG_ENDIAN == be) {
3504e1f30eSRichard Henderson         aesenc_MC_gen(r, st);
3604e1f30eSRichard Henderson     } else {
3704e1f30eSRichard Henderson         aesenc_MC_genrev(r, st);
3804e1f30eSRichard Henderson     }
3904e1f30eSRichard Henderson }
4004e1f30eSRichard Henderson 
4104e1f30eSRichard Henderson /*
426b0a96ceSRichard Henderson  * Perform SubBytes + ShiftRows + AddRoundKey.
436b0a96ceSRichard Henderson  */
446b0a96ceSRichard Henderson 
456b0a96ceSRichard Henderson void aesenc_SB_SR_AK_gen(AESState *ret, const AESState *st,
466b0a96ceSRichard Henderson                          const AESState *rk);
476b0a96ceSRichard Henderson void aesenc_SB_SR_AK_genrev(AESState *ret, const AESState *st,
486b0a96ceSRichard Henderson                             const AESState *rk);
496b0a96ceSRichard Henderson 
aesenc_SB_SR_AK(AESState * r,const AESState * st,const AESState * rk,bool be)506b0a96ceSRichard Henderson static inline void aesenc_SB_SR_AK(AESState *r, const AESState *st,
516b0a96ceSRichard Henderson                                    const AESState *rk, bool be)
526b0a96ceSRichard Henderson {
536b0a96ceSRichard Henderson     if (HAVE_AES_ACCEL) {
546b0a96ceSRichard Henderson         aesenc_SB_SR_AK_accel(r, st, rk, be);
556b0a96ceSRichard Henderson     } else if (HOST_BIG_ENDIAN == be) {
566b0a96ceSRichard Henderson         aesenc_SB_SR_AK_gen(r, st, rk);
576b0a96ceSRichard Henderson     } else {
586b0a96ceSRichard Henderson         aesenc_SB_SR_AK_genrev(r, st, rk);
596b0a96ceSRichard Henderson     }
606b0a96ceSRichard Henderson }
616b0a96ceSRichard Henderson 
62192fa849SRichard Henderson /*
637c58cb97SRichard Henderson  * Perform SubBytes + ShiftRows + MixColumns + AddRoundKey.
647c58cb97SRichard Henderson  */
657c58cb97SRichard Henderson 
667c58cb97SRichard Henderson void aesenc_SB_SR_MC_AK_gen(AESState *ret, const AESState *st,
677c58cb97SRichard Henderson                             const AESState *rk);
687c58cb97SRichard Henderson void aesenc_SB_SR_MC_AK_genrev(AESState *ret, const AESState *st,
697c58cb97SRichard Henderson                                const AESState *rk);
707c58cb97SRichard Henderson 
aesenc_SB_SR_MC_AK(AESState * r,const AESState * st,const AESState * rk,bool be)717c58cb97SRichard Henderson static inline void aesenc_SB_SR_MC_AK(AESState *r, const AESState *st,
727c58cb97SRichard Henderson                                       const AESState *rk, bool be)
737c58cb97SRichard Henderson {
747c58cb97SRichard Henderson     if (HAVE_AES_ACCEL) {
757c58cb97SRichard Henderson         aesenc_SB_SR_MC_AK_accel(r, st, rk, be);
767c58cb97SRichard Henderson     } else if (HOST_BIG_ENDIAN == be) {
777c58cb97SRichard Henderson         aesenc_SB_SR_MC_AK_gen(r, st, rk);
787c58cb97SRichard Henderson     } else {
797c58cb97SRichard Henderson         aesenc_SB_SR_MC_AK_genrev(r, st, rk);
807c58cb97SRichard Henderson     }
817c58cb97SRichard Henderson }
827c58cb97SRichard Henderson 
837c58cb97SRichard Henderson /*
845b41deb3SRichard Henderson  * Perform InvMixColumns.
855b41deb3SRichard Henderson  */
865b41deb3SRichard Henderson 
875b41deb3SRichard Henderson void aesdec_IMC_gen(AESState *ret, const AESState *st);
885b41deb3SRichard Henderson void aesdec_IMC_genrev(AESState *ret, const AESState *st);
895b41deb3SRichard Henderson 
aesdec_IMC(AESState * r,const AESState * st,bool be)905b41deb3SRichard Henderson static inline void aesdec_IMC(AESState *r, const AESState *st, bool be)
915b41deb3SRichard Henderson {
925b41deb3SRichard Henderson     if (HAVE_AES_ACCEL) {
935b41deb3SRichard Henderson         aesdec_IMC_accel(r, st, be);
945b41deb3SRichard Henderson     } else if (HOST_BIG_ENDIAN == be) {
955b41deb3SRichard Henderson         aesdec_IMC_gen(r, st);
965b41deb3SRichard Henderson     } else {
975b41deb3SRichard Henderson         aesdec_IMC_genrev(r, st);
985b41deb3SRichard Henderson     }
995b41deb3SRichard Henderson }
1005b41deb3SRichard Henderson 
1015b41deb3SRichard Henderson /*
102192fa849SRichard Henderson  * Perform InvSubBytes + InvShiftRows + AddRoundKey.
103192fa849SRichard Henderson  */
104192fa849SRichard Henderson 
105192fa849SRichard Henderson void aesdec_ISB_ISR_AK_gen(AESState *ret, const AESState *st,
106192fa849SRichard Henderson                            const AESState *rk);
107192fa849SRichard Henderson void aesdec_ISB_ISR_AK_genrev(AESState *ret, const AESState *st,
108192fa849SRichard Henderson                               const AESState *rk);
109192fa849SRichard Henderson 
aesdec_ISB_ISR_AK(AESState * r,const AESState * st,const AESState * rk,bool be)110192fa849SRichard Henderson static inline void aesdec_ISB_ISR_AK(AESState *r, const AESState *st,
111192fa849SRichard Henderson                                      const AESState *rk, bool be)
112192fa849SRichard Henderson {
113192fa849SRichard Henderson     if (HAVE_AES_ACCEL) {
114192fa849SRichard Henderson         aesdec_ISB_ISR_AK_accel(r, st, rk, be);
115192fa849SRichard Henderson     } else if (HOST_BIG_ENDIAN == be) {
116192fa849SRichard Henderson         aesdec_ISB_ISR_AK_gen(r, st, rk);
117192fa849SRichard Henderson     } else {
118192fa849SRichard Henderson         aesdec_ISB_ISR_AK_genrev(r, st, rk);
119192fa849SRichard Henderson     }
120192fa849SRichard Henderson }
121192fa849SRichard Henderson 
12215ff1598SRichard Henderson /*
123*28e91474SRichard Henderson  * Perform InvSubBytes + InvShiftRows + AddRoundKey + InvMixColumns.
124*28e91474SRichard Henderson  */
125*28e91474SRichard Henderson 
126*28e91474SRichard Henderson void aesdec_ISB_ISR_AK_IMC_gen(AESState *ret, const AESState *st,
127*28e91474SRichard Henderson                                const AESState *rk);
128*28e91474SRichard Henderson void aesdec_ISB_ISR_AK_IMC_genrev(AESState *ret, const AESState *st,
129*28e91474SRichard Henderson                                   const AESState *rk);
130*28e91474SRichard Henderson 
aesdec_ISB_ISR_AK_IMC(AESState * r,const AESState * st,const AESState * rk,bool be)131*28e91474SRichard Henderson static inline void aesdec_ISB_ISR_AK_IMC(AESState *r, const AESState *st,
132*28e91474SRichard Henderson                                          const AESState *rk, bool be)
133*28e91474SRichard Henderson {
134*28e91474SRichard Henderson     if (HAVE_AES_ACCEL) {
135*28e91474SRichard Henderson         aesdec_ISB_ISR_AK_IMC_accel(r, st, rk, be);
136*28e91474SRichard Henderson     } else if (HOST_BIG_ENDIAN == be) {
137*28e91474SRichard Henderson         aesdec_ISB_ISR_AK_IMC_gen(r, st, rk);
138*28e91474SRichard Henderson     } else {
139*28e91474SRichard Henderson         aesdec_ISB_ISR_AK_IMC_genrev(r, st, rk);
140*28e91474SRichard Henderson     }
141*28e91474SRichard Henderson }
142*28e91474SRichard Henderson 
143*28e91474SRichard Henderson /*
14415ff1598SRichard Henderson  * Perform InvSubBytes + InvShiftRows + InvMixColumns + AddRoundKey.
14515ff1598SRichard Henderson  */
14615ff1598SRichard Henderson 
14715ff1598SRichard Henderson void aesdec_ISB_ISR_IMC_AK_gen(AESState *ret, const AESState *st,
14815ff1598SRichard Henderson                                const AESState *rk);
14915ff1598SRichard Henderson void aesdec_ISB_ISR_IMC_AK_genrev(AESState *ret, const AESState *st,
15015ff1598SRichard Henderson                                   const AESState *rk);
15115ff1598SRichard Henderson 
aesdec_ISB_ISR_IMC_AK(AESState * r,const AESState * st,const AESState * rk,bool be)15215ff1598SRichard Henderson static inline void aesdec_ISB_ISR_IMC_AK(AESState *r, const AESState *st,
15315ff1598SRichard Henderson                                          const AESState *rk, bool be)
15415ff1598SRichard Henderson {
15515ff1598SRichard Henderson     if (HAVE_AES_ACCEL) {
15615ff1598SRichard Henderson         aesdec_ISB_ISR_IMC_AK_accel(r, st, rk, be);
15715ff1598SRichard Henderson     } else if (HOST_BIG_ENDIAN == be) {
15815ff1598SRichard Henderson         aesdec_ISB_ISR_IMC_AK_gen(r, st, rk);
15915ff1598SRichard Henderson     } else {
16015ff1598SRichard Henderson         aesdec_ISB_ISR_IMC_AK_genrev(r, st, rk);
16115ff1598SRichard Henderson     }
16215ff1598SRichard Henderson }
16315ff1598SRichard Henderson 
1646b0a96ceSRichard Henderson #endif /* CRYPTO_AES_ROUND_H */
165