/*
 * AES round fragments, generic version
 * SPDX-License-Identifier: GPL-2.0-or-later
 *
 * Copyright (C) 2023 Linaro, Ltd.
 */

#ifndef CRYPTO_AES_ROUND_H
#define CRYPTO_AES_ROUND_H

/* Hosts with acceleration will usually need a 16-byte vector type. */
typedef uint8_t AESStateVec __attribute__((vector_size(16)));

typedef union {
    uint8_t b[16];
    uint32_t w[4];
    uint64_t d[2];
    AESStateVec v;
} AESState;

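/*
 * Illustrative sketch (not part of the QEMU API): all four union views
 * alias the same 16 bytes, so a state filled through the byte view can
 * be compared or combined through the wider views.
 */
static inline bool aes_state_eq_example(const AESState *a, const AESState *b)
{
    /* Two 64-bit compares cover the whole 16-byte state. */
    return a->d[0] == b->d[0] && a->d[1] == b->d[1];
}
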
#include "host/crypto/aes-round.h"

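/*
 * The per-host header included above is expected to supply HAVE_AES_ACCEL
 * and the *_accel entry points used by the inline wrappers below.  As a
 * sketch (illustrative only, not a real QEMU file), a host without
 * acceleration could satisfy that contract along these lines:
 */
#if 0
#define HAVE_AES_ACCEL  false

static inline void aesenc_SB_SR_AK_accel(AESState *r, const AESState *st,
                                         const AESState *rk, bool be)
{
    /* Unreachable: every caller tests HAVE_AES_ACCEL first. */
    __builtin_unreachable();
}
#endif
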
/*
 * Perform SubBytes + ShiftRows + AddRoundKey.
 *
 * The _gen variant expects the state in host byte order and the _genrev
 * variant a byte-reversed state; the inline wrapper below dispatches to
 * the host accelerator when available, otherwise to whichever generic
 * variant matches the requested endianness.
 */

void aesenc_SB_SR_AK_gen(AESState *ret, const AESState *st,
                         const AESState *rk);
void aesenc_SB_SR_AK_genrev(AESState *ret, const AESState *st,
                            const AESState *rk);

static inline void aesenc_SB_SR_AK(AESState *r, const AESState *st,
                                   const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesenc_SB_SR_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesenc_SB_SR_AK_gen(r, st, rk);
    } else {
        aesenc_SB_SR_AK_genrev(r, st, rk);
    }
}

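/*
 * Usage sketch (hypothetical helper, not QEMU API): the final AES
 * encryption round omits MixColumns, so it is exactly this fragment
 * applied with the last round key.
 */
static inline void aes_final_enc_round_example(AESState *out,
                                               const AESState *in,
                                               const AESState *last_rk)
{
    /* 'false' assumes a little-endian state layout for this example. */
    aesenc_SB_SR_AK(out, in, last_rk, false);
}
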
/*
 * Perform InvSubBytes + InvShiftRows + AddRoundKey.
 *
 * The _gen, _genrev and _accel variants mirror the encryption fragment
 * above.
 */

void aesdec_ISB_ISR_AK_gen(AESState *ret, const AESState *st,
                           const AESState *rk);
void aesdec_ISB_ISR_AK_genrev(AESState *ret, const AESState *st,
                              const AESState *rk);

static inline void aesdec_ISB_ISR_AK(AESState *r, const AESState *st,
                                     const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_ISB_ISR_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_ISB_ISR_AK_gen(r, st, rk);
    } else {
        aesdec_ISB_ISR_AK_genrev(r, st, rk);
    }
}

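/*
 * Usage sketch (hypothetical helper, not QEMU API): the last round of
 * the straightforward inverse cipher applies InvShiftRows, InvSubBytes
 * and AddRoundKey with the first round key, which is exactly this
 * fragment.
 */
static inline void aes_final_dec_round_example(AESState *out,
                                               const AESState *in,
                                               const AESState *rk0)
{
    aesdec_ISB_ISR_AK(out, in, rk0, false);
}
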
#endif /* CRYPTO_AES_ROUND_H */