
Searched refs:AES_BLOCK_SIZE (Results 1 – 25 of 114) sorted by relevance

/linux/arch/arm64/crypto/
aes-glue.c
134 u8 dg[AES_BLOCK_SIZE];
465 u8 buf[AES_BLOCK_SIZE]; in xctr_encrypt()
510 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
712 .ivsize = AES_BLOCK_SIZE,
727 .ivsize = AES_BLOCK_SIZE,
743 .ivsize = AES_BLOCK_SIZE,
852 u8 key[AES_BLOCK_SIZE]; in xcbc_setkey()
900 in += AES_BLOCK_SIZE; in mac_do_update()
921 len %= AES_BLOCK_SIZE; in mac_update()
984 2 * AES_BLOCK_SIZE,
[all …]
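
The hits in mac_do_update()/mac_update() show the usual MAC walk: consume whole blocks, keep the remainder buffered for the next call. A minimal standalone sketch of that pattern, with mac_do_block() as a trivial placeholder for the real CE/NEON primitive (it only XORs, it is not AES):

#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16
typedef uint8_t u8;

/* placeholder: fold one block into the digest; the real code encrypts too */
static void mac_do_block(u8 dg[AES_BLOCK_SIZE], const u8 *in)
{
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		dg[i] ^= in[i];
}

/* returns the number of bytes left over, i.e. len % AES_BLOCK_SIZE */
static size_t mac_update_sketch(u8 dg[AES_BLOCK_SIZE], const u8 *in, size_t len)
{
	while (len >= AES_BLOCK_SIZE) {
		mac_do_block(dg, in);
		in += AES_BLOCK_SIZE;	/* cf. "in += AES_BLOCK_SIZE" above */
		len -= AES_BLOCK_SIZE;
	}
	return len;			/* cf. "len %= AES_BLOCK_SIZE" above */
}
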
aes-neonbs-glue.c
61 } __aligned(AES_BLOCK_SIZE);
226 dst += blocks * AES_BLOCK_SIZE; in ctr_encrypt()
227 src += blocks * AES_BLOCK_SIZE; in ctr_encrypt()
230 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
297 AES_BLOCK_SIZE) - 2; in __xts_crypt()
333 in += blocks * AES_BLOCK_SIZE; in __xts_crypt()
402 .walksize = 8 * AES_BLOCK_SIZE,
417 .ivsize = AES_BLOCK_SIZE,
431 .chunksize = AES_BLOCK_SIZE,
433 .ivsize = AES_BLOCK_SIZE,
[all …]
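
ctr_encrypt() here advances src/dst by blocks * AES_BLOCK_SIZE after the bulk NEON call and bounces the final partial block through a full-size stack buffer (the "u8 buf[AES_BLOCK_SIZE]" hit). A self-contained sketch of that walk, with a toy XOR-with-counter block standing in for the real cipher:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16
typedef uint8_t u8;

/* toy stand-in for one CTR block: XOR with the counter, then increment it
 * big-endian; a real driver runs AES over the counter instead */
static void ctr_block(u8 *dst, const u8 *src, u8 ctr[AES_BLOCK_SIZE])
{
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		dst[i] = src[i] ^ ctr[i];
	for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
		if (++ctr[i] != 0)
			break;
}

static void ctr_encrypt_walk(u8 *dst, const u8 *src, size_t len, u8 ctr[AES_BLOCK_SIZE])
{
	size_t blocks = len / AES_BLOCK_SIZE;

	for (size_t i = 0; i < blocks; i++)
		ctr_block(dst + i * AES_BLOCK_SIZE, src + i * AES_BLOCK_SIZE, ctr);
	dst += blocks * AES_BLOCK_SIZE;	/* cf. the src/dst updates above */
	src += blocks * AES_BLOCK_SIZE;
	len -= blocks * AES_BLOCK_SIZE;

	if (len) {			/* partial final block */
		u8 buf[AES_BLOCK_SIZE];

		memcpy(buf, src, len);
		ctr_block(buf, buf, ctr);
		memcpy(dst, buf, len);
	}
}
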
aes-ce-ccm-glue.c
106 u32 blocks = abytes / AES_BLOCK_SIZE; in ce_aes_ccm_auth_data()
142 u32 macp = AES_BLOCK_SIZE; in ccm_calculate_auth_mac()
184 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_encrypt()
185 u8 orig_iv[AES_BLOCK_SIZE]; in ccm_encrypt()
209 u8 buf[AES_BLOCK_SIZE]; in ccm_encrypt()
251 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_decrypt()
252 u8 orig_iv[AES_BLOCK_SIZE]; in ccm_decrypt()
276 u8 buf[AES_BLOCK_SIZE]; in ccm_decrypt()
324 .ivsize = AES_BLOCK_SIZE,
325 .chunksize = AES_BLOCK_SIZE,
[all …]
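
ce_aes_ccm_auth_data() counts whole blocks of associated data with abytes / AES_BLOCK_SIZE, and ccm_calculate_auth_mac() seeds macp with AES_BLOCK_SIZE so the first data byte triggers an encryption of the already-full MAC block. A byte-level sketch of that CBC-MAC accumulation (enc_mac() is a trivial placeholder, not AES):

#include <stdint.h>

#define AES_BLOCK_SIZE 16
typedef uint8_t u8;
typedef uint32_t u32;

/* placeholder for encrypting the mac block in place with the AES key */
static void enc_mac(u8 mac[AES_BLOCK_SIZE])
{
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		mac[i] = (u8)(mac[i] + 1);
}

/* returns the new position inside the mac block */
static u32 ccm_auth_sketch(u8 mac[AES_BLOCK_SIZE], u32 macp, const u8 *in, u32 abytes)
{
	while (abytes) {
		u32 n = AES_BLOCK_SIZE - macp;

		if (!n) {		/* block full: encrypt it, start over */
			enc_mac(mac);
			macp = 0;
			n = AES_BLOCK_SIZE;
		}
		if (n > abytes)
			n = abytes;
		for (u32 i = 0; i < n; i++)
			mac[macp + i] ^= in[i];
		macp += n;
		in += n;
		abytes -= n;
	}
	return macp;
}
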
ghash-ce-glue.c
341 u8 buf[AES_BLOCK_SIZE]; in gcm_encrypt()
405 u8 otag[AES_BLOCK_SIZE]; in gcm_decrypt()
406 u8 buf[AES_BLOCK_SIZE]; in gcm_decrypt()
466 u8 iv[AES_BLOCK_SIZE]; in gcm_aes_encrypt()
474 u8 iv[AES_BLOCK_SIZE]; in gcm_aes_decrypt()
504 u8 iv[AES_BLOCK_SIZE]; in rfc4106_encrypt()
517 u8 iv[AES_BLOCK_SIZE]; in rfc4106_decrypt()
528 .chunksize = AES_BLOCK_SIZE,
529 .maxauthsize = AES_BLOCK_SIZE,
544 .chunksize = AES_BLOCK_SIZE,
[all …]
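
On the decrypt path gcm_decrypt() keeps the transmitted tag in otag[AES_BLOCK_SIZE] and must compare it with the recomputed tag without leaking timing. A sketch in the spirit of the kernel's crypto_memneq():

#include <stddef.h>
#include <stdint.h>

typedef uint8_t u8;

/* returns nonzero if the tags differ; no early exit, so the run time does
 * not depend on where the first mismatching byte is */
static int tag_neq(const u8 *otag, const u8 *tag, size_t len)
{
	u8 diff = 0;

	for (size_t i = 0; i < len; i++)
		diff |= otag[i] ^ tag[i];
	return diff != 0;
}
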
/linux/arch/riscv/crypto/
aes-riscv64-glue.c
27 const u8 in[AES_BLOCK_SIZE],
28 u8 out[AES_BLOCK_SIZE]);
31 u8 out[AES_BLOCK_SIZE]);
51 u8 iv[AES_BLOCK_SIZE]);
56 u8 tweak[AES_BLOCK_SIZE]);
61 u8 tweak[AES_BLOCK_SIZE]);
238 AES_BLOCK_SIZE); in riscv64_aes_cbc_cts_crypt()
498 .ivsize = AES_BLOCK_SIZE,
513 .ivsize = AES_BLOCK_SIZE,
532 .ivsize = AES_BLOCK_SIZE,
[all …]
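
The riscv64 glue declares its assembly entry points with fixed-size array parameters: C does not enforce the sizes (the arrays still decay to pointers), but they document that every in/out/iv/tweak buffer is exactly one AES block. A hypothetical declaration in the same idiom:

#define AES_BLOCK_SIZE 16
typedef unsigned char u8;

/* hypothetical single-block primitive, mirroring the prototype style above */
void aes_encrypt_one(const void *key,
		     const u8 in[AES_BLOCK_SIZE],
		     u8 out[AES_BLOCK_SIZE]);
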
/linux/arch/arm/crypto/
aes-neonbs-glue.c
49 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
240 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
345 u8 buf[2 * AES_BLOCK_SIZE]; in __xts_crypt()
391 AES_BLOCK_SIZE, 0); in __xts_crypt()
405 AES_BLOCK_SIZE + tail, 1); in __xts_crypt()
447 .ivsize = AES_BLOCK_SIZE,
464 .chunksize = AES_BLOCK_SIZE,
466 .ivsize = AES_BLOCK_SIZE,
480 .chunksize = AES_BLOCK_SIZE,
482 .ivsize = AES_BLOCK_SIZE,
[all …]
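
The "AES_BLOCK_SIZE + tail" walk in this __xts_crypt() is ciphertext stealing: when the message is not a whole number of blocks, the last full block and the tail are processed together as one chunk. A sketch of how the lengths split, assuming len is at least one block:

#include <stddef.h>

#define AES_BLOCK_SIZE 16

static void xts_split(size_t len, size_t *bulk, size_t *cts)
{
	size_t tail = len % AES_BLOCK_SIZE;

	if (!tail) {			/* block-aligned: no stealing needed */
		*bulk = len;
		*cts = 0;
	} else {			/* final full block + tail go together */
		*cts = AES_BLOCK_SIZE + tail;
		*bulk = len - *cts;
	}
}
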
aes-ce-glue.c
54 u8 b[AES_BLOCK_SIZE];
462 AES_BLOCK_SIZE) - 2; in xts_encrypt()
534 AES_BLOCK_SIZE) - 2; in xts_decrypt()
613 .ivsize = AES_BLOCK_SIZE,
628 .ivsize = AES_BLOCK_SIZE,
629 .walksize = 2 * AES_BLOCK_SIZE,
644 .ivsize = AES_BLOCK_SIZE,
645 .chunksize = AES_BLOCK_SIZE,
659 .ivsize = AES_BLOCK_SIZE,
660 .chunksize = AES_BLOCK_SIZE,
[all …]
ghash-ce-glue.c
495 u8 buf[AES_BLOCK_SIZE]; in gcm_encrypt()
524 src += nblocks * AES_BLOCK_SIZE; in gcm_encrypt()
525 dst += nblocks * AES_BLOCK_SIZE; in gcm_encrypt()
586 u8 otag[AES_BLOCK_SIZE]; in gcm_decrypt()
587 u8 buf[AES_BLOCK_SIZE]; in gcm_decrypt()
620 src += nblocks * AES_BLOCK_SIZE; in gcm_decrypt()
621 dst += nblocks * AES_BLOCK_SIZE; in gcm_decrypt()
721 .chunksize = AES_BLOCK_SIZE,
722 .maxauthsize = AES_BLOCK_SIZE,
736 .chunksize = AES_BLOCK_SIZE,
[all …]
/linux/lib/crypto/
aescfb.c
43 int len, const u8 iv[AES_BLOCK_SIZE]) in aescfb_encrypt() argument
45 u8 ks[AES_BLOCK_SIZE]; in aescfb_encrypt()
53 dst += AES_BLOCK_SIZE; in aescfb_encrypt()
54 src += AES_BLOCK_SIZE; in aescfb_encrypt()
55 len -= AES_BLOCK_SIZE; in aescfb_encrypt()
74 u8 ks[2][AES_BLOCK_SIZE]; in aescfb_decrypt()
79 if (len > AES_BLOCK_SIZE) in aescfb_decrypt()
89 dst += AES_BLOCK_SIZE; in aescfb_decrypt()
90 src += AES_BLOCK_SIZE; in aescfb_decrypt()
91 len -= AES_BLOCK_SIZE; in aescfb_decrypt()
[all …]
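
aescfb.c shows CFB's defining property: the keystream for each block is the encryption of the previous ciphertext block, so src/dst/len advance by AES_BLOCK_SIZE and the tail needs no padding. A standalone sketch (aes_enc_stub() is a placeholder, not the library's cipher):

#include <stdint.h>

#define AES_BLOCK_SIZE 16
typedef uint8_t u8;

/* placeholder for encrypting one block with the expanded AES key */
static void aes_enc_stub(u8 ks[AES_BLOCK_SIZE], const u8 v[AES_BLOCK_SIZE])
{
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		ks[i] = (u8)(v[i] ^ 0xaa);
}

static void cfb_encrypt_sketch(u8 *dst, const u8 *src, int len, const u8 iv[AES_BLOCK_SIZE])
{
	u8 ks[AES_BLOCK_SIZE];
	const u8 *v = iv;

	while (len > 0) {
		int n = len < AES_BLOCK_SIZE ? len : AES_BLOCK_SIZE;

		aes_enc_stub(ks, v);	/* keystream = E_k(previous ciphertext) */
		for (int i = 0; i < n; i++)
			dst[i] = src[i] ^ ks[i];
		v = dst;		/* ciphertext feedback */
		dst += AES_BLOCK_SIZE;	/* cf. the dst/src/len updates above */
		src += AES_BLOCK_SIZE;
		len -= AES_BLOCK_SIZE;
	}
}
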
aesgcm.c
49 u8 kin[AES_BLOCK_SIZE] = {}; in aesgcm_expandkey()
93 u8 buf[AES_BLOCK_SIZE]; in aesgcm_mac()
111 u8 buf[AES_BLOCK_SIZE]; in aesgcm_crypt()
124 crypto_xor_cpy(dst, src, buf, min(len, AES_BLOCK_SIZE)); in aesgcm_crypt()
126 dst += AES_BLOCK_SIZE; in aesgcm_crypt()
127 src += AES_BLOCK_SIZE; in aesgcm_crypt()
128 len -= AES_BLOCK_SIZE; in aesgcm_crypt()
183 u8 tagbuf[AES_BLOCK_SIZE]; in aesgcm_decrypt()
697 u8 tagbuf[AES_BLOCK_SIZE]; in libaesgcm_init()
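The "crypto_xor_cpy(dst, src, buf, min(len, AES_BLOCK_SIZE))" hit in aesgcm_crypt() is what makes CTR behave as a stream cipher: the last call clamps the XOR to the bytes that actually remain, so a partial final block never reads or writes past the message. A portable equivalent of that helper:

#include <stddef.h>
#include <stdint.h>

typedef uint8_t u8;

/* dst = src XOR ks over n bytes; call with n = min(len, AES_BLOCK_SIZE) */
static void xor_cpy(u8 *dst, const u8 *src, const u8 *ks, size_t n)
{
	while (n--)
		*dst++ = *src++ ^ *ks++;
}
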
/linux/net/mac80211/
fils_aead.c
28 u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {}; in aes_s2v()
41 crypto_xor(d, tmp, AES_BLOCK_SIZE); in aes_s2v()
46 if (len[i] >= AES_BLOCK_SIZE) { in aes_s2v()
51 AES_BLOCK_SIZE); in aes_s2v()
71 u8 v[AES_BLOCK_SIZE]; in aes_siv_encrypt()
106 memcpy(out, v, AES_BLOCK_SIZE); in aes_siv_encrypt()
155 u8 frame_iv[AES_BLOCK_SIZE], iv[AES_BLOCK_SIZE]; in aes_siv_decrypt()
156 u8 check[AES_BLOCK_SIZE]; in aes_siv_decrypt()
263 skb_put(skb, AES_BLOCK_SIZE); in fils_encrypt_assoc_req()
317 if (crypt_len < AES_BLOCK_SIZE) { in fils_decrypt_assoc_resp()
[all …]
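
fils_aead.c implements AES-SIV (RFC 5297): aes_s2v() doubles the accumulator d in GF(2^128) and XORs in the CMAC of each input component, which is the crypto_xor(d, tmp, AES_BLOCK_SIZE) hit above. A sketch of the doubling step, the same "dbl" used for CMAC subkey generation:

#include <stdint.h>

#define AES_BLOCK_SIZE 16
typedef uint8_t u8;

static void gf128_dbl(u8 d[AES_BLOCK_SIZE])
{
	int carry = d[0] >> 7;		/* bit shifted out on the left */

	for (int i = 0; i < AES_BLOCK_SIZE - 1; i++)
		d[i] = (u8)((d[i] << 1) | (d[i + 1] >> 7));
	d[AES_BLOCK_SIZE - 1] = (u8)(d[AES_BLOCK_SIZE - 1] << 1);
	if (carry)
		d[AES_BLOCK_SIZE - 1] ^= 0x87;	/* reduce mod x^128 + x^7 + x^2 + x + 1 */
}
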
/linux/drivers/crypto/nx/
nx-aes-xcbc.c
23 u8 state[AES_BLOCK_SIZE];
25 u8 buffer[AES_BLOCK_SIZE];
64 u8 keys[2][AES_BLOCK_SIZE]; in nx_xcbc_empty()
114 len = AES_BLOCK_SIZE; in nx_xcbc_empty()
118 if (len != AES_BLOCK_SIZE) in nx_xcbc_empty()
189 if (total <= AES_BLOCK_SIZE) { in nx_xcbc_update()
201 data_len = AES_BLOCK_SIZE; in nx_xcbc_update()
225 leftover = AES_BLOCK_SIZE; in nx_xcbc_update()
259 AES_BLOCK_SIZE); in nx_xcbc_update()
332 len = AES_BLOCK_SIZE; in nx_xcbc_final()
[all …]
nx-aes-gcm.c
112 if (nbytes <= AES_BLOCK_SIZE) { in nx_gca()
155 AES_BLOCK_SIZE); in nx_gca()
224 csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE); in gmac()
226 csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE); in gmac()
248 char out[AES_BLOCK_SIZE]; in gcm_empty()
266 len = AES_BLOCK_SIZE; in gcm_empty()
272 if (len != AES_BLOCK_SIZE) in gcm_empty()
378 csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE); in gcm_aes_nx_crypt()
380 csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE); in gcm_aes_nx_crypt()
482 .maxauthsize = AES_BLOCK_SIZE,
[all …]
/linux/arch/powerpc/crypto/
aes-spe-glue.c
327 u8 b[2][AES_BLOCK_SIZE]; in ppc_xts_encrypt()
330 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_encrypt()
365 u8 b[3][AES_BLOCK_SIZE]; in ppc_xts_decrypt()
369 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_decrypt()
388 AES_BLOCK_SIZE); in ppc_xts_decrypt()
418 .cra_blocksize = AES_BLOCK_SIZE,
438 .base.cra_blocksize = AES_BLOCK_SIZE,
455 .ivsize = AES_BLOCK_SIZE,
468 .ivsize = AES_BLOCK_SIZE,
472 .chunksize = AES_BLOCK_SIZE,
[all …]
aes_ctr.c
73 u8 keystream[AES_BLOCK_SIZE]; in p8_aes_ctr_final()
87 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
107 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
113 nbytes / AES_BLOCK_SIZE, in p8_aes_ctr_crypt()
120 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
121 } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
147 .ivsize = AES_BLOCK_SIZE,
148 .chunksize = AES_BLOCK_SIZE,
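p8_aes_ctr_crypt() bumps the counter with crypto_inc(walk.iv, AES_BLOCK_SIZE), which treats the whole IV as a single big-endian integer. A byte-wise sketch of that increment (the kernel's crypto_inc() works word-at-a-time where alignment allows, but the carry semantics are the same):

#include <stdint.h>

typedef uint8_t u8;

static void ctr_inc_sketch(u8 *ctr, unsigned int size)
{
	for (int i = (int)size - 1; i >= 0; i--)
		if (++ctr[i] != 0)
			break;		/* stop once a byte does not wrap around */
}
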
aes_xts.c
84 u8 tweak[AES_BLOCK_SIZE]; in p8_aes_xts_crypt()
87 if (req->cryptlen < AES_BLOCK_SIZE) in p8_aes_xts_crypt()
120 round_down(nbytes, AES_BLOCK_SIZE), in p8_aes_xts_crypt()
125 round_down(nbytes, AES_BLOCK_SIZE), in p8_aes_xts_crypt()
131 ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in p8_aes_xts_crypt()
152 .base.cra_blocksize = AES_BLOCK_SIZE,
161 .ivsize = AES_BLOCK_SIZE,
/linux/arch/s390/crypto/
aes_s390.c
66 u8 buf[AES_BLOCK_SIZE];
326 u8 iv[AES_BLOCK_SIZE]; in cbc_aes_crypt()
372 .ivsize = AES_BLOCK_SIZE,
523 .ivsize = AES_BLOCK_SIZE,
558 memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE); in __ctrblk_init()
559 crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE); in __ctrblk_init()
560 ctrptr += AES_BLOCK_SIZE; in __ctrblk_init()
581 n = AES_BLOCK_SIZE; in ctr_aes_crypt()
590 AES_BLOCK_SIZE); in ctr_aes_crypt()
624 .ivsize = AES_BLOCK_SIZE,
[all …]
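
The __ctrblk_init() hits show the s390 driver fanning one counter value out into a buffer of consecutive counter blocks, so a single CPACF call can process many blocks at once. A sketch of that expansion:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16
typedef uint8_t u8;

/* big-endian increment, as crypto_inc() does */
static void inc_block(u8 ctr[AES_BLOCK_SIZE])
{
	for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
		if (++ctr[i] != 0)
			break;
}

/* ctrptr[0..AES_BLOCK_SIZE-1] already holds the current counter value */
static void ctrblk_init_sketch(u8 *ctrptr, unsigned int nblocks)
{
	for (unsigned int i = 1; i < nblocks; i++) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		inc_block(ctrptr + AES_BLOCK_SIZE);	/* next = previous + 1 */
		ctrptr += AES_BLOCK_SIZE;
	}
}
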
paes_s390.c
334 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt()
389 .ivsize = AES_BLOCK_SIZE,
569 .ivsize = AES_BLOCK_SIZE,
635 memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE); in __ctrblk_init()
636 crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE); in __ctrblk_init()
637 ctrptr += AES_BLOCK_SIZE; in __ctrblk_init()
665 n = AES_BLOCK_SIZE; in ctr_paes_crypt()
674 AES_BLOCK_SIZE); in ctr_paes_crypt()
699 buf, AES_BLOCK_SIZE, in ctr_paes_crypt()
728 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/ccp/
ccp-crypto.h
109 u8 k1[AES_BLOCK_SIZE];
110 u8 k2[AES_BLOCK_SIZE];
115 u8 iv[AES_BLOCK_SIZE];
118 u8 tag[AES_BLOCK_SIZE];
122 u8 rfc3686_iv[AES_BLOCK_SIZE];
142 u8 iv[AES_BLOCK_SIZE];
146 u8 buf[AES_BLOCK_SIZE];
150 u8 pad[AES_BLOCK_SIZE];
158 u8 iv[AES_BLOCK_SIZE];
161 u8 buf[AES_BLOCK_SIZE];
[all …]
/linux/arch/x86/crypto/
aesni-intel_glue.c
302 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
324 nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
509 u8 keystream[AES_BLOCK_SIZE]; in ctr_crypt()
558 u8 keystream[AES_BLOCK_SIZE]; in xctr_crypt()
1004 u8 iv[AES_BLOCK_SIZE]) in aesni_xts_encrypt_iv() argument
1080 .ivsize = AES_BLOCK_SIZE,
1096 .ivsize = AES_BLOCK_SIZE,
1114 .ivsize = AES_BLOCK_SIZE,
1132 .ivsize = AES_BLOCK_SIZE,
1160 .ivsize = AES_BLOCK_SIZE,
[all …]
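
The "nbytes &= AES_BLOCK_SIZE - 1" hits in ecb_encrypt()/ecb_decrypt() rely on the block size being a power of two: masking with AES_BLOCK_SIZE - 1 is a cheap nbytes % AES_BLOCK_SIZE, leaving just the bytes that did not fill a whole block. As a one-liner:

#include <stddef.h>

#define AES_BLOCK_SIZE 16

static size_t leftover(size_t nbytes)
{
	return nbytes & (AES_BLOCK_SIZE - 1);	/* == nbytes % AES_BLOCK_SIZE */
}
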
/linux/drivers/crypto/
padlock-aes.c
329 .cra_blocksize = AES_BLOCK_SIZE,
359 nbytes / AES_BLOCK_SIZE); in ecb_aes_encrypt()
360 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_encrypt()
384 nbytes / AES_BLOCK_SIZE); in ecb_aes_decrypt()
385 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_decrypt()
398 .base.cra_blocksize = AES_BLOCK_SIZE,
425 nbytes / AES_BLOCK_SIZE); in cbc_aes_encrypt()
427 nbytes &= AES_BLOCK_SIZE - 1; in cbc_aes_encrypt()
451 nbytes / AES_BLOCK_SIZE); in cbc_aes_decrypt()
452 nbytes &= AES_BLOCK_SIZE - 1; in cbc_aes_decrypt()
[all …]
/linux/arch/sparc/crypto/
aes_glue.c
284 round_down(nbytes, AES_BLOCK_SIZE), in cbc_encrypt()
310 round_down(nbytes, AES_BLOCK_SIZE), in cbc_decrypt()
329 keystream, AES_BLOCK_SIZE); in ctr_crypt_final()
331 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()
350 round_down(nbytes, AES_BLOCK_SIZE), in ctr_crypt()
367 .cra_blocksize = AES_BLOCK_SIZE,
387 .base.cra_blocksize = AES_BLOCK_SIZE,
400 .base.cra_blocksize = AES_BLOCK_SIZE,
406 .ivsize = AES_BLOCK_SIZE,
420 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/ccree/
cc_aead.h
17 #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
52 u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
55 u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
56 u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
57 u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
/linux/drivers/crypto/intel/keembay/
keembay-ocs-aes-core.c
94 u8 last_ct_blk[AES_BLOCK_SIZE];
102 u8 in_tag[AES_BLOCK_SIZE];
105 u8 out_tag[AES_BLOCK_SIZE];
227 u8 tmp_buf1[AES_BLOCK_SIZE], tmp_buf2[AES_BLOCK_SIZE]; in sg_swap_blocks()
619 if (iv_size != AES_BLOCK_SIZE) in kmb_ocs_aead_validate_input()
1296 .base.ivsize = AES_BLOCK_SIZE,
1318 .base.ivsize = AES_BLOCK_SIZE,
1341 .base.ivsize = AES_BLOCK_SIZE,
1385 .base.ivsize = AES_BLOCK_SIZE,
1406 .base.ivsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/intel/qat/qat_common/
qat_algs.c
481 memcpy(key_reverse + AES_BLOCK_SIZE, key - AES_BLOCK_SIZE, in qat_alg_xts_reverse_key()
482 AES_BLOCK_SIZE); in qat_alg_xts_reverse_key()
1282 .cra_blocksize = AES_BLOCK_SIZE,
1291 .ivsize = AES_BLOCK_SIZE,
1299 .cra_blocksize = AES_BLOCK_SIZE,
1308 .ivsize = AES_BLOCK_SIZE,
1316 .cra_blocksize = AES_BLOCK_SIZE,
1325 .ivsize = AES_BLOCK_SIZE,
1346 .ivsize = AES_BLOCK_SIZE,
1364 .ivsize = AES_BLOCK_SIZE,
[all …]
