/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * CTR: Counter mode
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_CTR_H
#define _CRYPTO_CTR_H

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/string.h>
#include <linux/types.h>

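/*
 * RFC 3686 ("Using AES Counter Mode With IPsec ESP") builds the 16-byte
 * counter block from a 4-byte nonce, an 8-byte per-packet IV and a 4-byte
 * big-endian block counter; the sizes below reflect that layout.
 */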
#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE 8
#define CTR_RFC3686_BLOCK_SIZE 16

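/*
 * crypto_ctr_encrypt_walk - perform CTR en/decryption using a block cipher
 * @req: skcipher request to process
 * @fn:  single-block encrypt helper; encrypts the current counter (second
 *       argument) into a keystream block (third argument) using @req's tfm
 *
 * Walks the request and XORs the data with the keystream obtained by
 * encrypting successive counter values taken from the request IV.  Since
 * CTR mode is its own inverse, the same routine serves for decryption.
 * Returns 0 on success, a negative errno from the skcipher walk, or
 * -EINVAL if the tfm's chunk size is not a power of two.
 */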
static inline int crypto_ctr_encrypt_walk(struct skcipher_request *req,
					  void (*fn)(struct crypto_skcipher *,
						     const u8 *, u8 *))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int blocksize = crypto_skcipher_chunksize(tfm);
	u8 buf[MAX_CIPHER_BLOCKSIZE];
	struct skcipher_walk walk;
	int err;

	/* avoid integer division due to variable blocksize parameter */
	if (WARN_ON_ONCE(!is_power_of_2(blocksize)))
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

		if (nbytes < walk.total) {
			/*
			 * Not the final step: carry any trailing partial
			 * block over to the next walk step so only whole
			 * blocks are consumed here.
			 */
			tail = walk.nbytes & (blocksize - 1);
			nbytes -= tail;
		}

		do {
			int bsize = min(nbytes, blocksize);

			/* encrypt the counter to get the next keystream block */
			fn(tfm, walk.iv, buf);

			/* XOR the data with the keystream, then bump the counter */
			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, blocksize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (nbytes > 0);

		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}
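
/*
 * Illustrative usage (a sketch, not part of this header's API): a driver
 * that can encrypt a single cipher block could plug that primitive into the
 * walker above.  The helpers my_cipher_encrypt_block and my_ctr_encrypt
 * below are hypothetical names, shown only to demonstrate the calling
 * convention of @fn.
 *
 *	static void my_cipher_encrypt_block(struct crypto_skcipher *tfm,
 *					    const u8 *in, u8 *out)
 *	{
 *		... encrypt one block of 'in' into 'out' with tfm's key ...
 *	}
 *
 *	static int my_ctr_encrypt(struct skcipher_request *req)
 *	{
 *		return crypto_ctr_encrypt_walk(req, my_cipher_encrypt_block);
 *	}
 */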

#endif  /* _CRYPTO_CTR_H */