xref: /linux/drivers/crypto/qce/common.c (revision 18daae5b)
197fb5e8dSThomas Gleixner // SPDX-License-Identifier: GPL-2.0-only
2ec8f5d8fSStanimir Varbanov /*
3ec8f5d8fSStanimir Varbanov  * Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
4ec8f5d8fSStanimir Varbanov  */
5ec8f5d8fSStanimir Varbanov 
6*18daae5bSHerbert Xu #include <crypto/internal/hash.h>
7ec8f5d8fSStanimir Varbanov #include <linux/err.h>
8ec8f5d8fSStanimir Varbanov #include <linux/interrupt.h>
9ec8f5d8fSStanimir Varbanov #include <linux/types.h>
10ec8f5d8fSStanimir Varbanov #include <crypto/scatterwalk.h>
11a24d22b2SEric Biggers #include <crypto/sha1.h>
12a24d22b2SEric Biggers #include <crypto/sha2.h>
13ec8f5d8fSStanimir Varbanov 
14ec8f5d8fSStanimir Varbanov #include "cipher.h"
15ec8f5d8fSStanimir Varbanov #include "common.h"
16ec8f5d8fSStanimir Varbanov #include "core.h"
17ec8f5d8fSStanimir Varbanov #include "regs-v5.h"
18ec8f5d8fSStanimir Varbanov #include "sha.h"
19db0018a8SThara Gopinath #include "aead.h"
20ec8f5d8fSStanimir Varbanov 
/* Read a 32-bit crypto engine register at byte @offset from the MMIO base. */
static inline u32 qce_read(struct qce_device *qce, u32 offset)
{
	return readl(qce->base + offset);
}
25ec8f5d8fSStanimir Varbanov 
/* Write @val to the 32-bit crypto engine register at byte @offset. */
static inline void qce_write(struct qce_device *qce, u32 offset, u32 val)
{
	writel(val, qce->base + offset);
}
30ec8f5d8fSStanimir Varbanov 
/*
 * Write @len consecutive 32-bit words from @val to the register file,
 * starting at byte @offset and advancing one word per register.
 */
static inline void qce_write_array(struct qce_device *qce, u32 offset,
				   const u32 *val, unsigned int len)
{
	unsigned int idx;

	for (idx = 0; idx < len; idx++)
		qce_write(qce, offset + idx * sizeof(u32), val[idx]);
}
39ec8f5d8fSStanimir Varbanov 
40ec8f5d8fSStanimir Varbanov static inline void
qce_clear_array(struct qce_device * qce,u32 offset,unsigned int len)41ec8f5d8fSStanimir Varbanov qce_clear_array(struct qce_device *qce, u32 offset, unsigned int len)
42ec8f5d8fSStanimir Varbanov {
43ec8f5d8fSStanimir Varbanov 	int i;
44ec8f5d8fSStanimir Varbanov 
45ec8f5d8fSStanimir Varbanov 	for (i = 0; i < len; i++)
46ec8f5d8fSStanimir Varbanov 		qce_write(qce, offset + i * sizeof(u32), 0);
47ec8f5d8fSStanimir Varbanov }
48ec8f5d8fSStanimir Varbanov 
/*
 * Build the value for REG_CONFIG: DMA burst size (in "beats"), all four
 * interrupt sources masked, the pipe pair this driver owns, and the
 * endianness mode selected by @little (non-zero -> little-endian mode).
 */
static u32 qce_config_reg(struct qce_device *qce, int little)
{
	/* burst_size is in bytes; a "beat" is 8 bytes, field is beats - 1 */
	u32 beats = (qce->burst_size >> 3) - 1;
	u32 pipe_pair = qce->pipe_pair_id;
	u32 config;

	config = (beats << REQ_SIZE_SHIFT) & REQ_SIZE_MASK;
	/* mask (disable) all engine interrupts; completion is BAM/DMA driven */
	config |= BIT(MASK_DOUT_INTR_SHIFT) | BIT(MASK_DIN_INTR_SHIFT) |
		  BIT(MASK_OP_DONE_INTR_SHIFT) | BIT(MASK_ERR_INTR_SHIFT);
	config |= (pipe_pair << PIPE_SET_SELECT_SHIFT) & PIPE_SET_SELECT_MASK;
	/*
	 * NOTE(review): this clears the bits of the shift *count* itself,
	 * not BIT(HIGH_SPD_EN_N_SHIFT) — looks like it was meant to clear
	 * the HIGH_SPD_EN_N bit; confirm against the register spec before
	 * changing, as current hardware behavior depends on this value.
	 */
	config &= ~HIGH_SPD_EN_N_SHIFT;

	if (little)
		config |= BIT(LITTLE_ENDIAN_MODE_SHIFT);

	return config;
}
66ec8f5d8fSStanimir Varbanov 
/*
 * Convert @len bytes from @src into big-endian 32-bit words in @dst.
 * Only complete words are converted; a trailing partial word is ignored.
 */
void qce_cpu_to_be32p_array(__be32 *dst, const u8 *src, unsigned int len)
{
	unsigned int nwords = len / sizeof(u32);
	unsigned int i;

	for (i = 0; i < nwords; i++)
		dst[i] = cpu_to_be32p((const __u32 *)(src + i * sizeof(__u32)));
}
80ec8f5d8fSStanimir Varbanov 
qce_setup_config(struct qce_device * qce)8159e056cdSEneas U de Queiroz static void qce_setup_config(struct qce_device *qce)
8259e056cdSEneas U de Queiroz {
8359e056cdSEneas U de Queiroz 	u32 config;
8459e056cdSEneas U de Queiroz 
8559e056cdSEneas U de Queiroz 	/* get big endianness */
8659e056cdSEneas U de Queiroz 	config = qce_config_reg(qce, 0);
8759e056cdSEneas U de Queiroz 
8859e056cdSEneas U de Queiroz 	/* clear status */
8959e056cdSEneas U de Queiroz 	qce_write(qce, REG_STATUS, 0);
9059e056cdSEneas U de Queiroz 	qce_write(qce, REG_CONFIG, config);
91ec8f5d8fSStanimir Varbanov }
92ec8f5d8fSStanimir Varbanov 
/*
 * Kick off the programmed crypto operation. When @result_dump is set,
 * the engine also writes the result dump region on completion.
 */
static inline void qce_crypto_go(struct qce_device *qce, bool result_dump)
{
	u32 go = BIT(GO_SHIFT);

	if (result_dump)
		go |= BIT(RESULTS_DUMP_SHIFT);

	qce_write(qce, REG_GOPROC, go);
}
10059e056cdSEneas U de Queiroz 
101db0018a8SThara Gopinath #if defined(CONFIG_CRYPTO_DEV_QCE_SHA) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
/*
 * Build the AUTH_SEG_CFG value for the authentication algorithm selected
 * by @flags. @key_size is the AES key length (CCM/CMAC only); @auth_size
 * is the MAC/tag length, used only for CCM.
 */
static u32 qce_auth_cfg(unsigned long flags, u32 key_size, u32 auth_size)
{
	u32 cfg = 0;

	/* CCM and CMAC are AES-based MACs; everything else here is SHA */
	if (IS_CCM(flags) || IS_CMAC(flags))
		cfg |= AUTH_ALG_AES << AUTH_ALG_SHIFT;
	else
		cfg |= AUTH_ALG_SHA << AUTH_ALG_SHIFT;

	/* AES key-size field is only meaningful for the AES-based MACs */
	if (IS_CCM(flags) || IS_CMAC(flags)) {
		if (key_size == AES_KEYSIZE_128)
			cfg |= AUTH_KEY_SZ_AES128 << AUTH_KEY_SIZE_SHIFT;
		else if (key_size == AES_KEYSIZE_256)
			cfg |= AUTH_KEY_SZ_AES256 << AUTH_KEY_SIZE_SHIFT;
	}

	/* digest/tag size: fixed for SHA and CMAC, caller-chosen for CCM */
	if (IS_SHA1(flags) || IS_SHA1_HMAC(flags))
		cfg |= AUTH_SIZE_SHA1 << AUTH_SIZE_SHIFT;
	else if (IS_SHA256(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_SIZE_SHA256 << AUTH_SIZE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_SIZE_ENUM_16_BYTES << AUTH_SIZE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= (auth_size - 1) << AUTH_SIZE_SHIFT;

	if (IS_SHA1(flags) || IS_SHA256(flags))
		cfg |= AUTH_MODE_HASH << AUTH_MODE_SHIFT;
	else if (IS_SHA1_HMAC(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_MODE_HMAC << AUTH_MODE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= AUTH_MODE_CCM << AUTH_MODE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_MODE_CMAC << AUTH_MODE_SHIFT;

	/* for plain/HMAC SHA the auth segment precedes the cipher segment */
	if (IS_SHA(flags) || IS_SHA_HMAC(flags))
		cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;

	if (IS_CCM(flags))
		cfg |= QCE_MAX_NONCE_WORDS << AUTH_NONCE_NUM_WORDS_SHIFT;

	return cfg;
}
144db0018a8SThara Gopinath #endif
145ec8f5d8fSStanimir Varbanov 
146db0018a8SThara Gopinath #ifdef CONFIG_CRYPTO_DEV_QCE_SHA
/*
 * Program the crypto engine registers for an ahash (SHA/HMAC/CMAC)
 * request and start the operation. The register write sequence below is
 * order-sensitive; it mirrors the engine's expected programming model.
 * Returns 0 on success or -EINVAL for a non-final block whose length is
 * not a multiple of the hash block size.
 */
static int qce_setup_regs_ahash(struct crypto_async_request *async_req)
{
	struct ahash_request *req = ahash_request_cast(async_req);
	struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);
	struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
	struct qce_device *qce = tmpl->qce;
	unsigned int digestsize = crypto_ahash_digestsize(ahash);
	unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);
	__be32 auth[SHA256_DIGEST_SIZE / sizeof(__be32)] = {0};
	__be32 mackey[QCE_SHA_HMAC_KEY_SIZE / sizeof(__be32)] = {0};
	u32 auth_cfg = 0, config;
	unsigned int iv_words;

	/* if not the last, the size has to be on the block boundary */
	if (!rctx->last_blk && req->nbytes % blocksize)
		return -EINVAL;

	qce_setup_config(qce);

	if (IS_CMAC(rctx->flags)) {
		/* CMAC: wipe all auth state left over from a previous op */
		qce_write(qce, REG_AUTH_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_SIZE, 0);
		qce_clear_array(qce, REG_AUTH_IV0, 16);
		qce_clear_array(qce, REG_AUTH_KEY0, 16);
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

		auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);
	}

	/* keyed modes (HMAC/CMAC) need the MAC key loaded big-endian */
	if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
		u32 authkey_words = rctx->authklen / sizeof(u32);

		qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);
		qce_write_array(qce, REG_AUTH_KEY0, (u32 *)mackey,
				authkey_words);
	}

	/* CMAC needs no IV/byte-count programming; go straight to launch */
	if (IS_CMAC(rctx->flags))
		goto go_proc;

	/*
	 * First block uses the standard initial digest as-is; continued
	 * blocks restore the intermediate digest with byte swapping.
	 */
	if (rctx->first_blk)
		memcpy(auth, rctx->digest, digestsize);
	else
		qce_cpu_to_be32p_array(auth, rctx->digest, digestsize);

	/* SHA-1 state is 5 words, SHA-256 state is 8 words */
	iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;
	qce_write_array(qce, REG_AUTH_IV0, (u32 *)auth, iv_words);

	/* restore the running byte count for continued hashing */
	if (rctx->first_blk)
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);
	else
		qce_write_array(qce, REG_AUTH_BYTECNT0,
				(u32 *)rctx->byte_count, 2);

	auth_cfg = qce_auth_cfg(rctx->flags, 0, digestsize);

	if (rctx->last_blk)
		auth_cfg |= BIT(AUTH_LAST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_LAST_SHIFT);

	if (rctx->first_blk)
		auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_FIRST_SHIFT);

go_proc:
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);
	qce_write(qce, REG_AUTH_SEG_SIZE, req->nbytes);
	qce_write(qce, REG_AUTH_SEG_START, 0);
	qce_write(qce, REG_ENCR_SEG_CFG, 0);
	qce_write(qce, REG_SEG_SIZE, req->nbytes);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
23059e056cdSEneas U de Queiroz #endif
23159e056cdSEneas U de Queiroz 
232db0018a8SThara Gopinath #if defined(CONFIG_CRYPTO_DEV_QCE_SKCIPHER) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
/*
 * Build the ENCR_SEG_CFG value for the cipher selected by @flags.
 * @aes_key_size picks the AES key-size field (ignored for DES/3DES).
 * Returns ~0 when the mode bits in @flags are not recognized.
 */
static u32 qce_encr_cfg(unsigned long flags, u32 aes_key_size)
{
	u32 cfg = 0;

	/* cipher algorithm and, for AES, the key-size field */
	if (IS_AES(flags)) {
		cfg |= ENCR_ALG_AES << ENCR_ALG_SHIFT;
		if (aes_key_size == AES_KEYSIZE_128)
			cfg |= ENCR_KEY_SZ_AES128 << ENCR_KEY_SZ_SHIFT;
		else if (aes_key_size == AES_KEYSIZE_256)
			cfg |= ENCR_KEY_SZ_AES256 << ENCR_KEY_SZ_SHIFT;
	} else if (IS_DES(flags) || IS_3DES(flags)) {
		cfg |= ENCR_ALG_DES << ENCR_ALG_SHIFT;
	}

	if (IS_DES(flags))
		cfg |= ENCR_KEY_SZ_DES << ENCR_KEY_SZ_SHIFT;
	if (IS_3DES(flags))
		cfg |= ENCR_KEY_SZ_3DES << ENCR_KEY_SZ_SHIFT;

	/* block cipher mode of operation */
	switch (flags & QCE_MODE_MASK) {
	case QCE_MODE_ECB:
		cfg |= ENCR_MODE_ECB << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CBC:
		cfg |= ENCR_MODE_CBC << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CTR:
		cfg |= ENCR_MODE_CTR << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_XTS:
		cfg |= ENCR_MODE_XTS << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CCM:
		/* CCM additionally flags the last transfer */
		cfg |= ENCR_MODE_CCM << ENCR_MODE_SHIFT;
		cfg |= LAST_CCM_XFR << LAST_CCM_SHIFT;
		break;
	default:
		return ~0;
	}

	return cfg;
}
278db0018a8SThara Gopinath #endif
27959e056cdSEneas U de Queiroz 
280db0018a8SThara Gopinath #ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
/*
 * Byte-reverse an XTS tweak of @ivsize bytes into the tail of a
 * zero-padded 16-byte buffer, then store it big-endian into @dst.
 * IVs longer than QCE_AES_IV_LENGTH are silently ignored.
 */
static void qce_xts_swapiv(__be32 *dst, const u8 *src, unsigned int ivsize)
{
	u8 rev[QCE_AES_IV_LENGTH];
	u32 k;

	if (ivsize > QCE_AES_IV_LENGTH)
		return;

	memset(rev, 0, QCE_AES_IV_LENGTH);

	/* reversed IV occupies the last @ivsize bytes; head stays zero */
	for (k = 0; k < ivsize; k++)
		rev[QCE_AES_IV_LENGTH - ivsize + k] = src[ivsize - 1 - k];

	qce_cpu_to_be32p_array(dst, rev, QCE_AES_IV_LENGTH);
}
29759e056cdSEneas U de Queiroz 
/*
 * Program the XTS tweak key. @enckey holds both XTS halves back to back;
 * only the second half (the tweak key) goes into REG_ENCR_XTS_KEY0.
 */
static void qce_xtskey(struct qce_device *qce, const u8 *enckey,
		       unsigned int enckeylen, unsigned int cryptlen)
{
	u32 xtskey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	unsigned int xtsklen = enckeylen / (2 * sizeof(u32));

	qce_cpu_to_be32p_array((__be32 *)xtskey, enckey + enckeylen / 2,
			       enckeylen / 2);
	qce_write_array(qce, REG_ENCR_XTS_KEY0, xtskey, xtsklen);

	/* Set data unit size to cryptlen. Anything else causes
	 * crypto engine to return back incorrect results.
	 */
	qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen);
}
313ec8f5d8fSStanimir Varbanov 
/*
 * Program the crypto engine for a skcipher (AES/DES/3DES) request and
 * start it. The register sequence is order-sensitive. Returns 0 on
 * success or -EINVAL for an unsupported cipher in the request flags.
 */
static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
{
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	__be32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(__be32)] = {0};
	__be32 enciv[QCE_MAX_IV_SIZE / sizeof(__be32)] = {0};
	unsigned int enckey_words, enciv_words;
	unsigned int keylen;
	u32 encr_cfg = 0, auth_cfg = 0, config;
	unsigned int ivsize = rctx->ivsize;
	unsigned long flags = rctx->flags;

	qce_setup_config(qce);

	/* XTS carries two keys back to back; only half goes to ENCR_KEY */
	if (IS_XTS(flags))
		keylen = ctx->enc_keylen / 2;
	else
		keylen = ctx->enc_keylen;

	qce_cpu_to_be32p_array(enckey, ctx->enc_key, keylen);
	enckey_words = keylen / sizeof(u32);

	/* no authentication for plain skcipher ops (auth_cfg stays 0) */
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	encr_cfg = qce_encr_cfg(flags, keylen);

	if (IS_DES(flags)) {
		enciv_words = 2;
		enckey_words = 2;
	} else if (IS_3DES(flags)) {
		enciv_words = 2;
		enckey_words = 6;
	} else if (IS_AES(flags)) {
		/* XTS also needs the tweak key and data-unit size */
		if (IS_XTS(flags))
			qce_xtskey(qce, ctx->enc_key, ctx->enc_keylen,
				   rctx->cryptlen);
		enciv_words = 4;
	} else {
		return -EINVAL;
	}

	qce_write_array(qce, REG_ENCR_KEY0, (u32 *)enckey, enckey_words);

	if (!IS_ECB(flags)) {
		/* XTS tweak is stored byte-reversed; other modes plain BE */
		if (IS_XTS(flags))
			qce_xts_swapiv(enciv, rctx->iv, ivsize);
		else
			qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize);

		qce_write_array(qce, REG_CNTR0_IV0, (u32 *)enciv, enciv_words);
	}

	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);

	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);
	qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, 0);

	/* CTR mode: let the full 128-bit counter roll over */
	if (IS_CTR(flags)) {
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	qce_write(qce, REG_SEG_SIZE, rctx->cryptlen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
39359e056cdSEneas U de Queiroz #endif
394ec8f5d8fSStanimir Varbanov 
395db0018a8SThara Gopinath #ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
/*
 * Standard initial hash state (FIPS 180 H0..H4), zero-padded to the
 * SHA-256 state width so both tables can be loaded the same way.
 */
static const u32 std_iv_sha1[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4, 0, 0, 0
};

/* Standard SHA-256 initial hash state (FIPS 180 H0..H7). */
static const u32 std_iv_sha256[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
	SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7
};
404db0018a8SThara Gopinath 
/*
 * Convert @len bytes of big-endian data from @src into native-endian
 * 32-bit words in @dst. Only complete words are converted, but the
 * returned word count is rounded up, matching how callers size the
 * subsequent register write.
 */
static unsigned int qce_be32_to_cpu_array(u32 *dst, const u8 *src, unsigned int len)
{
	unsigned int nwords = len / sizeof(u32);
	unsigned int i;

	for (i = 0; i < nwords; i++)
		dst[i] = be32_to_cpup((const __be32 *)(src + i * sizeof(u32)));

	return DIV_ROUND_UP(len, sizeof(u32));
}
419db0018a8SThara Gopinath 
/*
 * Program the crypto engine for an AEAD request (CCM or cipher+HMAC)
 * and start it. The register write sequence is order-sensitive.
 * Always returns 0; invalid configurations surface later via the
 * engine status.
 */
static int qce_setup_regs_aead(struct crypto_async_request *async_req)
{
	struct aead_request *req = aead_request_cast(async_req);
	struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	u32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	u32 enciv[QCE_MAX_IV_SIZE / sizeof(u32)] = {0};
	u32 authkey[QCE_SHA_HMAC_KEY_SIZE / sizeof(u32)] = {0};
	u32 authiv[SHA256_DIGEST_SIZE / sizeof(u32)] = {0};
	u32 authnonce[QCE_MAX_NONCE / sizeof(u32)] = {0};
	unsigned int enc_keylen = ctx->enc_keylen;
	unsigned int auth_keylen = ctx->auth_keylen;
	unsigned int enc_ivsize = rctx->ivsize;
	unsigned int auth_ivsize = 0;
	unsigned int enckey_words, enciv_words;
	unsigned int authkey_words, authiv_words, authnonce_words;
	unsigned long flags = rctx->flags;
	u32 encr_cfg, auth_cfg, config, totallen;
	u32 iv_last_word;

	qce_setup_config(qce);

	/* Write encryption key */
	enckey_words = qce_be32_to_cpu_array(enckey, ctx->enc_key, enc_keylen);
	qce_write_array(qce, REG_ENCR_KEY0, enckey, enckey_words);

	/* Write encryption iv */
	enciv_words = qce_be32_to_cpu_array(enciv, rctx->iv, enc_ivsize);
	qce_write_array(qce, REG_CNTR0_IV0, enciv, enciv_words);

	if (IS_CCM(rctx->flags)) {
		/* CCM counter starts one past the IV's last word */
		iv_last_word = enciv[enciv_words - 1];
		qce_write(qce, REG_CNTR3_IV3, iv_last_word + 1);
		qce_write_array(qce, REG_ENCR_CCM_INT_CNTR0, (u32 *)enciv, enciv_words);
		/* allow the full 128-bit counter to roll over */
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	/* Clear authentication IV and KEY registers of previous values */
	qce_clear_array(qce, REG_AUTH_IV0, 16);
	qce_clear_array(qce, REG_AUTH_KEY0, 16);

	/* Clear byte count */
	qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

	/* Write authentication key */
	authkey_words = qce_be32_to_cpu_array(authkey, ctx->auth_key, auth_keylen);
	qce_write_array(qce, REG_AUTH_KEY0, (u32 *)authkey, authkey_words);

	/* Write initial authentication IV only for HMAC algorithms */
	if (IS_SHA_HMAC(rctx->flags)) {
		/* Write default authentication iv */
		if (IS_SHA1_HMAC(rctx->flags)) {
			auth_ivsize = SHA1_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha1, auth_ivsize);
		} else if (IS_SHA256_HMAC(rctx->flags)) {
			auth_ivsize = SHA256_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha256, auth_ivsize);
		}
		authiv_words = auth_ivsize / sizeof(u32);
		qce_write_array(qce, REG_AUTH_IV0, (u32 *)authiv, authiv_words);
	} else if (IS_CCM(rctx->flags)) {
		/* Write nonce for CCM algorithms */
		authnonce_words = qce_be32_to_cpu_array(authnonce, rctx->ccm_nonce, QCE_MAX_NONCE);
		qce_write_array(qce, REG_AUTH_INFO_NONCE0, authnonce, authnonce_words);
	}

	/* Set up ENCR_SEG_CFG */
	encr_cfg = qce_encr_cfg(flags, enc_keylen);
	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);
	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);

	/* Set up AUTH_SEG_CFG */
	auth_cfg = qce_auth_cfg(rctx->flags, auth_keylen, ctx->authsize);
	auth_cfg |= BIT(AUTH_LAST_SHIFT);
	auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	/*
	 * Auth-segment position relative to the cipher segment: CCM
	 * authenticates plaintext, HMAC authenticates ciphertext, so the
	 * BEFORE/AFTER choice flips between encrypt and decrypt.
	 */
	if (IS_ENCRYPT(flags)) {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
	} else {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
	}
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	totallen = rctx->cryptlen + rctx->assoclen;

	/* Set the encryption size and start offset */
	/* CCM decrypt input additionally carries the auth tag */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize);
	else
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, rctx->assoclen & 0xffff);

	/* Set the authentication size and start offset */
	qce_write(qce, REG_AUTH_SEG_SIZE, totallen);
	qce_write(qce, REG_AUTH_SEG_START, 0);

	/* Write total length */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_SEG_SIZE, totallen + ctx->authsize);
	else
		qce_write(qce, REG_SEG_SIZE, totallen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	/* Start the process; CCM reads its result without the dump region */
	qce_crypto_go(qce, !IS_CCM(flags));

	return 0;
}
542db0018a8SThara Gopinath #endif
543db0018a8SThara Gopinath 
/*
 * Dispatch a crypto request to the register-programming routine matching
 * its algorithm @type (CRYPTO_ALG_TYPE_*). Each case is compiled in only
 * when the corresponding algorithm support is enabled in Kconfig.
 * Returns 0 on success, -EINVAL for an unsupported type.
 */
int qce_start(struct crypto_async_request *async_req, u32 type)
{
	switch (type) {
#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return qce_setup_regs_skcipher(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_SHA
	case CRYPTO_ALG_TYPE_AHASH:
		return qce_setup_regs_ahash(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
	case CRYPTO_ALG_TYPE_AEAD:
		return qce_setup_regs_aead(async_req);
#endif
	default:
		return -EINVAL;
	}
}
563ec8f5d8fSStanimir Varbanov 
564ec8f5d8fSStanimir Varbanov #define STATUS_ERRORS	\
565ec8f5d8fSStanimir Varbanov 		(BIT(SW_ERR_SHIFT) | BIT(AXI_ERR_SHIFT) | BIT(HSD_ERR_SHIFT))
566ec8f5d8fSStanimir Varbanov 
qce_check_status(struct qce_device * qce,u32 * status)567ec8f5d8fSStanimir Varbanov int qce_check_status(struct qce_device *qce, u32 *status)
568ec8f5d8fSStanimir Varbanov {
569ec8f5d8fSStanimir Varbanov 	int ret = 0;
570ec8f5d8fSStanimir Varbanov 
571ec8f5d8fSStanimir Varbanov 	*status = qce_read(qce, REG_STATUS);
572ec8f5d8fSStanimir Varbanov 
573ec8f5d8fSStanimir Varbanov 	/*
574ec8f5d8fSStanimir Varbanov 	 * Don't use result dump status. The operation may not be complete.
575ec8f5d8fSStanimir Varbanov 	 * Instead, use the status we just read from device. In case, we need to
576ec8f5d8fSStanimir Varbanov 	 * use result_status from result dump the result_status needs to be byte
577ec8f5d8fSStanimir Varbanov 	 * swapped, since we set the device to little endian.
578ec8f5d8fSStanimir Varbanov 	 */
579ec8f5d8fSStanimir Varbanov 	if (*status & STATUS_ERRORS || !(*status & BIT(OPERATION_DONE_SHIFT)))
580ec8f5d8fSStanimir Varbanov 		ret = -ENXIO;
581a9ca8eacSThara Gopinath 	else if (*status & BIT(MAC_FAILED_SHIFT))
582a9ca8eacSThara Gopinath 		ret = -EBADMSG;
583ec8f5d8fSStanimir Varbanov 
584ec8f5d8fSStanimir Varbanov 	return ret;
585ec8f5d8fSStanimir Varbanov }
586ec8f5d8fSStanimir Varbanov 
/* Decode the core revision register into major/minor/step fields. */
void qce_get_version(struct qce_device *qce, u32 *major, u32 *minor, u32 *step)
{
	const u32 rev = qce_read(qce, REG_VERSION);

	*major = (rev & CORE_MAJOR_REV_MASK) >> CORE_MAJOR_REV_SHIFT;
	*minor = (rev & CORE_MINOR_REV_MASK) >> CORE_MINOR_REV_SHIFT;
	*step = (rev & CORE_STEP_REV_MASK) >> CORE_STEP_REV_SHIFT;
}
596