1 /* SPDX-License-Identifier: GPL-2.0-only */
2 /* Copyright(c) 2014 - 2022 Intel Corporation */
3 #ifndef QAT_BL_H
4 #define QAT_BL_H
5 #include <linux/crypto.h>
6 #include <linux/scatterlist.h>
7 #include <linux/types.h>
8
9 #define QAT_MAX_BUFF_DESC 4
10
/*
 * Single buffer descriptor in the layout consumed by the QAT device.
 * Layout is fixed (__packed) — presumably a firmware/hardware ABI, so
 * do not reorder or resize members.
 */
struct qat_alg_buf {
	u32 len;	/* length of the buffer in bytes */
	u32 resrvd;	/* reserved, keep zero */
	u64 addr;	/* buffer address — NOTE(review): looks like a DMA/bus address; confirm against mapping code */
} __packed;
16
/*
 * Scatter-gather buffer list handed to the device: a fixed header
 * followed by a flexible array of buffer descriptors.
 */
struct qat_alg_buf_list {
	/* New members must be added within the __struct_group() macro below. */
	/*
	 * __struct_group() makes the header separately addressable as
	 * struct qat_alg_buf_list_hdr (see qat_alg_fixed_buf_list) while
	 * keeping the members inline here via the anonymous 'hdr' group.
	 */
	__struct_group(qat_alg_buf_list_hdr, hdr, __packed,
		u64 resrvd;		/* reserved, keep zero */
		u32 num_bufs;		/* number of entries in buffers[] */
		u32 num_mapped_bufs;	/* number of entries that are DMA mapped */
	);
	struct qat_alg_buf buffers[];	/* flexible array of descriptors */
} __packed;
26
/*
 * Fixed-capacity variant of struct qat_alg_buf_list with room for up to
 * QAT_MAX_BUFF_DESC descriptors, embeddable in struct qat_request_buffs
 * to avoid a separate allocation for small requests. 64-byte alignment —
 * presumably a device/DMA requirement; confirm against the mapping code.
 */
struct qat_alg_fixed_buf_list {
	struct qat_alg_buf_list_hdr sgl_hdr;			/* same header as the flexible list */
	struct qat_alg_buf descriptors[QAT_MAX_BUFF_DESC];	/* inline descriptor storage */
} __packed __aligned(64);
31
/*
 * Per-request buffer bookkeeping: the source and destination buffer
 * lists given to the device, their DMA handles and sizes, plus the
 * preallocated fixed lists used when they suffice.
 */
struct qat_request_buffs {
	struct qat_alg_buf_list *bl;	/* source buffer list */
	dma_addr_t blp;			/* DMA handle of *bl */
	struct qat_alg_buf_list *blout;	/* destination buffer list */
	dma_addr_t bloutp;		/* DMA handle of *blout */
	size_t sz;			/* size of the source list in bytes */
	size_t sz_out;			/* size of the destination list in bytes */
	bool sgl_src_valid;		/* true when sgl_src below is in use */
	bool sgl_dst_valid;		/* true when sgl_dst below is in use */
	struct qat_alg_fixed_buf_list sgl_src;	/* inline source list (avoids allocation) */
	struct qat_alg_fixed_buf_list sgl_dst;	/* inline destination list (avoids allocation) */
};
44
/*
 * Optional tuning parameters for qat_bl_sgl_to_bufl().
 */
struct qat_sgl_to_bufl_params {
	dma_addr_t extra_dst_buff;	/* extra buffer appended to the destination list */
	size_t sz_extra_dst_buff;	/* size of that extra buffer in bytes */
	unsigned int sskip;		/* bytes to skip at the start of the source sgl */
	unsigned int dskip;		/* bytes to skip at the start of the destination sgl */
};
51
/* Unmap and free the buffer lists previously built in @buf. */
void qat_bl_free_bufl(struct adf_accel_dev *accel_dev,
		      struct qat_request_buffs *buf);

/*
 * Build device-consumable buffer lists in @buf from the source and
 * destination scatterlists. @params may tune the conversion (see
 * struct qat_sgl_to_bufl_params); @flags selects the allocation mode
 * (e.g. GFP_KERNEL vs GFP_ATOMIC). Returns 0 or a negative errno —
 * NOTE(review): error convention inferred from kernel style; confirm
 * against the implementation.
 */
int qat_bl_sgl_to_bufl(struct adf_accel_dev *accel_dev,
		       struct scatterlist *sgl,
		       struct scatterlist *sglout,
		       struct qat_request_buffs *buf,
		       struct qat_sgl_to_bufl_params *params,
		       gfp_t flags);
60
qat_algs_alloc_flags(struct crypto_async_request * req)61 static inline gfp_t qat_algs_alloc_flags(struct crypto_async_request *req)
62 {
63 return req->flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL : GFP_ATOMIC;
64 }
65
/*
 * Replace the destination scatterlist of an already-built request with
 * a newly allocated one of @dlen bytes and remap it in @qat_bufs.
 * @newd returns the new scatterlist to the caller. @gfp selects the
 * allocation mode. NOTE(review): exact ownership of *newd (who frees)
 * is not visible here — confirm against the implementation.
 */
int qat_bl_realloc_map_new_dst(struct adf_accel_dev *accel_dev,
			       struct scatterlist **newd,
			       unsigned int dlen,
			       struct qat_request_buffs *qat_bufs,
			       gfp_t gfp);
71
72 #endif
73