/linux/crypto/

rsa-pkcs1pad.c
    229  req->dst_len = ctx->key_size;            in pkcs1pad_encrypt_sign_complete()
    264  req->dst_len = ctx->key_size;            in pkcs1pad_encrypt()
    302  unsigned int dst_len;                    in pkcs1pad_decrypt_complete() local
    310  dst_len = req_ctx->child_req.dst_len;    in pkcs1pad_decrypt_complete()
    320  dst_len--;                               in pkcs1pad_decrypt_complete()
    336  if (req->dst_len < dst_len - pos)        in pkcs1pad_decrypt_complete()
    338  req->dst_len = dst_len - pos;            in pkcs1pad_decrypt_complete()
    464  unsigned int dst_len;                    in pkcs1pad_verify_complete() local
    472  dst_len = req_ctx->child_req.dst_len;    in pkcs1pad_verify_complete()
    482  dst_len--;                               in pkcs1pad_verify_complete()
    [all …]
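The pkcs1pad_decrypt_complete() hits trace EME-PKCS1-v1_5 unpadding: scan past the padding string for its 0x00 separator at `pos`, then return the trailing `dst_len - pos` message bytes, failing with -EOVERFLOW when the caller's buffer is short. A minimal sketch of that walk in plain C; `pkcs1_v15_unpad` is an illustrative name, not the kernel's exact code:

```c
#include <errno.h>
#include <string.h>

/*
 * Unpad an EME-PKCS1-v1_5 block: 0x00 || 0x02 || PS || 0x00 || M,
 * where PS is at least 8 nonzero bytes. Returns 0 with the message in
 * out/out_len, or a negative errno. Illustrative only: not constant-time,
 * unlike what a real RSA decrypt path requires.
 */
static int pkcs1_v15_unpad(const unsigned char *blk, unsigned int dst_len,
                           unsigned char *out, unsigned int *out_len)
{
	unsigned int pos;

	if (dst_len < 11 || blk[0] != 0x00 || blk[1] != 0x02)
		return -EINVAL;
	for (pos = 2; pos < dst_len; pos++)	/* skip the nonzero pad PS */
		if (blk[pos] == 0x00)
			break;
	if (pos == dst_len || pos < 10)		/* no separator, or PS < 8 */
		return -EINVAL;
	pos++;					/* message starts past the 0x00 */
	if (*out_len < dst_len - pos)
		return -EOVERFLOW;		/* mirrors the line-336 check */
	*out_len = dst_len - pos;
	memcpy(out, blk + pos, *out_len);
	return 0;
}
```

The real completion handler additionally copes with RSA primitives that strip the leading zero byte (the `dst_len--` hits) and must behave in constant time to avoid Bleichenbacher-style padding oracles.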
ecrdsa.c
     77  unsigned int ndigits = req->dst_len / sizeof(u64);       in ecrdsa_verify()
     96  req->dst_len != ctx->digest_len ||                       in ecrdsa_verify()
     97  req->dst_len != ctx->curve->g.ndigits * sizeof(u64) ||   in ecrdsa_verify()
     99  req->dst_len * 2 != req->src_len ||                      in ecrdsa_verify()
    101  WARN_ON(req->dst_len > sizeof(digest)))                  in ecrdsa_verify()
    108  req->src_len + req->dst_len),                            in ecrdsa_verify()
    109  digest, req->dst_len, req->src_len);                     in ecrdsa_verify()
ecdsa.c
    154  buffer = kmalloc(req->src_len + req->dst_len, GFP_KERNEL);     in ecdsa_verify()
    159  sg_nents_for_len(req->src, req->src_len + req->dst_len),       in ecdsa_verify()
    160  buffer, req->src_len + req->dst_len, 0);                       in ecdsa_verify()
    168  diff = bufsize - req->dst_len;                                 in ecdsa_verify()
    172  memcpy(&rawhash[diff], buffer + req->src_len, req->dst_len);   in ecdsa_verify()
jitterentropy-kcapi.c
    172  int jent_read_random_block(void *hash_state, char *dst, unsigned int dst_len)   in jent_read_random_block() argument
    182  if (!ret && dst_len)                 in jent_read_random_block()
    183  memcpy(dst, jent_block, dst_len);    in jent_read_random_block()
/linux/fs/bcachefs/

compress.c
    167  src_len, dst_len, dst_len);                            in __bio_uncompress()
    168  if (ret != dst_len)                                    in __bio_uncompress()
    176  .avail_out = dst_len,                                  in __bio_uncompress()
    202  dst_data, dst_len,                                     in __bio_uncompress()
    207  if (ret != dst_len)                                    in __bio_uncompress()
    310  &len, dst_len,                                         in attempt_compress()
    319  src_len, dst_len,                                      in attempt_compress()
    427  *dst_len = ret;                                        in __bio_compress()
    459  pad = round_up(*dst_len, block_bytes(c)) - *dst_len;   in __bio_compress()
    462  *dst_len += pad;                                       in __bio_compress()
    [all …]
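The __bio_compress() hits at 459-462 pad the compressed payload out to a whole number of filesystem blocks. A self-contained model of that arithmetic; the block size and lengths here are made-up stand-ins for block_bytes(c):

```c
#include <stddef.h>
#include <stdio.h>

/* Same result as the kernel's round_up() macro for power-of-two y. */
#define round_up(x, y) ((((x) - 1) | ((y) - 1)) + 1)

int main(void)
{
	size_t dst_len = 3000;     /* compressed payload size */
	size_t block_bytes = 512;  /* stand-in for block_bytes(c) */
	size_t pad = round_up(dst_len, block_bytes) - dst_len;

	dst_len += pad;            /* now a whole number of blocks */
	printf("pad=%zu dst_len=%zu\n", pad, dst_len); /* pad=72 dst_len=3072 */
	return 0;
}
```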
io_write.c
     948  size_t dst_len = 0, src_len = 0;                             in bch2_write_extent() local
     968  dst_len = min_t(unsigned, dst_len, wp->sectors_free << 9);   in bch2_write_extent()
     971  dst_len = min_t(unsigned, dst_len,                           in bch2_write_extent()
     975  swap(dst->bi_iter.bi_size, dst_len);                         in bch2_write_extent()
     980  src_len = dst_len;                                           in bch2_write_extent()
     983  BUG_ON(!src_len || !dst_len);                                in bch2_write_extent()
    1034  crc.compressed_size = dst_len >> 9;                          in bch2_write_extent()
    1038  swap(dst->bi_iter.bi_size, dst_len);                         in bch2_write_extent()
    1047  swap(dst->bi_iter.bi_size, dst_len);                         in bch2_write_extent()
    1053  bio_advance(dst, dst_len);                                   in bch2_write_extent()
    [all …]
/linux/lib/

decompress_unlzo.c
    105  u32 src_len, dst_len;                              in unlzo() local
    181  dst_len = get_unaligned_be32(in_buf);              in unlzo()
    186  if (dst_len == 0) {                                in unlzo()
    192  if (dst_len > LZO_BLOCK_SIZE) {                    in unlzo()
    211  if (src_len <= 0 || src_len > dst_len) {           in unlzo()
    226  tmp = dst_len;                                     in unlzo()
    231  if (unlikely(dst_len == src_len))                  in unlzo()
    237  if (r != LZO_E_OK || dst_len != tmp) {             in unlzo()
    243  if (flush && flush(out_buf, dst_len) != dst_len)   in unlzo()
    246  out_buf += dst_len;                                in unlzo()
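Read together, the hits outline the per-block loop of the LZO stream format: a big-endian uncompressed length, plausibility checks against the block size, and a dst_len == src_len case marking a stored (incompressible) block. A hypothetical condensation of one iteration; unlzo_one_block and max_out are illustrative names, and the real unlzo() also parses header flags, optional checksums, and a separate compressed-length word:

```c
#include <linux/lzo.h>
#include <linux/string.h>
#include <asm/unaligned.h>

/*
 * Decompress one block whose 4-byte big-endian uncompressed size leads the
 * payload. Returns bytes produced, 0 on the end-of-stream marker, or -1 on
 * corruption.
 */
static int unlzo_one_block(const u8 *in, u32 src_len, u8 *out, u32 max_out)
{
	u32 dst_len = get_unaligned_be32(in);	/* uncompressed size, BE32 */
	size_t tmp = dst_len;

	in += 4;
	if (dst_len == 0)
		return 0;			/* end-of-stream marker */
	if (dst_len > max_out || src_len == 0 || src_len > dst_len)
		return -1;			/* implausible block header */
	if (dst_len == src_len) {		/* stored: data didn't compress */
		memcpy(out, in, dst_len);
		return dst_len;
	}
	if (lzo1x_decompress_safe(in, src_len, out, &tmp) != LZO_E_OK ||
	    tmp != dst_len)
		return -1;			/* corrupt payload */
	return dst_len;
}
```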
/linux/net/smc/

smc_tx.c
    398  if (src_len_sum == dst_len)                in smcr_tx_rdma_writes()
    411  dst_len = len - dst_len; /* remainder */   in smcr_tx_rdma_writes()
    412  dst_len_sum += dst_len;                    in smcr_tx_rdma_writes()
    442  if (src_len_sum == dst_len)                in smcd_tx_rdma_writes()
    452  dst_len = len - dst_len; /* remainder */   in smcd_tx_rdma_writes()
    453  dst_len_sum += dst_len;                    in smcd_tx_rdma_writes()
    506  dst_len = min_t(size_t,                    in smc_tx_rdma_writes()
    513  dst_len = len;                             in smc_tx_rdma_writes()
    518  src_len = dst_len;                         in smc_tx_rdma_writes()
    526  dst_off, dst_len);                         in smc_tx_rdma_writes()
    [all …]
/linux/net/ipv4/

fib_rules.c
     37  u8 dst_len;                                                         member
     53  if (r->dst_len || r->src_len || r->dscp)                            in fib4_rule_matchall()
    263  if (frh->dst_len)                                                   in fib4_rule_configure()
    279  rule4->dst_len = frh->dst_len;                                      in fib4_rule_configure()
    280  rule4->dstmask = inet_make_mask(rule4->dst_len);                    in fib4_rule_configure()
    320  if (frh->dst_len && (rule4->dst_len != frh->dst_len))               in fib4_rule_compare()
    334  if (frh->dst_len && (rule4->dst != nla_get_in_addr(tb[FRA_DST])))   in fib4_rule_compare()
    345  frh->dst_len = rule4->dst_len;                                      in fib4_rule_fill()
    349  if ((rule4->dst_len &&                                              in fib4_rule_fill()
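In fib4 rules, dst_len is an IPv4 prefix length, and inet_make_mask() at line 280 expands it into a netmask. A runnable userspace re-creation of that one-liner:

```c
#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>

/* Same arithmetic as the kernel's inet_make_mask(): a prefix length 0..32
 * (the fib dst_len) becomes a network-byte-order mask. */
static uint32_t make_mask(int prefix_len)
{
	return prefix_len ? htonl(~((1u << (32 - prefix_len)) - 1)) : 0;
}

int main(void)
{
	struct in_addr mask = { .s_addr = make_mask(24) };

	printf("/24 -> %s\n", inet_ntoa(mask));	/* 255.255.255.0 */
	return 0;
}
```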
/linux/include/linux/

lzo.h
    25  unsigned char *dst, size_t *dst_len, void *wrkmem);
    29  unsigned char *dst, size_t *dst_len, void *wrkmem);
    33  unsigned char *dst, size_t *dst_len);
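These are the tails of what appear to be the lzo1x_1_compress(), lzorle1x_1_compress(), and lzo1x_decompress_safe() prototypes; in each, dst_len is in/out, capacity on entry and bytes produced on return. A sketch of a round trip through that API; lzo_roundtrip is illustrative and error handling is abbreviated:

```c
#include <linux/lzo.h>
#include <linux/slab.h>
#include <linux/string.h>

static int lzo_roundtrip(const u8 *src, size_t src_len)
{
	size_t dst_len = lzo1x_worst_compress(src_len);	/* worst-case bound */
	size_t out_len = src_len;
	void *wrkmem = kmalloc(LZO1X_1_MEM_COMPRESS, GFP_KERNEL);
	u8 *dst = kmalloc(dst_len, GFP_KERNEL);
	u8 *out = kmalloc(src_len, GFP_KERNEL);
	int ret = -ENOMEM;

	if (!wrkmem || !dst || !out)
		goto free;
	ret = lzo1x_1_compress(src, src_len, dst, &dst_len, wrkmem);
	if (ret == LZO_E_OK)		/* dst_len now holds the output size */
		ret = lzo1x_decompress_safe(dst, dst_len, out, &out_len);
	if (ret == LZO_E_OK && (out_len != src_len || memcmp(src, out, src_len)))
		ret = -EIO;		/* round trip must be lossless */
free:
	kfree(out);
	kfree(dst);
	kfree(wrkmem);
	return ret;
}
```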
/linux/drivers/net/ethernet/mellanox/mlx5/core/lag/

mp.c
    103  static void mlx5_lag_fib_set(struct lag_mp *mp, struct fib_info *fi, u32 dst, int dst_len)  in mlx5_lag_fib_set() argument
    108  mp->fib.dst_len = dst_len;                                                   in mlx5_lag_fib_set()
    167  (mp->fib.dst != fen_info->dst || mp->fib.dst_len != fen_info->dst_len) &&   in mlx5_lag_fib_route_event()
    176  if (mp->fib.dst == fen_info->dst && mp->fib.dst_len == fen_info->dst_len)   in mlx5_lag_fib_route_event()
    193  mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len);                 in mlx5_lag_fib_route_event()
    208  mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len);                 in mlx5_lag_fib_route_event()
/linux/drivers/block/zram/

zcomp.c
    119  const void *src, unsigned int *dst_len)   in zcomp_compress() argument
    135  *dst_len = PAGE_SIZE * 2;                 in zcomp_compress()
    139  zstrm->buffer, dst_len);                  in zcomp_compress()
    145  unsigned int dst_len = PAGE_SIZE;         in zcomp_decompress() local
    149  dst, &dst_len);                           in zcomp_decompress()
/linux/lib/crypto/

chacha20poly1305.c
    130  size_t dst_len;                                                    in __chacha20poly1305_decrypt() local
    148  dst_len = src_len - POLY1305_DIGEST_SIZE;                          in __chacha20poly1305_decrypt()
    149  poly1305_update(&poly1305_state, src, dst_len);                    in __chacha20poly1305_decrypt()
    150  if (dst_len & 0xf)                                                 in __chacha20poly1305_decrypt()
    151  poly1305_update(&poly1305_state, pad0, 0x10 - (dst_len & 0xf));    in __chacha20poly1305_decrypt()
    154  b.lens[1] = cpu_to_le64(dst_len);                                  in __chacha20poly1305_decrypt()
    159  ret = crypto_memneq(b.mac, src + dst_len, POLY1305_DIGEST_SIZE);   in __chacha20poly1305_decrypt()
    161  chacha20_crypt(chacha_state, dst, src, dst_len);                   in __chacha20poly1305_decrypt()
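The hits follow the RFC 8439 recipe: Poly1305 runs over the zero-padded ciphertext plus the encoded lengths, the tag is compared in constant time with crypto_memneq(), and only on a match does ChaCha20 decrypt the leading dst_len = src_len - 16 bytes. A sketch of calling the consumer-facing wrapper from include/crypto/chacha20poly1305.h; open_sealed is an illustrative name:

```c
#include <crypto/chacha20poly1305.h>

/*
 * dst needs only src_len - CHACHA20POLY1305_AUTHTAG_SIZE bytes, because the
 * trailing Poly1305 tag is verified and stripped. Returns true only when
 * the tag authenticates.
 */
static bool open_sealed(u8 *dst, const u8 *src, size_t src_len,
			const u8 *ad, size_t ad_len, u64 nonce,
			const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
	if (src_len < CHACHA20POLY1305_AUTHTAG_SIZE)
		return false;		/* too short to even hold a tag */
	return chacha20poly1305_decrypt(dst, src, src_len, ad, ad_len,
					nonce, key);
}
```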
/linux/include/crypto/

akcipher.h
     40  unsigned int dst_len;       member
    255  unsigned int dst_len)       in akcipher_request_set_crypt() argument
    260  req->dst_len = dst_len;     in akcipher_request_set_crypt()
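akcipher_request_set_crypt() records the caller's buffer geometry in the request; dst_len is also an out-parameter, since drivers write the required size back on -EOVERFLOW (see the qat and hpre entries below). A minimal sketch of driving a transform through the helper; rsa_encrypt_once is illustrative, and key setup plus async (-EINPROGRESS) completion handling are elided:

```c
#include <crypto/akcipher.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

static int rsa_encrypt_once(struct crypto_akcipher *tfm,
			    void *src, unsigned int src_len,
			    void *dst, unsigned int dst_len)
{
	struct akcipher_request *req = akcipher_request_alloc(tfm, GFP_KERNEL);
	struct scatterlist src_sg, dst_sg;
	int ret;

	if (!req)
		return -ENOMEM;
	sg_init_one(&src_sg, src, src_len);
	sg_init_one(&dst_sg, dst, dst_len);
	/* the helper from the hits above: stashes both lengths in req */
	akcipher_request_set_crypt(req, &src_sg, &dst_sg, src_len, dst_len);
	ret = crypto_akcipher_encrypt(req);
	akcipher_request_free(req);
	return ret;
}
```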
kpp.h
     36  unsigned int dst_len;        member
    254  req->dst_len = output_len;   in kpp_request_set_output()
/linux/drivers/crypto/virtio/

virtio_crypto_skcipher_algs.c
    337  u64 dst_len;                                             in __virtio_crypto_skcipher_do_req() local
    387  dst_len = virtio_crypto_alg_sg_nents_length(req->dst);   in __virtio_crypto_skcipher_do_req()
    388  if (unlikely(dst_len > U32_MAX)) {                       in __virtio_crypto_skcipher_do_req()
    394  dst_len = min_t(unsigned int, req->cryptlen, dst_len);   in __virtio_crypto_skcipher_do_req()
    396  req->cryptlen, dst_len);                                 in __virtio_crypto_skcipher_do_req()
    398  if (unlikely(req->cryptlen + dst_len + ivsize +          in __virtio_crypto_skcipher_do_req()
    406  cpu_to_le32((uint32_t)dst_len);                          in __virtio_crypto_skcipher_do_req()
virtio_crypto_akcipher_algs.c
     99  akcipher_req->dst_len = len - sizeof(vc_req->status);                          in virtio_crypto_dataq_akcipher_callback()
    101  vc_akcipher_req->dst_buf, akcipher_req->dst_len);                              in virtio_crypto_dataq_akcipher_callback()
    234  unsigned int src_len = verify ? req->src_len + req->dst_len : req->src_len;   in __virtio_crypto_akcipher_do_req()
    256  dst_buf = kcalloc_node(req->dst_len, 1, GFP_KERNEL, node);                     in __virtio_crypto_akcipher_do_req()
    260  sg_init_one(&dstdata_sg, dst_buf, req->dst_len);                               in __virtio_crypto_akcipher_do_req()
    314  akcipher_req->para.dst_data_len = cpu_to_le32(req->dst_len);                   in virtio_crypto_rsa_do_req()
/linux/security/keys/

dh.c
    271  if (copy_from_user(outbuf + req->dst_len, kdfcopy->otherinfo,   in __keyctl_dh_compute()
    278  req->dst_len + kdfcopy->otherinfolen);                          in __keyctl_dh_compute()
    279  } else if (copy_to_user(buffer, outbuf, req->dst_len) == 0) {   in __keyctl_dh_compute()
    280  ret = req->dst_len;                                             in __keyctl_dh_compute()
/linux/drivers/misc/mei/

vsc-tp.c
    161  int ret, offset = 0, cpy_len, src_len, dst_len = sizeof(struct vsc_tp_packet);  in vsc_tp_xfer_helper() local
    189  cpy_len = min(src_len, dst_len);             in vsc_tp_xfer_helper()
    195  dst_len -= cpy_len;                          in vsc_tp_xfer_helper()
    206  dst_len = min(ilen, le16_to_cpu(ack.len));   in vsc_tp_xfer_helper()
    209  dst_len = sizeof(recv_crc);                  in vsc_tp_xfer_helper()
    225  dst_len = sizeof(recv_crc);                  in vsc_tp_xfer_helper()
/linux/kernel/bpf/

crypto.c
    270  u32 src_len, dst_len, siv_len;               in bpf_crypto_crypt() local
    280  dst_len = __bpf_dynptr_size(dst);            in bpf_crypto_crypt()
    281  if (!src_len || !dst_len)                    in bpf_crypto_crypt()
    290  pdst = __bpf_dynptr_data_rw(dst, dst_len);   in bpf_crypto_crypt()
/linux/net/sched/

act_tunnel_key.c
     88  tunnel_key_copy_geneve_opt(const struct nlattr *nla, void *dst, int dst_len,   in tunnel_key_copy_geneve_opt() argument
    123  WARN_ON(dst_len < opt_len);                    in tunnel_key_copy_geneve_opt()
    140  tunnel_key_copy_vxlan_opt(const struct nlattr *nla, void *dst, int dst_len,    in tunnel_key_copy_vxlan_opt() argument
    167  tunnel_key_copy_erspan_opt(const struct nlattr *nla, void *dst, int dst_len,   in tunnel_key_copy_erspan_opt() argument
    220  int dst_len, struct netlink_ext_ack *extack)   in tunnel_key_copy_opts() argument
    238  dst_len, extack);                              in tunnel_key_copy_opts()
    247  dst_len -= opt_len;                            in tunnel_key_copy_opts()
    258  dst_len, extack);                              in tunnel_key_copy_opts()
    270  dst_len, extack);                              in tunnel_key_copy_opts()
/linux/net/ipv6/

fib6_rules.c
    379  if (frh->dst_len)                                        in fib6_rule_configure()
    383  rule6->dst.plen = frh->dst_len;                          in fib6_rule_configure()
    413  if (frh->dst_len && (rule6->dst.plen != frh->dst_len))   in fib6_rule_compare()
    423  if (frh->dst_len &&                                      in fib6_rule_compare()
    435  frh->dst_len = rule6->dst.plen;                          in fib6_rule_fill()
/linux/drivers/crypto/intel/qat/qat_common/

qat_asym_algs.c
    174  areq->dst_len = req->ctx.dh->p_size;                         in qat_dh_cb()
    179  areq->dst_len, 1);                                           in qat_dh_cb()
    236  if (req->dst_len < ctx->p_size) {                            in qat_dh_compute_value()
    237  req->dst_len = ctx->p_size;                                  in qat_dh_compute_value()
    318  if (sg_is_last(req->dst) && req->dst_len == ctx->p_size) {   in qat_dh_compute_value()
    581  areq->dst_len = req->ctx.rsa->key_sz;                        in qat_rsa_cb()
    586  areq->dst_len, 1);                                           in qat_rsa_cb()
    714  if (req->dst_len < ctx->key_sz) {                            in qat_rsa_enc()
    715  req->dst_len = ctx->key_sz;                                  in qat_rsa_enc()
    848  if (req->dst_len < ctx->key_sz) {                            in qat_rsa_dec()
    [all …]
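The paired hits at 714-715 (likewise 236-237 and 848) are the convention noted under akcipher.h above: an undersized destination gets the required length written back into req->dst_len along with -EOVERFLOW, so the caller can resize and retry. Condensed into a hypothetical helper:

```c
#include <crypto/akcipher.h>
#include <linux/errno.h>

/* key_sz stands in for ctx->key_sz (or ctx->p_size on the DH path). */
static int check_dst_len(struct akcipher_request *req, unsigned int key_sz)
{
	if (req->dst_len < key_sz) {
		req->dst_len = key_sz;	/* report the size actually needed */
		return -EOVERFLOW;
	}
	return 0;
}
```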
/linux/drivers/crypto/hisilicon/hpre/

hpre_crypto.c
     445  areq->dst_len = ctx->key_sz;             in hpre_dh_cb()
     471  areq->dst_len = ctx->key_sz;             in hpre_rsa_cb()
     527  if (akreq->dst_len < ctx->key_sz) {      in hpre_msg_request_set()
     528  akreq->dst_len = ctx->key_sz;            in hpre_msg_request_set()
     541  if (kreq->dst_len < ctx->key_sz) {       in hpre_msg_request_set()
     542  kreq->dst_len = ctx->key_sz;             in hpre_msg_request_set()
    1482  areq->dst_len = ctx->key_sz << 1;        in hpre_ecdh_cb()
    1506  if (req->dst_len < ctx->key_sz << 1) {   in hpre_ecdh_msg_request_set()
    1507  req->dst_len = ctx->key_sz << 1;         in hpre_ecdh_msg_request_set()
    1796  areq->dst_len = ctx->key_sz;             in hpre_curve25519_cb()
    [all …]
/linux/drivers/crypto/ccp/

ccp-dmaengine.c
    367  unsigned int dst_offset, dst_len;   in ccp_create_desc() local
    387  dst_len = sg_dma_len(dst_sg);       in ccp_create_desc()
    405  if (!dst_len) {                     in ccp_create_desc()
    414  dst_len = sg_dma_len(dst_sg);       in ccp_create_desc()
    419  len = min(dst_len, src_len);        in ccp_create_desc()
    452  dst_len -= len;                     in ccp_create_desc()
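ccp_create_desc() advances two scatterlists in lockstep: each descriptor moves min(src_len, dst_len) bytes, and whichever side runs dry steps to its next segment. A self-contained model of that walk, with struct seg standing in for DMA-mapped sg entries and printf standing in for descriptor emission:

```c
#include <stddef.h>
#include <stdio.h>

struct seg { size_t len; };

static void walk(const struct seg *src, size_t ns,
		 const struct seg *dst, size_t nd)
{
	size_t si = 0, di = 0;
	size_t src_len = ns ? src[0].len : 0;
	size_t dst_len = nd ? dst[0].len : 0;

	while (si < ns && di < nd) {
		size_t len = src_len < dst_len ? src_len : dst_len;

		printf("desc: %zu bytes\n", len);	/* one hw descriptor */
		src_len -= len;
		dst_len -= len;
		if (!src_len && ++si < ns)	/* source segment exhausted */
			src_len = src[si].len;
		if (!dst_len && ++di < nd)	/* destination exhausted */
			dst_len = dst[di].len;
	}
}

int main(void)
{
	struct seg s[] = { { 4096 }, { 4096 } };
	struct seg d[] = { { 1024 }, { 7168 } };

	walk(s, 2, d, 2);	/* prints 1024, 3072, 4096 */
	return 0;
}
```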