
Searched refs:walk (Results 26 – 50 of 222) sorted by relevance

/linux/include/crypto/
ctr.h
27 struct skcipher_walk walk; in crypto_ctr_encrypt_walk() local
34 err = skcipher_walk_virt(&walk, req, false); in crypto_ctr_encrypt_walk()
36 while (walk.nbytes > 0) { in crypto_ctr_encrypt_walk()
37 u8 *dst = walk.dst.virt.addr; in crypto_ctr_encrypt_walk()
38 u8 *src = walk.src.virt.addr; in crypto_ctr_encrypt_walk()
39 int nbytes = walk.nbytes; in crypto_ctr_encrypt_walk()
42 if (nbytes < walk.total) { in crypto_ctr_encrypt_walk()
43 tail = walk.nbytes & (blocksize - 1); in crypto_ctr_encrypt_walk()
50 fn(tfm, walk.iv, buf); in crypto_ctr_encrypt_walk()
53 crypto_inc(walk.iv, blocksize); in crypto_ctr_encrypt_walk()
[all …]
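
Note: the ctr.h hits above all come from crypto_ctr_encrypt_walk(), the generic CTR walk helper. A condensed sketch of the loop those fragments belong to (reconstructed from the lines shown; error handling trimmed):

static int ctr_walk(struct skcipher_request *req,
                    void (*fn)(struct crypto_skcipher *, const u8 *, u8 *))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        unsigned int blocksize = crypto_skcipher_chunksize(tfm);
        u8 buf[MAX_CIPHER_BLOCKSIZE];            /* one keystream block */
        struct skcipher_walk walk;
        int err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                u8 *dst = walk.dst.virt.addr;
                u8 *src = walk.src.virt.addr;
                int nbytes = walk.nbytes;
                int tail = 0;

                if (nbytes < walk.total) {       /* not the final chunk: */
                        tail = nbytes & (blocksize - 1);
                        nbytes -= tail;          /* keep it block-aligned */
                }
                while (nbytes >= blocksize) {
                        fn(tfm, walk.iv, buf);           /* E_k(counter) */
                        crypto_xor_cpy(dst, src, buf, blocksize);
                        crypto_inc(walk.iv, blocksize);  /* counter++ */
                        dst += blocksize;
                        src += blocksize;
                        nbytes -= blocksize;
                }
                err = skcipher_walk_done(&walk, tail);   /* hand back tail */
        }
        return err;
}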
/linux/mm/
hmm.c
67 struct vm_area_struct *vma = walk->vma; in hmm_vma_fault()
161 if (!walk->vma) { in hmm_vma_walk_hole()
323 struct mm_walk *walk) in hmm_vma_walk_pmd() argument
342 pmd_migration_entry_wait(walk->mm, pmdp); in hmm_vma_walk_pmd()
412 struct mm_walk *walk) in hmm_vma_walk_pud() argument
424 walk->action = ACTION_CONTINUE; in hmm_vma_walk_pud()
457 walk->action = ACTION_SUBTREE; in hmm_vma_walk_pud()
470 struct mm_walk *walk) in hmm_vma_walk_hugetlb_entry() argument
475 struct vm_area_struct *vma = walk->vma; in hmm_vma_walk_hugetlb_entry()
521 struct mm_walk *walk) in hmm_vma_walk_test() argument
[all …]
mincore.c
26 unsigned long end, struct mm_walk *walk) in mincore_hugetlb() argument
30 unsigned char *vec = walk->private; in mincore_hugetlb()
39 walk->private = vec; in mincore_hugetlb()
93 struct mm_walk *walk) in mincore_unmapped_range() argument
95 walk->private += __mincore_unmapped_range(addr, end, in mincore_unmapped_range()
96 walk->vma, walk->private); in mincore_unmapped_range()
101 struct mm_walk *walk) in mincore_pte_range() argument
104 struct vm_area_struct *vma = walk->vma; in mincore_pte_range()
106 unsigned char *vec = walk->private; in mincore_pte_range()
118 walk->action = ACTION_AGAIN; in mincore_pte_range()
[all …]
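
Note: the hmm.c and mincore.c hits above are mm_walk callbacks; each receives the shared struct mm_walk cursor, with walk->vma, walk->private, and walk->action visible in the fragments. A minimal sketch of such a pte-level callback (my_pte_entry and the counter cookie are hypothetical):

static int my_pte_entry(pte_t *pte, unsigned long addr,
                        unsigned long next, struct mm_walk *walk)
{
        unsigned long *present = walk->private;   /* caller's cookie */

        if (pte_present(ptep_get(pte)))           /* (*pte) on older kernels */
                (*present)++;
        return 0;                                 /* nonzero aborts the walk */
}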
/linux/arch/powerpc/crypto/
aes-spe-glue.c
196 ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ecb_crypt()
199 ppc_decrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ecb_crypt()
203 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ppc_ecb_crypt()
235 ppc_encrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr, in ppc_cbc_crypt()
237 walk.iv); in ppc_cbc_crypt()
239 ppc_decrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr, in ppc_cbc_crypt()
241 walk.iv); in ppc_cbc_crypt()
244 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ppc_cbc_crypt()
276 ppc_crypt_ctr(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ctr_crypt()
280 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ppc_ctr_crypt()
[all …]
aes_ctr.c
70 struct skcipher_walk *walk) in p8_aes_ctr_final() argument
72 u8 *ctrblk = walk->iv; in p8_aes_ctr_final()
74 u8 *src = walk->src.virt.addr; in p8_aes_ctr_final()
75 u8 *dst = walk->dst.virt.addr; in p8_aes_ctr_final()
76 unsigned int nbytes = walk->nbytes; in p8_aes_ctr_final()
94 struct skcipher_walk walk; in p8_aes_ctr_crypt() local
112 walk.dst.virt.addr, in p8_aes_ctr_crypt()
114 &ctx->enc_key, walk.iv); in p8_aes_ctr_crypt()
120 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
126 p8_aes_ctr_final(ctx, &walk); in p8_aes_ctr_crypt()
[all …]
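
Note: p8_aes_ctr_final() above is the standard CTR tail step: when fewer than AES_BLOCK_SIZE bytes remain, encrypt the counter once and XOR only the leftover bytes. The shape of that step (my_ctx and encrypt_block() are stand-ins for the driver's context and block-cipher call):

static void ctr_final(struct my_ctx *ctx, struct skcipher_walk *walk)
{
        u8 keystream[AES_BLOCK_SIZE];
        const u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;       /* < AES_BLOCK_SIZE */

        encrypt_block(ctx, keystream, walk->iv);  /* one keystream block */
        crypto_xor_cpy(dst, src, keystream, nbytes);
        crypto_inc(walk->iv, AES_BLOCK_SIZE);
}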
chacha-p10-glue.c
90 struct skcipher_walk walk; in chacha_p10_stream_xor() local
94 err = skcipher_walk_virt(&walk, req, false); in chacha_p10_stream_xor()
100 while (walk.nbytes > 0) { in chacha_p10_stream_xor()
101 unsigned int nbytes = walk.nbytes; in chacha_p10_stream_xor()
103 if (nbytes < walk.total) in chacha_p10_stream_xor()
104 nbytes = rounddown(nbytes, walk.stride); in chacha_p10_stream_xor()
107 chacha_crypt_generic(state, walk.dst.virt.addr, in chacha_p10_stream_xor()
108 walk.src.virt.addr, nbytes, in chacha_p10_stream_xor()
112 chacha_p10_do_8x(state, walk.dst.virt.addr, in chacha_p10_stream_xor()
113 walk.src.virt.addr, nbytes, ctx->nrounds); in chacha_p10_stream_xor()
[all …]
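
Note: chacha_p10_stream_xor() shows the stream-cipher variant of the walk loop: every chunk except the last is rounded down to walk.stride so the vector code always sees whole blocks. Reduced to its skeleton (xor_stream() stands in for chacha_p10_do_8x()/chacha_crypt_generic()):

err = skcipher_walk_virt(&walk, req, false);
while (walk.nbytes > 0) {
        unsigned int nbytes = walk.nbytes;

        if (nbytes < walk.total)        /* only the last chunk may be short */
                nbytes = rounddown(nbytes, walk.stride);

        xor_stream(state, walk.dst.virt.addr, walk.src.virt.addr, nbytes);

        /* report the unprocessed remainder back to the walk */
        err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
}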
/linux/arch/x86/crypto/
aegis128-aesni-glue.c
73 struct scatter_walk walk; in crypto_aegis128_aesni_process_ad() local
77 scatterwalk_start(&walk, sg_src); in crypto_aegis128_aesni_process_ad()
107 scatterwalk_advance(&walk, size); in crypto_aegis128_aesni_process_ad()
108 scatterwalk_done(&walk, 0, assoclen); in crypto_aegis128_aesni_process_ad()
124 walk->src.virt.addr, walk->dst.virt.addr); in crypto_aegis128_aesni_process_crypt()
125 skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE); in crypto_aegis128_aesni_process_crypt()
128 if (walk->nbytes) { in crypto_aegis128_aesni_process_crypt()
129 ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr, in crypto_aegis128_aesni_process_crypt()
130 walk->dst.virt.addr); in crypto_aegis128_aesni_process_crypt()
131 skcipher_walk_done(walk, 0); in crypto_aegis128_aesni_process_crypt()
[all …]
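
Note: crypto_aegis128_aesni_process_ad() walks the associated data with a scatter_walk rather than an skcipher_walk, since AD is read-only and never written back. A hedged sketch of that traversal (absorb() is a stand-in for the driver's state-update call):

struct scatter_walk walk;
unsigned int left = assoclen;

scatterwalk_start(&walk, sg_src);
while (left) {
        unsigned int n = scatterwalk_clamp(&walk, left); /* cap at page end */
        const u8 *p = scatterwalk_map(&walk);

        absorb(state, p, n);                             /* stand-in */
        scatterwalk_unmap((void *)p);
        scatterwalk_advance(&walk, n);
        scatterwalk_done(&walk, 0, left - n);
        left -= n;
}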
ecb_cbc_helpers.h
18 struct skcipher_walk walk; \
19 int err = skcipher_walk_virt(&walk, (req), false); \
20 while (walk.nbytes > 0) { \
21 unsigned int nbytes = walk.nbytes; \
24 const u8 *src = walk.src.virt.addr; \
25 u8 *dst = walk.dst.virt.addr; \
51 const u8 *__iv = walk.iv; \
58 memcpy(walk.iv, __iv, __bsize); \
72 crypto_xor(dst, walk.iv, __bsize); \
73 memcpy(walk.iv, __iv, __bsize); \
[all …]
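
Note: ecb_cbc_helpers.h wraps the same skcipher_walk loop in macros so each x86 glue driver only has to plug in its block functions. Roughly how a driver uses them (MY_BLOCK_SIZE and the my_enc_* functions are hypothetical):

static int ecb_encrypt(struct skcipher_request *req)
{
        ECB_WALK_START(req, MY_BLOCK_SIZE, 8);
        ECB_BLOCK(8, my_enc_8way);   /* wide SIMD path, 8 blocks per call */
        ECB_BLOCK(1, my_enc_one);    /* then any leftover single blocks */
        ECB_WALK_END();
}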
aria_gfni_avx512_glue.c
80 struct skcipher_walk walk; in aria_avx512_ctr_encrypt() local
86 while ((nbytes = walk.nbytes) > 0) { in aria_avx512_ctr_encrypt()
87 const u8 *src = walk.src.virt.addr; in aria_avx512_ctr_encrypt()
88 u8 *dst = walk.dst.virt.addr; in aria_avx512_ctr_encrypt()
94 walk.iv); in aria_avx512_ctr_encrypt()
105 walk.iv); in aria_avx512_ctr_encrypt()
116 walk.iv); in aria_avx512_ctr_encrypt()
124 memcpy(&req_ctx->keystream[0], walk.iv, in aria_avx512_ctr_encrypt()
126 crypto_inc(walk.iv, ARIA_BLOCK_SIZE); in aria_avx512_ctr_encrypt()
138 if (walk.nbytes == walk.total && nbytes > 0) { in aria_avx512_ctr_encrypt()
[all …]
aria_aesni_avx_glue.c
91 struct skcipher_walk walk; in aria_avx_ctr_encrypt() local
95 err = skcipher_walk_virt(&walk, req, false); in aria_avx_ctr_encrypt()
97 while ((nbytes = walk.nbytes) > 0) { in aria_avx_ctr_encrypt()
98 const u8 *src = walk.src.virt.addr; in aria_avx_ctr_encrypt()
99 u8 *dst = walk.dst.virt.addr; in aria_avx_ctr_encrypt()
105 walk.iv); in aria_avx_ctr_encrypt()
114 crypto_inc(walk.iv, ARIA_BLOCK_SIZE); in aria_avx_ctr_encrypt()
126 if (walk.nbytes == walk.total && nbytes > 0) { in aria_avx_ctr_encrypt()
127 memcpy(&req_ctx->keystream[0], walk.iv, in aria_avx_ctr_encrypt()
129 crypto_inc(walk.iv, ARIA_BLOCK_SIZE); in aria_avx_ctr_encrypt()
[all …]
/linux/drivers/atm/
idt77105.c
86 struct idt77105_priv *walk; in idt77105_stats_timer_func() local
91 for (walk = idt77105_all; walk; walk = walk->next) { in idt77105_stats_timer_func()
92 dev = walk->dev; in idt77105_stats_timer_func()
94 stats = &walk->stats; in idt77105_stats_timer_func()
115 struct idt77105_priv *walk; in idt77105_restart_timer_func() local
120 for (walk = idt77105_all; walk; walk = walk->next) { in idt77105_restart_timer_func()
121 dev = walk->dev; in idt77105_restart_timer_func()
323 struct idt77105_priv *walk, *prev; in idt77105_stop() local
332 walk != NULL; in idt77105_stop()
333 prev = walk, walk = walk->next) { in idt77105_stop()
[all …]
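
Note: the idt77105.c hits are unrelated to the crypto and page-table walkers; here `walk` is just the cursor over a singly linked list of PHY private structs. The idiom those lines come from (update_stats() is a stand-in for the per-device work):

struct idt77105_priv *walk;

for (walk = idt77105_all; walk; walk = walk->next)
        update_stats(walk->dev, &walk->stats);   /* visit each device */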
/linux/arch/riscv/crypto/
chacha-riscv64-glue.c
25 struct skcipher_walk walk; in riscv64_chacha20_crypt() local
35 err = skcipher_walk_virt(&walk, req, false); in riscv64_chacha20_crypt()
36 while (walk.nbytes) { in riscv64_chacha20_crypt()
37 nbytes = walk.nbytes & ~(CHACHA_BLOCK_SIZE - 1); in riscv64_chacha20_crypt()
38 tail_bytes = walk.nbytes & (CHACHA_BLOCK_SIZE - 1); in riscv64_chacha20_crypt()
41 chacha20_zvkb(ctx->key, walk.src.virt.addr, in riscv64_chacha20_crypt()
42 walk.dst.virt.addr, nbytes, iv); in riscv64_chacha20_crypt()
45 if (walk.nbytes == walk.total && tail_bytes > 0) { in riscv64_chacha20_crypt()
46 memcpy(block_buffer, walk.src.virt.addr + nbytes, in riscv64_chacha20_crypt()
50 memcpy(walk.dst.virt.addr + nbytes, block_buffer, in riscv64_chacha20_crypt()
[all …]
/linux/arch/sparc/crypto/
des_glue.c
99 struct skcipher_walk walk; in __ecb_crypt() local
112 des_sparc64_ecb_crypt(walk.src.virt.addr, walk.dst.virt.addr, in __ecb_crypt()
140 struct skcipher_walk walk; in __cbc_crypt() local
155 walk.dst.virt.addr, in __cbc_crypt()
158 walk.iv); in __cbc_crypt()
161 walk.dst.virt.addr, in __cbc_crypt()
164 walk.iv); in __cbc_crypt()
248 struct skcipher_walk walk; in __ecb3_crypt() local
264 walk.dst.virt.addr, in __ecb3_crypt()
314 walk.iv); in __cbc3_crypt()
[all …]
camellia_glue.c
92 struct skcipher_walk walk; in __ecb_crypt() local
102 err = skcipher_walk_virt(&walk, req, true); in __ecb_crypt()
111 while ((nbytes = walk.nbytes) != 0) { in __ecb_crypt()
112 op(walk.src.virt.addr, walk.dst.virt.addr, in __ecb_crypt()
142 struct skcipher_walk walk; in cbc_encrypt() local
152 err = skcipher_walk_virt(&walk, req, true); in cbc_encrypt()
158 while ((nbytes = walk.nbytes) != 0) { in cbc_encrypt()
159 op(walk.src.virt.addr, walk.dst.virt.addr, in cbc_encrypt()
171 struct skcipher_walk walk; in cbc_decrypt() local
187 while ((nbytes = walk.nbytes) != 0) { in cbc_decrypt()
[all …]
/linux/crypto/
ctr.c
33 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_final()
36 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_final()
37 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_final()
52 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_segment()
53 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_segment()
54 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_segment()
80 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_inplace()
81 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_inplace()
104 struct skcipher_walk walk; in crypto_ctr_crypt() local
111 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_ctr_crypt()
[all …]
scatterwalk.c
26 void scatterwalk_copychunks(void *buf, struct scatter_walk *walk, in scatterwalk_copychunks() argument
30 unsigned int len_this_page = scatterwalk_pagelen(walk); in scatterwalk_copychunks()
37 vaddr = scatterwalk_map(walk); in scatterwalk_copychunks()
42 scatterwalk_advance(walk, len_this_page); in scatterwalk_copychunks()
50 scatterwalk_pagedone(walk, out & 1, 1); in scatterwalk_copychunks()
58 struct scatter_walk walk; in scatterwalk_map_and_copy() local
66 scatterwalk_start(&walk, sg); in scatterwalk_map_and_copy()
67 scatterwalk_copychunks(buf, &walk, nbytes, out); in scatterwalk_map_and_copy()
68 scatterwalk_done(&walk, out, 0); in scatterwalk_map_and_copy()
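
Note: scatterwalk_map_and_copy() is the convenience wrapper built on scatterwalk_copychunks() above: it copies nbytes between a linear buffer and a scatterlist starting at a byte offset. Typical AEAD tag handling (the offset arithmetic is illustrative):

u8 tag[16];

/* out = 0: scatterlist -> buffer (read the received tag) */
scatterwalk_map_and_copy(tag, req->src, req->assoclen + cryptlen,
                         sizeof(tag), 0);

/* out = 1: buffer -> scatterlist (append the computed tag) */
scatterwalk_map_and_copy(tag, req->dst, req->assoclen + cryptlen,
                         sizeof(tag), 1);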
aegis128-core.c
281 struct scatter_walk walk; in crypto_aegis128_process_ad() local
285 scatterwalk_start(&walk, sg_src); in crypto_aegis128_process_ad()
324 struct skcipher_walk *walk, in crypto_aegis128_process_crypt() argument
331 while (walk->nbytes) { in crypto_aegis128_process_crypt()
334 if (nbytes < walk->total) in crypto_aegis128_process_crypt()
337 crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes); in crypto_aegis128_process_crypt()
339 err = skcipher_walk_done(walk, walk->nbytes - nbytes); in crypto_aegis128_process_crypt()
395 struct skcipher_walk walk; in crypto_aegis128_encrypt_generic() local
418 struct skcipher_walk walk; in crypto_aegis128_decrypt_generic() local
456 struct skcipher_walk walk; in crypto_aegis128_encrypt_simd() local
[all …]
/linux/arch/s390/crypto/
des_s390.c
93 struct skcipher_walk walk; in ecb_desall_crypt() local
98 while ((nbytes = walk.nbytes) != 0) { in ecb_desall_crypt()
102 walk.src.virt.addr, n); in ecb_desall_crypt()
112 struct skcipher_walk walk; in cbc_desall_crypt() local
125 while ((nbytes = walk.nbytes) != 0) { in cbc_desall_crypt()
129 walk.src.virt.addr, n); in cbc_desall_crypt()
323 struct skcipher_walk walk; in ctr_desall_crypt() local
336 walk.src.virt.addr, n, ctrptr); in ctr_desall_crypt()
340 crypto_inc(walk.iv, DES_BLOCK_SIZE); in ctr_desall_crypt()
348 DES_BLOCK_SIZE, walk.iv); in ctr_desall_crypt()
[all …]
chacha-glue.c
39 struct skcipher_walk walk; in chacha20_s390() local
43 rc = skcipher_walk_virt(&walk, req, false); in chacha20_s390()
46 while (walk.nbytes > 0) { in chacha20_s390()
47 nbytes = walk.nbytes; in chacha20_s390()
48 if (nbytes < walk.total) in chacha20_s390()
49 nbytes = round_down(nbytes, walk.stride); in chacha20_s390()
52 chacha_crypt_generic(state, walk.dst.virt.addr, in chacha20_s390()
53 walk.src.virt.addr, nbytes, in chacha20_s390()
56 chacha20_crypt_s390(state, walk.dst.virt.addr, in chacha20_s390()
57 walk.src.virt.addr, nbytes, in chacha20_s390()
[all …]
paes_s390.c
215 struct skcipher_walk walk; in ecb_paes_crypt() local
230 while ((nbytes = walk.nbytes) != 0) { in ecb_paes_crypt()
234 walk.dst.virt.addr, walk.src.virt.addr, n); in ecb_paes_crypt()
330 struct skcipher_walk walk; in cbc_paes_crypt() local
347 while ((nbytes = walk.nbytes) != 0) { in cbc_paes_crypt()
351 walk.dst.virt.addr, walk.src.virt.addr, n); in cbc_paes_crypt()
496 struct skcipher_walk walk; in xts_paes_crypt() local
531 walk.dst.virt.addr, walk.src.virt.addr, n); in xts_paes_crypt()
647 struct skcipher_walk walk; in ctr_paes_crypt() local
670 walk.src.virt.addr, n, ctrptr); in ctr_paes_crypt()
[all …]
aes_s390.c
62 struct scatter_walk walk; member
226 struct skcipher_walk walk; in ecb_aes_crypt() local
238 walk.dst.virt.addr, walk.src.virt.addr, n); in ecb_aes_crypt()
322 struct skcipher_walk walk; in cbc_aes_crypt() local
342 walk.dst.virt.addr, walk.src.virt.addr, n); in cbc_aes_crypt()
422 struct skcipher_walk walk; in xts_aes_crypt() local
468 walk.dst.virt.addr, walk.src.virt.addr, n); in xts_aes_crypt()
570 struct skcipher_walk walk; in ctr_aes_crypt() local
603 AES_BLOCK_SIZE, walk.iv); in ctr_aes_crypt()
678 scatterwalk_start(&gw->walk, sg); in gcm_walk_start()
[all …]
/linux/include/linux/
pagewalk.h
60 unsigned long next, struct mm_walk *walk);
62 unsigned long next, struct mm_walk *walk);
64 unsigned long next, struct mm_walk *walk);
66 unsigned long next, struct mm_walk *walk);
68 unsigned long next, struct mm_walk *walk);
70 int depth, struct mm_walk *walk);
73 struct mm_walk *walk);
75 struct mm_walk *walk);
77 struct mm_walk *walk);
78 void (*post_vma)(struct mm_walk *walk);
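
Note: these are the mm_walk_ops callback slots that the hmm.c and mincore.c hits above fill in. Wiring a walk up looks roughly like this (my_pte_entry is the hypothetical callback sketched earlier; the walk requires mmap_lock):

static const struct mm_walk_ops my_walk_ops = {
        .pte_entry = my_pte_entry,
};

unsigned long present = 0;
int err;

mmap_read_lock(mm);
err = walk_page_range(mm, start, end, &my_walk_ops, &present);
mmap_read_unlock(mm);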
/linux/drivers/crypto/
padlock-aes.c
348 struct skcipher_walk walk; in ecb_aes_encrypt() local
356 while ((nbytes = walk.nbytes) != 0) { in ecb_aes_encrypt()
357 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr, in ecb_aes_encrypt()
373 struct skcipher_walk walk; in ecb_aes_decrypt() local
381 while ((nbytes = walk.nbytes) != 0) { in ecb_aes_decrypt()
382 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr, in ecb_aes_decrypt()
413 struct skcipher_walk walk; in cbc_aes_encrypt() local
421 while ((nbytes = walk.nbytes) != 0) { in cbc_aes_encrypt()
423 walk.dst.virt.addr, ctx->E, in cbc_aes_encrypt()
440 struct skcipher_walk walk; in cbc_aes_decrypt() local
[all …]
/linux/arch/arm/crypto/
ghash-ce-glue.c
456 struct scatter_walk walk; in gcm_calculate_auth_mac() local
466 scatterwalk_start(&walk, sg_next(walk.sg)); in gcm_calculate_auth_mac()
494 struct skcipher_walk walk; in gcm_encrypt() local
513 src = walk.src.virt.addr; in gcm_encrypt()
514 dst = walk.dst.virt.addr; in gcm_encrypt()
523 if (walk.nbytes == walk.total) { in gcm_encrypt()
567 if (walk.nbytes) { in gcm_encrypt()
609 src = walk.src.virt.addr; in gcm_decrypt()
610 dst = walk.dst.virt.addr; in gcm_decrypt()
619 if (walk.nbytes == walk.total) { in gcm_decrypt()
[all …]
/linux/arch/arm64/crypto/
ghash-ce-glue.c
305 struct scatter_walk walk; in gcm_calculate_auth_mac() local
315 scatterwalk_start(&walk, sg_next(walk.sg)); in gcm_calculate_auth_mac()
318 p = scatterwalk_map(&walk); in gcm_calculate_auth_mac()
340 struct skcipher_walk walk; in gcm_encrypt() local
360 int nbytes = walk.nbytes; in gcm_encrypt()
385 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in gcm_encrypt()
386 } while (walk.nbytes); in gcm_encrypt()
404 struct skcipher_walk walk; in gcm_decrypt() local
430 int nbytes = walk.nbytes; in gcm_decrypt()
455 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in gcm_decrypt()
[all …]
