Searched refs:walk (Results 1 – 25 of 222) sorted by relevance

/linux/crypto/
skcipher.c
168 if (walk->iv != walk->oiv) in skcipher_walk_done()
169 memcpy(walk->oiv, walk->iv, walk->ivsize); in skcipher_walk_done()
170 if (walk->buffer != walk->page) in skcipher_walk_done()
207 if (!err && walk->iv != walk->oiv) in skcipher_walk_complete()
208 memcpy(walk->oiv, walk->iv, walk->ivsize); in skcipher_walk_complete()
209 if (walk->buffer != walk->page) in skcipher_walk_complete()
235 walk->buffer = walk->page; in skcipher_next_slow()
307 if (offset_in_page(walk->page) + walk->nbytes + walk->stride > in skcipher_next_copy()
311 walk->page += walk->nbytes; in skcipher_next_copy()
365 if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) { in skcipher_walk_next()
[all …]
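
The skcipher_walk core above is what maps the request's scatterlists chunk by chunk and bounces misaligned IVs and buffers. Every mode driver in these results consumes it through the same loop; a minimal sketch of that consumer side, where MY_BLOCK_SIZE and my_ecb_blocks() are hypothetical stand-ins for a real primitive:

#include <crypto/internal/skcipher.h>

static int my_ecb_encrypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                /* whole blocks available in this mapped chunk */
                unsigned int n = walk.nbytes - walk.nbytes % MY_BLOCK_SIZE;

                my_ecb_blocks(walk.dst.virt.addr, walk.src.virt.addr, n);
                /* report what was NOT consumed; the core maps the rest */
                err = skcipher_walk_done(&walk, walk.nbytes - n);
        }
        return err;
}
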
pcbc.c
26 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_segment()
28 u8 * const iv = walk->iv; in crypto_pcbc_encrypt_segment()
49 u8 * const iv = walk->iv; in crypto_pcbc_encrypt_inplace()
68 struct skcipher_walk walk; in crypto_pcbc_encrypt() local
74 while (walk.nbytes) { in crypto_pcbc_encrypt()
75 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_pcbc_encrypt()
95 u8 * const iv = walk->iv; in crypto_pcbc_decrypt_segment()
116 u8 * const iv = walk->iv; in crypto_pcbc_decrypt_inplace()
135 struct skcipher_walk walk; in crypto_pcbc_decrypt() local
141 while (walk.nbytes) { in crypto_pcbc_decrypt()
[all …]
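
PCBC chains more aggressively than CBC: the running IV absorbs both the plaintext and the ciphertext of every block, which is what the crypto_pcbc_encrypt_segment() matches above compute. A sketch of that chaining step under a one-block crypto_cipher, with the surrounding walk loop omitted:

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>

static void pcbc_encrypt_blocks(struct crypto_cipher *tfm, u8 *dst,
                                const u8 *src, u8 *iv,
                                unsigned int nblocks, unsigned int bsize)
{
        while (nblocks--) {
                crypto_xor(iv, src, bsize);              /* iv ^= P    */
                crypto_cipher_encrypt_one(tfm, dst, iv); /* C = E(iv)  */
                crypto_xor_cpy(iv, dst, src, bsize);     /* iv = C ^ P */
                src += bsize;
                dst += bsize;
        }
}
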
xctr.c
38 const u8 *src = walk->src.virt.addr; in crypto_xctr_crypt_final()
39 u8 *dst = walk->dst.virt.addr; in crypto_xctr_crypt_final()
40 unsigned int nbytes = walk->nbytes; in crypto_xctr_crypt_final()
55 u8 *dst = walk->dst.virt.addr; in crypto_xctr_crypt_segment()
56 unsigned int nbytes = walk->nbytes; in crypto_xctr_crypt_segment()
80 unsigned int nbytes = walk->nbytes; in crypto_xctr_crypt_inplace()
81 u8 *data = walk->src.virt.addr; in crypto_xctr_crypt_inplace()
104 struct skcipher_walk walk; in crypto_xctr_crypt() local
112 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_xctr_crypt()
119 byte_ctr += walk.nbytes - nbytes; in crypto_xctr_crypt()
[all …]
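
XCTR differs from plain CTR in that the block counter is little-endian and XORed into the IV rather than stored in it, so the IV must be restored after each block. A sketch of one keystream block on that pattern; xctr_one_block() is illustrative, the crypto_* helpers are the real API:

#include <asm/byteorder.h>
#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>

#define XCTR_BLOCKSIZE 16

static void xctr_one_block(struct crypto_cipher *tfm, u8 *dst,
                           const u8 *src, u8 *iv, u32 block_index)
{
        u8 keystream[XCTR_BLOCKSIZE];
        __le32 ctr = cpu_to_le32(block_index + 1); /* XCTR counts from 1 */

        crypto_xor(iv, (u8 *)&ctr, sizeof(ctr));   /* iv ^= le32(ctr) */
        crypto_cipher_encrypt_one(tfm, keystream, iv);
        crypto_xor_cpy(dst, keystream, src, XCTR_BLOCKSIZE);
        crypto_xor(iv, (u8 *)&ctr, sizeof(ctr));   /* restore the IV  */
}
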
ahash.c
146 walk->data = kmap_local_page(walk->pg); in hash_walk_next()
156 sg = walk->sg; in hash_walk_new_entry()
158 walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); in hash_walk_new_entry()
159 walk->offset = offset_in_page(walk->offset); in hash_walk_new_entry()
162 if (walk->entrylen > walk->total) in hash_walk_new_entry()
163 walk->entrylen = walk->total; in hash_walk_new_entry()
164 walk->total -= walk->entrylen; in hash_walk_new_entry()
171 walk->data -= walk->offset; in crypto_hash_walk_done()
181 walk->pg++; in crypto_hash_walk_done()
185 if (!walk->total) in crypto_hash_walk_done()
[all …]
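
These helpers implement the hash-side walk: crypto_hash_walk_first() and crypto_hash_walk_done() kmap one page of the request's scatterlist at a time and hand it to the digest. A minimal consumer sketch, with struct my_state and my_update() standing in for a real digest state; note that nbytes doubles as the error code fed back into crypto_hash_walk_done():

#include <crypto/internal/hash.h>

static int my_ahash_update(struct ahash_request *req, struct my_state *st)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = my_update(st, walk.data, nbytes); /* 0 on success */

        return nbytes;
}
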
/linux/include/crypto/
scatterwalk.h
31 unsigned int len = walk->sg->offset + walk->sg->length - walk->offset; in scatterwalk_pagelen()
46 walk->offset += nbytes; in scatterwalk_advance()
51 return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); in scatterwalk_page()
62 walk->sg = sg; in scatterwalk_start()
63 walk->offset = sg->offset; in scatterwalk_start()
69 offset_in_page(walk->offset); in scatterwalk_map()
78 page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT); in scatterwalk_pagedone()
82 if (more && walk->offset >= walk->sg->offset + walk->sg->length) in scatterwalk_pagedone()
83 scatterwalk_start(walk, sg_next(walk->sg)); in scatterwalk_pagedone()
89 if (!more || walk->offset >= walk->sg->offset + walk->sg->length || in scatterwalk_done()
[all …]
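
scatterwalk is the lowest layer in these results: it only tracks an (sg, offset) cursor and never maps more than a page at a time. A sketch of the canonical loop (the CCM/GCM auth-data walkers further down have exactly this shape); process() is a placeholder:

#include <crypto/scatterwalk.h>

static void walk_and_process(struct scatterlist *sg, unsigned int len,
                             void (*process)(const u8 *p, unsigned int n))
{
        struct scatter_walk walk;

        scatterwalk_start(&walk, sg);

        do {
                unsigned int n = scatterwalk_clamp(&walk, len);
                u8 *p;

                if (!n) { /* entry exhausted: step to the next one */
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }

                p = scatterwalk_map(&walk);
                process(p, n);
                scatterwalk_unmap(p);

                scatterwalk_advance(&walk, n);
                len -= n;
                scatterwalk_done(&walk, 0, len); /* finish page if crossed */
        } while (len);
}
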
/linux/mm/
pagewalk.c
42 struct mm_walk *walk) in walk_pte_range() argument
48 if (walk->no_vma) { in walk_pte_range()
159 if (walk->vma) in walk_pmd_range()
213 if (walk->vma) in walk_pud_range()
272 if (walk->pgd) in walk_pgd_range()
486 if (!walk.mm) in walk_page_range()
494 walk.vma = NULL; in walk_page_range()
499 walk.vma = NULL; in walk_page_range()
505 walk.vma = vma; in walk_page_range()
622 if (!walk.mm) in walk_page_vma()
[all …]
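
walk_page_range() descends the page tables and fires the callbacks registered in a struct mm_walk_ops. A small consumer sketch that counts present PTEs in a range; the callback signature and the locking rule are the real contract, the counting itself is illustrative:

#include <linux/mm.h>
#include <linux/pagewalk.h>

static int count_pte_entry(pte_t *pte, unsigned long addr,
                           unsigned long next, struct mm_walk *walk)
{
        unsigned long *count = walk->private;

        if (pte_present(ptep_get(pte)))
                (*count)++;
        return 0;
}

static const struct mm_walk_ops count_ops = {
        .pte_entry = count_pte_entry,
};

static unsigned long count_present_ptes(struct mm_struct *mm,
                                        unsigned long start,
                                        unsigned long end)
{
        unsigned long count = 0;

        mmap_read_lock(mm); /* walk_page_range() asserts the mmap lock */
        walk_page_range(mm, start, end, &count_ops, &count);
        mmap_read_unlock(mm);
        return count;
}
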
ptdump.c
19 struct ptdump_state *st = walk->private; in note_kasan_page_table()
23 walk->action = ACTION_CONTINUE; in note_kasan_page_table()
32 struct ptdump_state *st = walk->private; in ptdump_pgd_entry()
38 return note_kasan_page_table(walk, addr); in ptdump_pgd_entry()
46 walk->action = ACTION_CONTINUE; in ptdump_pgd_entry()
55 struct ptdump_state *st = walk->private; in ptdump_p4d_entry()
69 walk->action = ACTION_CONTINUE; in ptdump_p4d_entry()
78 struct ptdump_state *st = walk->private; in ptdump_pud_entry()
92 walk->action = ACTION_CONTINUE; in ptdump_pud_entry()
101 struct ptdump_state *st = walk->private; in ptdump_pmd_entry()
[all …]
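
The walk->action = ACTION_CONTINUE assignments above are how a callback tells the pagewalk core not to descend below the current level. A sketch of the same idiom in a pmd-level callback that treats leaf (huge) entries as terminal; my_note() is hypothetical:

#include <linux/pagewalk.h>

static int my_pmd_entry(pmd_t *pmd, unsigned long addr,
                        unsigned long next, struct mm_walk *walk)
{
        pmd_t val = pmdp_get(pmd);

        if (pmd_leaf(val)) {
                my_note(walk->private, addr, pmd_val(val));
                walk->action = ACTION_CONTINUE; /* skip the PTE level */
        }
        return 0;
}
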
mapping_dirty_helpers.c
35 struct mm_walk *walk) in wp_pte() argument
127 struct mm_walk *walk) in wp_clean_pmd_entry() argument
134 walk->action = ACTION_CONTINUE; in wp_clean_pmd_entry()
149 struct mm_walk *walk) in wp_clean_pud_entry() argument
157 walk->action = ACTION_CONTINUE; in wp_clean_pud_entry()
170 struct mm_walk *walk) in wp_clean_pre_vma() argument
178 walk->mm, start, end); in wp_clean_pre_vma()
187 inc_tlb_flush_pending(walk->mm); in wp_clean_pre_vma()
202 if (mm_tlb_flush_nested(walk->mm)) in wp_clean_post_vma()
210 dec_tlb_flush_pending(walk->mm); in wp_clean_post_vma()
[all …]
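
The inc_tlb_flush_pending()/dec_tlb_flush_pending() pair above brackets the whole write-protect walk so that concurrent page-table users can see a TLB flush is outstanding. A condensed sketch of that shape (the real code also sets up mmu_notifier ranges and batches the flush, omitted here):

#include <linux/mm.h>
#include <asm/tlbflush.h>

static void wp_range_sketch(struct vm_area_struct *vma,
                            unsigned long start, unsigned long end)
{
        struct mm_struct *mm = vma->vm_mm;

        inc_tlb_flush_pending(mm); /* visible to concurrent walkers */
        /* ... walk [start, end) and clear the write bit on each PTE ... */
        flush_tlb_range(vma, start, end);
        dec_tlb_flush_pending(mm);
}
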
hugetlb_vmemmap.c
104 walk->action = ACTION_CONTINUE; in vmemmap_pmd_entry()
168 NULL, walk); in vmemmap_remap_range()
173 if (walk->remap_pte && !(walk->flags & VMEMMAP_REMAP_NO_TLB_FLUSH)) in vmemmap_remap_range()
216 list_del(&walk->reuse_page->lru); in vmemmap_remap_pte()
251 struct vmemmap_remap_walk *walk) in vmemmap_restore_pte() argument
287 struct vmemmap_remap_walk walk = { in vmemmap_remap_split() local
319 struct vmemmap_remap_walk walk = { in vmemmap_remap_free() local
337 if (walk.reuse_page) { in vmemmap_remap_free()
339 (void *)walk.reuse_addr); in vmemmap_remap_free()
359 if (ret && walk.nr_walked) { in vmemmap_remap_free()
[all …]
/linux/arch/arm/crypto/
aes-ce-glue.c
179 ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
199 ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
460 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_encrypt()
482 if (walk.nbytes < walk.total) in xts_encrypt()
490 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_encrypt()
508 ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in xts_encrypt()
509 ctx->key1.key_enc, rounds, walk.nbytes, walk.iv, in xts_encrypt()
532 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_decrypt()
554 if (walk.nbytes < walk.total) in xts_decrypt()
562 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_decrypt()
[all …]
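
The `tail > 0 && walk.nbytes < walk.total` tests above set up XTS ciphertext stealing: walk everything except the last full block, then refocus on that block plus the partial tail with scatterwalk_ffwd(). A condensed sketch of the control flow with the actual encryption elided; unlike the arm/arm64 drivers, it always splits the tail off instead of letting single-chunk assembly absorb it:

#include <linux/kernel.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>

static int my_xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        int tail = req->cryptlen % AES_BLOCK_SIZE;
        struct scatterlist sg_src[2], sg_dst[2];
        struct scatterlist *src, *dst;
        struct skcipher_request subreq;
        struct skcipher_walk walk;
        int err;

        if (req->cryptlen < AES_BLOCK_SIZE)
                return -EINVAL;

        if (unlikely(tail > 0)) {
                /* hold back the last full block for the stealing step */
                int blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;

                skcipher_request_set_tfm(&subreq, tfm);
                skcipher_request_set_callback(&subreq,
                                              skcipher_request_flags(req),
                                              NULL, NULL);
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           blocks * AES_BLOCK_SIZE,
                                           req->iv);
                req = &subreq;
        }

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int n = walk.nbytes & ~(AES_BLOCK_SIZE - 1);

                /* ... XTS-encrypt n bytes, advancing the tweak in walk.iv ... */
                err = skcipher_walk_done(&walk, walk.nbytes - n);
        }

        if (err || likely(!tail))
                return err;

        /* refocus on the final full block plus the tail */
        dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
        if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
        skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                   req->iv);

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;
        /* ... one ciphertext-stealing step over walk.nbytes bytes ... */
        return skcipher_walk_done(&walk, 0);
}
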
aes-neonbs-glue.c
94 struct skcipher_walk walk; in __ecb_crypt() local
102 if (walk.nbytes < walk.total) in __ecb_crypt()
107 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
176 if (walk.nbytes < walk.total) in cbc_decrypt()
181 aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in cbc_decrypt()
183 walk.iv); in cbc_decrypt()
253 else if (walk.nbytes < walk.total) in ctr_encrypt()
264 err = skcipher_walk_done(&walk, walk.nbytes - bytes); in ctr_encrypt()
366 crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv); in __xts_crypt()
372 if (walk.nbytes < walk.total) { in __xts_crypt()
[all …]
/linux/arch/arm64/crypto/
aes-glue.c
191 aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
211 aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
328 ctx->key_enc, rounds, walk.nbytes, walk.iv); in cts_cbc_encrypt()
385 ctx->key_dec, rounds, walk.nbytes, walk.iv); in cts_cbc_decrypt()
491 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xctr_encrypt()
528 walk.iv); in ctr_encrypt()
535 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ctr_encrypt()
557 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_encrypt()
579 if (walk.nbytes < walk.total) in xts_encrypt()
629 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_decrypt()
[all …]
aes-neonbs-glue.c
108 if (walk.nbytes < walk.total) in __ecb_crypt()
113 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
170 neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in cbc_encrypt()
174 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
191 if (walk.nbytes < walk.total) in cbc_decrypt()
196 aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in cbc_decrypt()
198 walk.iv); in cbc_decrypt()
229 if (nbytes && walk.nbytes == walk.total) { in ctr_encrypt()
324 neon_aes_ecb_encrypt(walk.iv, walk.iv, in __xts_crypt()
330 walk.iv); in __xts_crypt()
[all …]
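
All of the arm64 glue loops in these results bracket their assembly with kernel_neon_begin()/kernel_neon_end(), since kernel code may only touch NEON registers inside such a section; chunking by the walk keeps each non-preemptible span bounded. A minimal sketch, assuming whole-block input and a hypothetical my_neon_ecb_blocks():

#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

static int my_neon_ecb_encrypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                unsigned int n = walk.nbytes & ~(AES_BLOCK_SIZE - 1);

                kernel_neon_begin();
                my_neon_ecb_blocks(walk.dst.virt.addr, walk.src.virt.addr, n);
                kernel_neon_end();

                err = skcipher_walk_done(&walk, walk.nbytes - n);
        }
        return err;
}
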
aes-ce-ccm-glue.c
140 struct scatter_walk walk; in ccm_calculate_auth_mac() local
163 scatterwalk_start(&walk, sg_next(walk.sg)); in ccm_calculate_auth_mac()
212 if (walk.nbytes == walk.total) { in ccm_encrypt()
219 src, walk.nbytes); in ccm_encrypt()
226 memcpy(walk.dst.virt.addr, dst, walk.nbytes); in ccm_encrypt()
228 if (walk.nbytes) { in ccm_encrypt()
231 } while (walk.nbytes); in ccm_encrypt()
279 if (walk.nbytes == walk.total) { in ccm_decrypt()
293 memcpy(walk.dst.virt.addr, dst, walk.nbytes); in ccm_decrypt()
295 if (walk.nbytes) { in ccm_decrypt()
[all …]
sm4-neon-glue.c
38 struct skcipher_walk walk; in sm4_ecb_do_crypt() local
46 u8 *dst = walk.dst.virt.addr; in sm4_ecb_do_crypt()
84 struct skcipher_walk walk; in sm4_cbc_encrypt() local
91 const u8 *iv = walk.iv; in sm4_cbc_encrypt()
93 u8 *dst = walk.dst.virt.addr; in sm4_cbc_encrypt()
103 if (iv != walk.iv) in sm4_cbc_encrypt()
116 struct skcipher_walk walk; in sm4_cbc_decrypt() local
132 walk.iv, nblocks); in sm4_cbc_decrypt()
147 struct skcipher_walk walk; in sm4_ctr_crypt() local
163 walk.iv, nblocks); in sm4_ctr_crypt()
[all …]
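
The `if (iv != walk.iv)` matches above are the CBC-encrypt chaining carry: within a chunk, iv points at the most recent ciphertext block, and it is copied back into walk.iv at the end so the chain survives into the next walk chunk. A sketch with a hypothetical one-block my_block(); BLOCK is the cipher's block size:

#include <linux/string.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

#define BLOCK 16

static int my_cbc_encrypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *iv = walk.iv;
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                while (nbytes >= BLOCK) {
                        crypto_xor_cpy(dst, src, iv, BLOCK); /* dst = P ^ iv */
                        my_block(dst, dst);                  /* dst = E(dst) */
                        iv = dst;                            /* chain on C   */
                        src += BLOCK;
                        dst += BLOCK;
                        nbytes -= BLOCK;
                }

                if (iv != walk.iv) /* carry the chain across chunks */
                        memcpy(walk.iv, iv, BLOCK);

                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
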
sm4-ce-ccm-glue.c
97 struct scatter_walk walk; in ccm_calculate_auth_mac() local
119 scatterwalk_start(&walk, sg_next(walk.sg)); in ccm_calculate_auth_mac()
125 scatterwalk_advance(&walk, n); in ccm_calculate_auth_mac()
180 while (walk->nbytes && walk->nbytes != walk->total) { in ccm_crypt()
184 walk->src.virt.addr, walk->iv, in ccm_crypt()
185 walk->nbytes - tail, mac); in ccm_crypt()
194 if (walk->nbytes) { in ccm_crypt()
196 walk->src.virt.addr, walk->iv, in ccm_crypt()
197 walk->nbytes, mac); in ccm_crypt()
218 struct skcipher_walk walk; in ccm_encrypt() local
[all …]
sm4-ce-glue.c
111 struct skcipher_walk walk; in sm4_ecb_do_crypt() local
157 struct skcipher_walk walk; in sm4_cbc_crypt() local
258 walk.src.virt.addr, walk.iv, walk.nbytes); in sm4_cbc_cts_crypt()
261 walk.src.virt.addr, walk.iv, walk.nbytes); in sm4_cbc_cts_crypt()
304 if (walk.nbytes == walk.total && nbytes > 0) { in sm4_ctr_crypt()
341 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in sm4_xts_crypt()
368 walk.src.virt.addr, walk.iv, nbytes, in sm4_xts_crypt()
372 walk.src.virt.addr, walk.iv, nbytes, in sm4_xts_crypt()
379 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in sm4_xts_crypt()
404 walk.src.virt.addr, walk.iv, walk.nbytes, in sm4_xts_crypt()
[all …]
sm4-ce-gcm-glue.c
79 struct scatter_walk walk; in gcm_calculate_auth_mac() local
82 scatterwalk_start(&walk, req->src); in gcm_calculate_auth_mac()
89 scatterwalk_start(&walk, sg_next(walk.sg)); in gcm_calculate_auth_mac()
93 p = ptr = scatterwalk_map(&walk); in gcm_calculate_auth_mac()
95 scatterwalk_advance(&walk, n); in gcm_calculate_auth_mac()
162 while (walk->nbytes) { in gcm_crypt()
165 u8 *dst = walk->dst.virt.addr; in gcm_crypt()
167 if (walk->nbytes == walk->total) { in gcm_crypt()
169 walk->nbytes, ghash, in gcm_crypt()
202 struct skcipher_walk walk; in gcm_encrypt() local
[all …]
/linux/drivers/gpu/drm/xe/
xe_pt_walk.c
28 const struct xe_pt_walk *walk) in xe_pt_addr_end() argument
30 u64 size = 1ull << walk->shifts[level]; in xe_pt_addr_end()
42 if (unlikely(walk->shared_pt_mode)) { in xe_pt_next()
43 unsigned int shift = walk->shifts[level]; in xe_pt_next()
78 const struct xe_pt_walk_ops *ops = walk->ops; in xe_pt_walk_range()
87 walk)) in xe_pt_walk_range()
93 &child, &action, walk); in xe_pt_walk_range()
108 next, &child, &action, walk); in xe_pt_walk_range()
147 walk->shared_pt_mode = true; in xe_pt_walk_shared()
149 &child, &action, walk); in xe_pt_walk_shared()
[all …]
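
xe_pt_addr_end() clips each walk step to the coverage of a single entry at the current level, which spans 1ull << walk->shifts[level] bytes. The boundary math as a standalone sketch (equivalent to rounding addr + 1 up to the next multiple of size); e.g. shifts of 12/21/30/39 would reproduce the familiar 4K-page-table strides:

#include <linux/types.h>

static u64 pt_addr_end(u64 addr, u64 end, unsigned int shift)
{
        u64 size = 1ull << shift;
        u64 next = (addr + size) & ~(size - 1); /* next entry boundary */

        return next < end ? next : end;
}
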
xe_pt_walk.h
63 struct xe_pt_walk *walk);
84 u64 addr, u64 end, struct xe_pt_walk *walk);
87 u64 addr, u64 end, struct xe_pt_walk *walk);
102 const struct xe_pt_walk *walk) in xe_pt_covers() argument
104 u64 pt_size = 1ull << walk->shifts[level]; in xe_pt_covers()
122 const struct xe_pt_walk *walk) in xe_pt_num_entries() argument
124 u64 pt_size = 1ull << walk->shifts[level]; in xe_pt_num_entries()
127 walk->shifts[level]; in xe_pt_num_entries()
142 if (level < walk->max_level) in xe_pt_offset()
143 addr &= ((1ull << walk->shifts[level + 1]) - 1); in xe_pt_offset()
[all …]
/linux/arch/riscv/crypto/
aes-riscv64-glue.c
139 struct skcipher_walk walk; in riscv64_aes_ecb_crypt() local
177 struct skcipher_walk walk; in riscv64_aes_cbc_crypt() local
188 walk.iv); in riscv64_aes_cbc_crypt()
193 walk.iv); in riscv64_aes_cbc_crypt()
259 aes_cbc_cts_crypt_zvkned(ctx, walk.src.virt.addr, walk.dst.virt.addr, in riscv64_aes_cbc_cts_crypt()
331 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in riscv64_aes_ctr_crypt()
382 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in riscv64_aes_xts_crypt()
398 while (walk.nbytes) { in riscv64_aes_xts_crypt()
414 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in riscv64_aes_xts_crypt()
437 walk.dst.virt.addr, walk.nbytes, req->iv); in riscv64_aes_xts_crypt()
[all …]
/linux/arch/x86/crypto/
sm4_aesni_avx_glue.c
41 struct skcipher_walk walk; in ecb_do_crypt() local
49 u8 *dst = walk.dst.virt.addr; in ecb_do_crypt()
95 struct skcipher_walk walk; in sm4_cbc_encrypt() local
102 const u8 *iv = walk.iv; in sm4_cbc_encrypt()
104 u8 *dst = walk.dst.virt.addr; in sm4_cbc_encrypt()
114 if (iv != walk.iv) in sm4_cbc_encrypt()
129 struct skcipher_walk walk; in sm4_avx_cbc_decrypt() local
137 u8 *dst = walk.dst.virt.addr; in sm4_avx_cbc_decrypt()
194 struct skcipher_walk walk; in sm4_avx_ctr_crypt() local
220 walk.iv, SM4_BLOCK_SIZE); in sm4_avx_ctr_crypt()
[all …]
aesni-intel_glue.c
299 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
425 walk.nbytes, walk.iv); in cts_cbc_encrypt()
481 walk.nbytes, walk.iv); in cts_cbc_decrypt()
525 if (walk.nbytes == walk.total && nbytes > 0) { in ctr_crypt()
528 walk.src.virt.addr + walk.nbytes - nbytes, in ctr_crypt()
576 if (walk.nbytes == walk.total && nbytes > 0) { in xctr_crypt()
580 crypto_xor_cpy(walk.dst.virt.addr + walk.nbytes - in xctr_crypt()
581 nbytes, walk.src.virt.addr + walk.nbytes in xctr_crypt()
738 walk.src.virt.addr, walk.nbytes); in gcmaes_crypt_by_sg()
741 walk.src.virt.addr, walk.nbytes); in gcmaes_crypt_by_sg()
[all …]
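
The `walk.nbytes == walk.total && nbytes > 0` tests above finish a partial final CTR/XCTR block: encrypt one extra keystream block and XOR-copy only the remaining bytes, which is legal only on the last chunk of the walk. A sketch with hypothetical my_ctr_blocks()/my_enc_block() primitives:

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

static int my_ctr_crypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                unsigned int whole = nbytes & ~(AES_BLOCK_SIZE - 1);

                if (whole) /* bumps the counter in walk.iv */
                        my_ctr_blocks(walk.dst.virt.addr, walk.src.virt.addr,
                                      whole, walk.iv);
                nbytes -= whole;

                if (walk.nbytes == walk.total && nbytes > 0) {
                        u8 ks[AES_BLOCK_SIZE];

                        my_enc_block(ks, walk.iv);
                        crypto_xor_cpy(walk.dst.virt.addr + whole,
                                       walk.src.virt.addr + whole,
                                       ks, nbytes);
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
                        nbytes = 0;
                }
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
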
des3_ede_glue.c
69 struct skcipher_walk walk; in ecb_crypt() local
76 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
77 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
133 u64 *iv = (u64 *)walk->iv; in __cbc_encrypt()
145 *(u64 *)walk->iv = *iv; in __cbc_encrypt()
153 struct skcipher_walk walk; in cbc_encrypt() local
159 while (walk.nbytes) { in cbc_encrypt()
222 *dst ^= *(u64 *)walk->iv; in __cbc_decrypt()
223 *(u64 *)walk->iv = last_iv; in __cbc_decrypt()
232 struct skcipher_walk walk; in cbc_decrypt() local
[all …]
/linux/arch/sparc/crypto/
aes_glue.c
223 struct skcipher_walk walk; in ecb_encrypt() local
247 struct skcipher_walk walk; in ecb_decrypt() local
272 struct skcipher_walk walk; in cbc_encrypt() local
285 walk.iv); in cbc_encrypt()
297 struct skcipher_walk walk; in cbc_decrypt() local
311 walk.iv); in cbc_decrypt()
322 u8 *ctrblk = walk->iv; in ctr_crypt_final()
338 struct skcipher_walk walk; in ctr_crypt() local
349 walk.dst.virt.addr, in ctr_crypt()
351 walk.iv); in ctr_crypt()
[all …]
