Searched refs:walk (Results 51 – 75 of 222) sorted by relevance

/linux/arch/x86/crypto/
aria_aesni_avx2_glue.c
93 struct skcipher_walk walk; in aria_avx2_ctr_encrypt() local
99 while ((nbytes = walk.nbytes) > 0) { in aria_avx2_ctr_encrypt()
100 const u8 *src = walk.src.virt.addr; in aria_avx2_ctr_encrypt()
101 u8 *dst = walk.dst.virt.addr; in aria_avx2_ctr_encrypt()
107 walk.iv); in aria_avx2_ctr_encrypt()
118 walk.iv); in aria_avx2_ctr_encrypt()
127 crypto_inc(walk.iv, ARIA_BLOCK_SIZE); in aria_avx2_ctr_encrypt()
139 if (walk.nbytes == walk.total && nbytes > 0) { in aria_avx2_ctr_encrypt()
140 memcpy(&req_ctx->keystream[0], walk.iv, in aria_avx2_ctr_encrypt()
142 crypto_inc(walk.iv, ARIA_BLOCK_SIZE); in aria_avx2_ctr_encrypt()
[all …]
chacha_glue.c
167 struct skcipher_walk walk; in chacha_simd_stream_xor() local
170 err = skcipher_walk_virt(&walk, req, false); in chacha_simd_stream_xor()
174 while (walk.nbytes > 0) { in chacha_simd_stream_xor()
175 unsigned int nbytes = walk.nbytes; in chacha_simd_stream_xor()
177 if (nbytes < walk.total) in chacha_simd_stream_xor()
178 nbytes = round_down(nbytes, walk.stride); in chacha_simd_stream_xor()
182 chacha_crypt_generic(state, walk.dst.virt.addr, in chacha_simd_stream_xor()
183 walk.src.virt.addr, nbytes, in chacha_simd_stream_xor()
187 chacha_dosimd(state, walk.dst.virt.addr, in chacha_simd_stream_xor()
188 walk.src.virt.addr, nbytes, in chacha_simd_stream_xor()
[all …]
/linux/arch/mips/crypto/
chacha-glue.c
32 struct skcipher_walk walk; in chacha_mips_stream_xor() local
36 err = skcipher_walk_virt(&walk, req, false); in chacha_mips_stream_xor()
40 while (walk.nbytes > 0) { in chacha_mips_stream_xor()
41 unsigned int nbytes = walk.nbytes; in chacha_mips_stream_xor()
43 if (nbytes < walk.total) in chacha_mips_stream_xor()
44 nbytes = round_down(nbytes, walk.stride); in chacha_mips_stream_xor()
46 chacha_crypt(state, walk.dst.virt.addr, walk.src.virt.addr, in chacha_mips_stream_xor()
48 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in chacha_mips_stream_xor()
/linux/crypto/
chacha_generic.c
18 struct skcipher_walk walk; in chacha_stream_xor() local
22 err = skcipher_walk_virt(&walk, req, false); in chacha_stream_xor()
26 while (walk.nbytes > 0) { in chacha_stream_xor()
27 unsigned int nbytes = walk.nbytes; in chacha_stream_xor()
29 if (nbytes < walk.total) in chacha_stream_xor()
32 chacha_crypt_generic(state, walk.dst.virt.addr, in chacha_stream_xor()
33 walk.src.virt.addr, nbytes, ctx->nrounds); in chacha_stream_xor()
34 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in chacha_stream_xor()
crypto_null.c
78 struct skcipher_walk walk; in null_skcipher_crypt() local
81 err = skcipher_walk_virt(&walk, req, false); in null_skcipher_crypt()
83 while (walk.nbytes) { in null_skcipher_crypt()
84 if (walk.src.virt.addr != walk.dst.virt.addr) in null_skcipher_crypt()
85 memcpy(walk.dst.virt.addr, walk.src.virt.addr, in null_skcipher_crypt()
86 walk.nbytes); in null_skcipher_crypt()
87 err = skcipher_walk_done(&walk, 0); in null_skcipher_crypt()
/linux/drivers/media/mc/
mc-entity.c
479 return &walk->stack.entries[walk->stack.top]; in media_pipeline_walk_top()
518 if (walk->stack.top + 1 >= walk->stack.size) { in media_pipeline_walk_push()
524 walk->stack.top++; in media_pipeline_walk_push()
529 dev_dbg(walk->mdev->dev, in media_pipeline_walk_push()
553 walk->stack.top); in media_pipeline_walk_pop()
555 walk->stack.top--; in media_pipeline_walk_pop()
561 dev_dbg(walk->mdev->dev, in media_pipeline_walk_pop()
563 walk->stack.top); in media_pipeline_walk_pop()
628 dev_dbg(walk->mdev->dev, in media_pipeline_explore_next_link()
732 walk.mdev = pipe->mdev; in media_pipeline_populate()
[all …]
/linux/mm/damon/
vaddr.c
308 ptl = pmd_lock(walk->mm, pmd); in damon_mkold_pmd_entry()
326 walk->action = ACTION_AGAIN; in damon_mkold_pmd_entry()
331 damon_ptep_mkold(pte, walk->vma, addr); in damon_mkold_pmd_entry()
369 struct mm_walk *walk) in damon_mkold_hugetlb_entry() argument
375 ptl = huge_pte_lock(h, walk->mm, pte); in damon_mkold_hugetlb_entry()
380 damon_hugetlb_mkold(pte, walk->mm, walk->vma, addr); in damon_mkold_hugetlb_entry()
450 ptl = pmd_lock(walk->mm, pmd); in damon_young_pmd_entry()
466 mmu_notifier_test_young(walk->mm, in damon_young_pmd_entry()
481 walk->action = ACTION_AGAIN; in damon_young_pmd_entry()
503 struct mm_walk *walk) in damon_young_hugetlb_entry() argument
[all …]
/linux/arch/powerpc/crypto/
aes_xts.c
82 struct skcipher_walk walk; in p8_aes_xts_crypt() local
99 ret = skcipher_walk_virt(&walk, req, false); in p8_aes_xts_crypt()
107 aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key); in p8_aes_xts_crypt()
113 while ((nbytes = walk.nbytes) != 0) { in p8_aes_xts_crypt()
118 aes_p8_xts_encrypt(walk.src.virt.addr, in p8_aes_xts_crypt()
119 walk.dst.virt.addr, in p8_aes_xts_crypt()
123 aes_p8_xts_decrypt(walk.src.virt.addr, in p8_aes_xts_crypt()
124 walk.dst.virt.addr, in p8_aes_xts_crypt()
131 ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in p8_aes_xts_crypt()
aes_cbc.c
75 struct skcipher_walk walk; in p8_aes_cbc_crypt() local
88 ret = skcipher_walk_virt(&walk, req, false); in p8_aes_cbc_crypt()
89 while ((nbytes = walk.nbytes) != 0) { in p8_aes_cbc_crypt()
93 aes_p8_cbc_encrypt(walk.src.virt.addr, in p8_aes_cbc_crypt()
94 walk.dst.virt.addr, in p8_aes_cbc_crypt()
97 walk.iv, enc); in p8_aes_cbc_crypt()
102 ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in p8_aes_cbc_crypt()
aes-gcm-p10-glue.c
210 struct skcipher_walk walk; in p10_aes_gcm_crypt() local
254 ret = skcipher_walk_aead_encrypt(&walk, req, false); in p10_aes_gcm_crypt()
260 while (walk.nbytes > 0 && ret == 0) { in p10_aes_gcm_crypt()
264 aes_p10_gcm_encrypt(walk.src.virt.addr, in p10_aes_gcm_crypt()
265 walk.dst.virt.addr, in p10_aes_gcm_crypt()
266 walk.nbytes, in p10_aes_gcm_crypt()
269 aes_p10_gcm_decrypt(walk.src.virt.addr, in p10_aes_gcm_crypt()
270 walk.dst.virt.addr, in p10_aes_gcm_crypt()
271 walk.nbytes, in p10_aes_gcm_crypt()
275 total_processed += walk.nbytes; in p10_aes_gcm_crypt()
[all …]
/linux/arch/arm64/crypto/
chacha-neon-glue.c
107 struct skcipher_walk walk; in chacha_neon_stream_xor() local
111 err = skcipher_walk_virt(&walk, req, false); in chacha_neon_stream_xor()
115 while (walk.nbytes > 0) { in chacha_neon_stream_xor()
116 unsigned int nbytes = walk.nbytes; in chacha_neon_stream_xor()
118 if (nbytes < walk.total) in chacha_neon_stream_xor()
119 nbytes = rounddown(nbytes, walk.stride); in chacha_neon_stream_xor()
123 chacha_crypt_generic(state, walk.dst.virt.addr, in chacha_neon_stream_xor()
124 walk.src.virt.addr, nbytes, in chacha_neon_stream_xor()
128 chacha_doneon(state, walk.dst.virt.addr, in chacha_neon_stream_xor()
129 walk.src.virt.addr, nbytes, ctx->nrounds); in chacha_neon_stream_xor()
[all …]
/linux/Documentation/filesystems/
path-lookup.txt
209 RCU-walk path walking design
212 Path walking code now has two distinct modes, ref-walk and rcu-walk. ref-walk
223 path walk.
225 Where ref-walk uses a stable, refcounted ``parent'' to walk the remaining
275 "dropping rcu" or dropping from rcu-walk into ref-walk mode.
280 the path walk must be fully restarted (which we do in ref-walk mode, to avoid
285 requires ref-walk, then instead of restarting the walk, we attempt to drop rcu
291 * LOOKUP_RCU is set in nd->flags, which distinguishes rcu-walk from ref-walk.
306 during the path walk.
360 Dropping from rcu-walk to ref-walk mean that we have encountered a dentry where
[all …]
path-lookup.rst
30 will allow us to review "REF-walk" and "RCU-walk" separately. But we
606 REF-walk. The difficulty with RCU-walk comes from a different
632 using REF-walk.
640 REF-walk.
644 that a path walk with REF-walk would have found the same entries.
653 This pattern of "try RCU-walk, if that fails try REF-walk" can be
699 REF-walk increments a reference count or takes a spinlock, RCU-walk
752 REF-walk.
1064 method. This is called both in RCU-walk and REF-walk. In RCU-walk the
1272 path walk drop down to REF-walk. All of this is handled in the
[all …]
/linux/drivers/atm/
suni.c
58 struct suni_priv *walk; in suni_hz() local
62 for (walk = sunis; walk; walk = walk->next) { in suni_hz()
63 dev = walk->dev; in suni_hz()
64 stats = &walk->sonet_stats; in suni_hz()
342 struct suni_priv **walk; in suni_stop() local
347 for (walk = &sunis; *walk != PRIV(dev); in suni_stop()
348 walk = &PRIV((*walk)->dev)->next); in suni_stop()
349 *walk = PRIV((*walk)->dev)->next; in suni_stop()
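The suni_stop() hit above walks the driver list with a pointer-to-pointer so that unlinking an element needs no special case for the list head. A generic sketch of the same idiom follows; struct node and unlink_node are illustrative names, not part of the driver.

struct node {
        struct node *next;
        int key;
};

/* Walk via a pointer to the "next" field itself, so rewriting *walk
 * unlinks the node whether it is the head or an interior element. */
static void unlink_node(struct node **head, int key)
{
        struct node **walk;

        for (walk = head; *walk; walk = &(*walk)->next) {
                if ((*walk)->key == key) {
                        *walk = (*walk)->next;
                        break;
                }
        }
}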
/linux/fs/proc/
task_mmu.c
520 if (walk->ops->pte_hole) { in smaps_pte_hole_lookup()
528 struct mm_walk *walk) in smaps_pte_entry() argument
575 struct mm_walk *walk) in smaps_pmd_entry() argument
611 struct mm_walk *walk) in smaps_pmd_entry() argument
617 struct mm_walk *walk) in smaps_pte_range() argument
632 walk->action = ACTION_AGAIN; in smaps_pte_range()
729 struct mm_walk *walk) in smaps_hugetlb_range() argument
1228 struct mm_walk *walk) in clear_refs_test_walk() argument
1574 struct mm_walk *walk) in pagemap_hugetlb_range() argument
2002 struct mm_walk *walk) in pagemap_scan_test_walk() argument
[all …]
/linux/arch/arm/crypto/
chacha-glue.c
113 struct skcipher_walk walk; in chacha_stream_xor() local
117 err = skcipher_walk_virt(&walk, req, false); in chacha_stream_xor()
121 while (walk.nbytes > 0) { in chacha_stream_xor()
122 unsigned int nbytes = walk.nbytes; in chacha_stream_xor()
124 if (nbytes < walk.total) in chacha_stream_xor()
125 nbytes = round_down(nbytes, walk.stride); in chacha_stream_xor()
128 chacha_doarm(walk.dst.virt.addr, walk.src.virt.addr, in chacha_stream_xor()
133 chacha_doneon(state, walk.dst.virt.addr, in chacha_stream_xor()
134 walk.src.virt.addr, nbytes, ctx->nrounds); in chacha_stream_xor()
137 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in chacha_stream_xor()
/linux/include/crypto/internal/
skcipher.h
208 int skcipher_walk_done(struct skcipher_walk *walk, int err);
209 int skcipher_walk_virt(struct skcipher_walk *walk,
212 int skcipher_walk_async(struct skcipher_walk *walk,
214 int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
216 int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
218 void skcipher_walk_complete(struct skcipher_walk *walk, int err);
220 static inline void skcipher_walk_abort(struct skcipher_walk *walk) in skcipher_walk_abort() argument
222 skcipher_walk_done(walk, -ECANCELED); in skcipher_walk_abort()
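The skcipher.h declarations above are the scatterlist-walking API behind most of the crypto glue hits in these results (the chacha and AES drivers). Below is a minimal sketch of the usual loop, assuming a synchronous virtual-address walk; my_skcipher_crypt and my_crypt_chunk are illustrative names, not kernel symbols.

#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/string.h>

/* Stand-in for the driver's SIMD/asm primitive (chacha_crypt(),
 * aes_p8_cbc_encrypt(), ...); block-mode glue would also pass walk.iv. */
static void my_crypt_chunk(u8 *dst, const u8 *src, unsigned int nbytes)
{
        if (dst != src)                 /* copy-through, like crypto_null.c */
                memcpy(dst, src, nbytes);
}

static int my_skcipher_crypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;

                /* Every chunk except the last must be a multiple of the
                 * walk stride (the cipher's block granularity). */
                if (nbytes < walk.total)
                        nbytes = round_down(nbytes, walk.stride);

                my_crypt_chunk(walk.dst.virt.addr, walk.src.virt.addr, nbytes);

                /* Report how many bytes of this chunk were left unprocessed. */
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        return err;
}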
hash.h
60 int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err);
62 struct crypto_hash_walk *walk);
64 static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk) in crypto_hash_walk_last() argument
66 return !(walk->entrylen | walk->total); in crypto_hash_walk_last()
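crypto_hash_walk is the hash-side counterpart: the request's scatterlist is walked one mapped chunk at a time and each chunk is fed to an update function. The sketch below follows the pattern used by the shash-to-ahash wrappers; my_ahash_update is an illustrative name.

#include <crypto/internal/hash.h>

static int my_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        /* Each iteration maps one chunk; crypto_hash_walk_done() is passed
         * the update result (0 or -errno) and returns the next chunk size. */
        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}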
/linux/security/
device_cgroup.c
111 if (walk->type != ex->type) in dev_exception_add()
113 if (walk->major != ex->major) in dev_exception_add()
115 if (walk->minor != ex->minor) in dev_exception_add()
118 walk->access |= ex->access; in dev_exception_add()
139 if (walk->type != ex->type) in dev_exception_rm()
141 if (walk->major != ex->major) in dev_exception_rm()
143 if (walk->minor != ex->minor) in dev_exception_rm()
146 walk->access &= ~ex->access; in dev_exception_rm()
147 if (!walk->access) { in dev_exception_rm()
148 list_del_rcu(&walk->list); in dev_exception_rm()
[all …]
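Both dev_exception_add() and dev_exception_rm() walk the cgroup's exception list for an entry with matching type, major and minor, then merge or clear the access bits. A rough sketch of the removal side with illustrative names (the real code in security/device_cgroup.c also handles locking and RCU freeing).

#include <linux/list.h>
#include <linux/rculist.h>

struct exception {                      /* loosely modelled on dev_exception_item */
        struct list_head list;
        u32 major, minor;
        short type;
        short access;
};

static void exception_rm(struct list_head *exceptions, const struct exception *ex)
{
        struct exception *walk, *tmp;

        list_for_each_entry_safe(walk, tmp, exceptions, list) {
                if (walk->type != ex->type ||
                    walk->major != ex->major ||
                    walk->minor != ex->minor)
                        continue;

                walk->access &= ~ex->access;
                if (!walk->access) {
                        list_del_rcu(&walk->list);
                        /* the real code frees the entry via kfree_rcu() here */
                }
                return;
        }
}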
/linux/arch/riscv/mm/
pageattr.c
18 static unsigned long set_pageattr_masks(unsigned long val, struct mm_walk *walk) in set_pageattr_masks() argument
20 struct pageattr_masks *masks = walk->private; in set_pageattr_masks()
30 unsigned long next, struct mm_walk *walk) in pageattr_p4d_entry() argument
35 val = __p4d(set_pageattr_masks(p4d_val(val), walk)); in pageattr_p4d_entry()
43 unsigned long next, struct mm_walk *walk) in pageattr_pud_entry() argument
48 val = __pud(set_pageattr_masks(pud_val(val), walk)); in pageattr_pud_entry()
56 unsigned long next, struct mm_walk *walk) in pageattr_pmd_entry() argument
61 val = __pmd(set_pageattr_masks(pmd_val(val), walk)); in pageattr_pmd_entry()
69 unsigned long next, struct mm_walk *walk) in pageattr_pte_entry() argument
73 val = __pte(set_pageattr_masks(pte_val(val), walk)); in pageattr_pte_entry()
[all …]
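These pageattr callbacks, like the DAMON and migrate_device hits elsewhere in these results, plug into the generic page-table walker: the caller fills a struct mm_walk_ops, and walk_page_range() invokes the entry callbacks level by level, passing caller state through walk->private. Below is a minimal sketch that counts present PTEs over a range; the ops and function names are illustrative.

#include <linux/pagewalk.h>
#include <linux/mm.h>

/* Per-PTE callback; walk->private carries the caller's counter. */
static int count_pte_entry(pte_t *pte, unsigned long addr,
                           unsigned long next, struct mm_walk *walk)
{
        unsigned long *count = walk->private;

        if (pte_present(ptep_get(pte)))
                (*count)++;

        return 0;
}

static const struct mm_walk_ops count_ops = {
        .pte_entry = count_pte_entry,
};

/* Count present PTEs in [start, end); the mmap lock must be held,
 * here taken read-side to match the walker's default locking. */
static unsigned long count_present_ptes(struct mm_struct *mm,
                                        unsigned long start, unsigned long end)
{
        unsigned long count = 0;

        mmap_read_lock(mm);
        walk_page_range(mm, start, end, &count_ops, &count);
        mmap_read_unlock(mm);

        return count;
}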
/linux/fs/
select.c
908 for (walk = list; walk != NULL; walk = walk->next) { in do_poll()
911 pfd = walk->entries; in do_poll()
993 walk->next = NULL; in do_sys_poll()
994 walk->len = len; in do_sys_poll()
1004 todo -= walk->len; in do_sys_poll()
1007 walk = walk->next = kmalloc(struct_size(walk, entries, len), in do_sys_poll()
1009 if (!walk) { in do_sys_poll()
1022 for (walk = head; walk; walk = walk->next) { in do_sys_poll()
1033 walk = head->next; in do_sys_poll()
1034 while (walk) { in do_sys_poll()
[all …]
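do_sys_poll() keeps the first block of poll entries on the stack and, when that is not enough, chains further blocks allocated with struct_size(), then walks the chain to copy results back and frees everything past the head. A rough sketch of that allocate-and-free pattern with illustrative names (struct chunk_list stands in for struct poll_list).

#include <linux/overflow.h>
#include <linux/slab.h>

struct chunk_list {
        struct chunk_list *next;
        unsigned int len;
        int entries[];                  /* flexible array of per-block items */
};

/* Append a block sized for "len" entries; struct_size() computes
 * header + array size with overflow checking. */
static struct chunk_list *append_chunk(struct chunk_list *tail, unsigned int len)
{
        struct chunk_list *walk;

        walk = kmalloc(struct_size(walk, entries, len), GFP_KERNEL);
        if (!walk)
                return NULL;

        walk->next = NULL;
        walk->len = len;
        tail->next = walk;
        return walk;
}

/* Free every chained block after the (stack-resident) head. */
static void free_chunks(struct chunk_list *head)
{
        struct chunk_list *walk = head->next;

        while (walk) {
                struct chunk_list *next = walk->next;

                kfree(walk);
                walk = next;
        }
}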
/linux/mm/
vmscan.c
3179 walk->batched++; in update_batch_size()
3191 walk->batched = 0; in reset_batch_size()
3682 return walk; in set_mm_walk()
3689 VM_WARN_ON_ONCE(walk && memchr_inv(walk->nr_pages, 0, sizeof(walk->nr_pages))); in clear_mm_walk()
3690 VM_WARN_ON_ONCE(walk && memchr_inv(walk->mm_stats, 0, sizeof(walk->mm_stats))); in clear_mm_walk()
3695 kfree(walk); in clear_mm_walk()
3875 if (!walk) { in try_to_inc_max_seq()
3881 walk->seq = seq; in try_to_inc_max_seq()
3888 walk_mm(mm, walk); in try_to_inc_max_seq()
4083 if (walk) { in lru_gen_look_around()
[all …]
migrate_device.c
22 struct mm_walk *walk) in migrate_vma_collect_skip() argument
24 struct migrate_vma *migrate = walk->private; in migrate_vma_collect_skip()
38 struct mm_walk *walk) in migrate_vma_collect_hole() argument
40 struct migrate_vma *migrate = walk->private; in migrate_vma_collect_hole()
44 if (!vma_is_anonymous(walk->vma)) in migrate_vma_collect_hole()
60 struct mm_walk *walk) in migrate_vma_collect_pmd() argument
62 struct migrate_vma *migrate = walk->private; in migrate_vma_collect_pmd()
63 struct vm_area_struct *vma = walk->vma; in migrate_vma_collect_pmd()
93 walk); in migrate_vma_collect_pmd()
99 walk); in migrate_vma_collect_pmd()
[all …]
/linux/drivers/crypto/axis/
artpec6_crypto.c
808 bbuf->sg = walk->sg; in setup_bounce_buffer_in()
831 while (walk->sg && count) { in artpec6_crypto_setup_sg_descrs_in()
858 sg_page(walk->sg), in artpec6_crypto_setup_sg_descrs_in()
859 walk->sg->offset + in artpec6_crypto_setup_sg_descrs_in()
860 walk->offset, in artpec6_crypto_setup_sg_descrs_in()
894 while (walk->sg && count) { in artpec6_crypto_setup_sg_descrs_out()
906 walk->offset); in artpec6_crypto_setup_sg_descrs_out()
915 sg_page(walk->sg), in artpec6_crypto_setup_sg_descrs_out()
916 walk->sg->offset + in artpec6_crypto_setup_sg_descrs_out()
917 walk->offset, in artpec6_crypto_setup_sg_descrs_out()
[all …]
/linux/fs/fat/
namei_msdos.c
28 unsigned char *walk; in msdos_format_name() local
45 for (walk = res; len && walk - res < 8; walk++) { in msdos_format_name()
64 if ((res == walk) && (c == 0xE5)) in msdos_format_name()
69 *walk = (!opts->nocase && c >= 'a' && c <= 'z') ? c - 32 : c; in msdos_format_name()
82 while (walk - res < 8) in msdos_format_name()
83 *walk++ = ' '; in msdos_format_name()
84 while (len > 0 && walk - res < MSDOS_NAME) { in msdos_format_name()
103 *walk++ = c - 32; in msdos_format_name()
105 *walk++ = c; in msdos_format_name()
112 while (walk - res < MSDOS_NAME) in msdos_format_name()
[all …]
