
Searched refs:bufs (Results 1 – 25 of 125) sorted by relevance


/linux/sound/core/oss/
io.c 50 void **bufs = (void**)plugin->extra_data; in io_playback_transfer() local
51 if (snd_BUG_ON(!bufs)) in io_playback_transfer()
55 bufs[channel] = src_channels[channel].area.addr; in io_playback_transfer()
57 bufs[channel] = NULL; in io_playback_transfer()
59 return pcm_writev(plugin->plug, bufs, frames); in io_playback_transfer()
76 void **bufs = (void**)plugin->extra_data; in io_capture_transfer() local
77 if (snd_BUG_ON(!bufs)) in io_capture_transfer()
81 bufs[channel] = dst_channels[channel].area.addr; in io_capture_transfer()
83 bufs[channel] = NULL; in io_capture_transfer()
85 return pcm_readv(plugin->plug, bufs, frames); in io_capture_transfer()
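
The io.c hits above show the transfer pattern: the plugin's extra_data holds a per-channel pointer array, each enabled channel's area address is dropped into bufs[], disabled channels get NULL, and the whole array is handed to one vectored read/write call. A minimal userspace sketch of that gather step follows; the channel_area struct and writev_frames() are simplified stand-ins, not the ALSA plugin API.

#include <stdio.h>

struct channel_area { void *addr; int enabled; };

/* stand-in for a vectored sink such as pcm_writev() */
static long writev_frames(void **bufs, unsigned int channels, long frames)
{
    for (unsigned int c = 0; c < channels; c++)
        printf("ch%u -> %p\n", c, bufs[c]);
    return frames;
}

static long playback_transfer(void **bufs, const struct channel_area *src,
                              unsigned int channels, long frames)
{
    /* one pointer per channel: real data, or NULL for silent channels */
    for (unsigned int c = 0; c < channels; c++)
        bufs[c] = src[c].enabled ? src[c].addr : NULL;

    return writev_frames(bufs, channels, frames);
}

int main(void)
{
    char left[16], right[16];
    struct channel_area areas[2] = { { left, 1 }, { right, 0 } };
    void *bufs[2];

    return playback_transfer(bufs, areas, 2, 4) == 4 ? 0 : 1;
}
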
/linux/arch/riscv/kernel/
unaligned_access_speed.c 221 struct page **bufs = kcalloc(cpu_count, sizeof(*bufs), GFP_KERNEL); in check_unaligned_access_speed_all_cpus() local
223 if (!bufs) { in check_unaligned_access_speed_all_cpus()
233 bufs[cpu] = alloc_pages(GFP_KERNEL, MISALIGNED_BUFFER_ORDER); in check_unaligned_access_speed_all_cpus()
234 if (!bufs[cpu]) { in check_unaligned_access_speed_all_cpus()
241 on_each_cpu(check_unaligned_access_nonboot_cpu, bufs, 1); in check_unaligned_access_speed_all_cpus()
244 smp_call_on_cpu(0, check_unaligned_access, bufs[0], true); in check_unaligned_access_speed_all_cpus()
255 if (bufs[cpu]) in check_unaligned_access_speed_all_cpus()
256 __free_pages(bufs[cpu], MISALIGNED_BUFFER_ORDER); in check_unaligned_access_speed_all_cpus()
259 kfree(bufs); in check_unaligned_access_speed_all_cpus()
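
The riscv hits follow a common "array of per-CPU buffers" shape: kcalloc() a pointer array sized by the CPU count, allocate one buffer per CPU, run the measurement, then free every non-NULL slot and finally the array itself. A hedged userspace analogue using calloc()/malloc() (the measurement step is only a placeholder):

#include <stdlib.h>
#include <string.h>

#define BUF_SIZE (64 * 1024)

/* placeholder for the per-CPU measurement work */
static void measure(void *buf) { memset(buf, 0, BUF_SIZE); }

int check_all_cpus(int cpu_count)
{
    void **bufs = calloc(cpu_count, sizeof(*bufs));
    int ret = 0;

    if (!bufs)
        return -1;

    for (int cpu = 0; cpu < cpu_count; cpu++) {
        bufs[cpu] = malloc(BUF_SIZE);
        if (!bufs[cpu]) {
            ret = -1;
            goto out;           /* partial allocation: free what exists */
        }
    }

    for (int cpu = 0; cpu < cpu_count; cpu++)
        measure(bufs[cpu]);

out:
    for (int cpu = 0; cpu < cpu_count; cpu++)
        if (bufs[cpu])
            free(bufs[cpu]);
    free(bufs);
    return ret;
}
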
/linux/drivers/net/ethernet/cisco/enic/
vnic_wq.c 25 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ(count), GFP_KERNEL); in vnic_wq_alloc_bufs()
26 if (!wq->bufs[i]) in vnic_wq_alloc_bufs()
31 buf = wq->bufs[i]; in vnic_wq_alloc_bufs()
37 buf->next = wq->bufs[0]; in vnic_wq_alloc_bufs()
41 buf->next = wq->bufs[i + 1]; in vnic_wq_alloc_bufs()
51 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_alloc_bufs()
66 if (wq->bufs[i]) { in vnic_wq_free()
67 kfree(wq->bufs[i]); in vnic_wq_free()
68 wq->bufs[i] = NULL; in vnic_wq_free()
140 &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)] in enic_wq_init_start()
[all …]
vnic_rq.c 25 rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ(count), GFP_KERNEL); in vnic_rq_alloc_bufs()
26 if (!rq->bufs[i]) in vnic_rq_alloc_bufs()
31 buf = rq->bufs[i]; in vnic_rq_alloc_bufs()
37 buf->next = rq->bufs[0]; in vnic_rq_alloc_bufs()
40 buf->next = rq->bufs[i + 1]; in vnic_rq_alloc_bufs()
48 rq->to_use = rq->to_clean = rq->bufs[0]; in vnic_rq_alloc_bufs()
63 if (rq->bufs[i]) { in vnic_rq_free()
64 kfree(rq->bufs[i]); in vnic_rq_free()
65 rq->bufs[i] = NULL; in vnic_rq_free()
121 &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES(count)] in vnic_rq_init_start()
[all …]
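
Both enic files show the same block-chained descriptor ring: buffer-control entries are allocated in fixed-size blocks, each entry's next pointer links to the following entry, the last entry of a block links to the first entry of the next block, and the final entry wraps back to bufs[0]; to_use and to_clean then both start at bufs[0]. The fnic and snic hits further down repeat this shape. A simplified userspace sketch (entry contents, block size, and block count are invented for illustration):

#include <stdlib.h>

#define ENTRIES_PER_BLK 32

struct buf { struct buf *next; unsigned int index; };

struct ring {
    struct buf *blocks[4];              /* enough blocks for this sketch */
    unsigned int blk_count;
    struct buf *to_use, *to_clean;
};

int ring_alloc_bufs(struct ring *r, unsigned int count)
{
    r->blk_count = (count + ENTRIES_PER_BLK - 1) / ENTRIES_PER_BLK;
    if (r->blk_count > 4)
        return -1;

    for (unsigned int i = 0; i < r->blk_count; i++) {
        r->blocks[i] = calloc(ENTRIES_PER_BLK, sizeof(struct buf));
        if (!r->blocks[i])
            return -1;                  /* caller frees on failure */
    }

    for (unsigned int i = 0; i < r->blk_count; i++) {
        struct buf *buf = r->blocks[i];

        for (unsigned int j = 0; j < ENTRIES_PER_BLK; j++, buf++) {
            buf->index = i * ENTRIES_PER_BLK + j;
            if (buf->index == count - 1) {
                buf->next = r->blocks[0];       /* last entry wraps to the start */
                break;
            } else if (j == ENTRIES_PER_BLK - 1) {
                buf->next = r->blocks[i + 1];   /* hop to the next block */
            } else {
                buf->next = buf + 1;            /* next entry in this block */
            }
        }
    }

    r->to_use = r->to_clean = r->blocks[0];
    return 0;
}
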
/linux/drivers/scsi/fnic/
vnic_rq.c 22 rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ, GFP_ATOMIC); in vnic_rq_alloc_bufs()
23 if (!rq->bufs[i]) { in vnic_rq_alloc_bufs()
30 buf = rq->bufs[i]; in vnic_rq_alloc_bufs()
36 buf->next = rq->bufs[0]; in vnic_rq_alloc_bufs()
39 buf->next = rq->bufs[i + 1]; in vnic_rq_alloc_bufs()
47 rq->to_use = rq->to_clean = rq->bufs[0]; in vnic_rq_alloc_bufs()
63 kfree(rq->bufs[i]); in vnic_rq_free()
64 rq->bufs[i] = NULL; in vnic_rq_free()
118 &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES] in vnic_rq_init()
174 &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES] in vnic_rq_clean()
vnic_wq.c 42 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC); in vnic_wq_alloc_bufs()
43 if (!wq->bufs[i]) { in vnic_wq_alloc_bufs()
50 buf = wq->bufs[i]; in vnic_wq_alloc_bufs()
56 buf->next = wq->bufs[0]; in vnic_wq_alloc_bufs()
59 buf->next = wq->bufs[i + 1]; in vnic_wq_alloc_bufs()
67 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_alloc_bufs()
82 kfree(wq->bufs[i]); in vnic_wq_free()
83 wq->bufs[i] = NULL; in vnic_wq_free()
160 &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES] in vnic_wq_init_start()
227 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_clean()
/linux/drivers/scsi/snic/
vnic_wq.c 36 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC); in vnic_wq_alloc_bufs()
37 if (!wq->bufs[i]) { in vnic_wq_alloc_bufs()
45 buf = wq->bufs[i]; in vnic_wq_alloc_bufs()
51 buf->next = wq->bufs[0]; in vnic_wq_alloc_bufs()
54 buf->next = wq->bufs[i + 1]; in vnic_wq_alloc_bufs()
62 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_alloc_bufs()
77 kfree(wq->bufs[i]); in svnic_wq_free()
78 wq->bufs[i] = NULL; in svnic_wq_free()
159 &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)] in vnic_wq_init_start()
216 wq->to_use = wq->to_clean = wq->bufs[0]; in svnic_wq_clean()
/linux/tools/virtio/ringtest/
main.c 111 int bufs = runcycles; in run_guest() local
123 if (started < bufs && in run_guest()
141 if (__builtin_expect(completed == bufs, false)) in run_guest()
148 assert(completed <= bufs); in run_guest()
149 assert(started <= bufs); in run_guest()
170 int bufs = runcycles; in run_host() local
188 if (__builtin_expect(completed == bufs, false)) in run_host()
193 assert(completed <= bufs); in run_host()
194 if (completed == bufs) in run_host()
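
In the virtio ringtest (and in the vhost_net/virtio tests further down) bufs is simply the total number of buffers to push through the ring: the loop keeps starting new buffers while started < bufs, counts completions, and exits once completed == bufs, asserting that neither counter overshoots. A tiny standalone analogue of that bookkeeping, with a pretend ring that completes everything immediately:

#include <assert.h>
#include <stdio.h>

int main(void)
{
    const int bufs = 1000;              /* stands in for runcycles */
    int started = 0, completed = 0;

    while (completed < bufs) {
        /* start as many buffers as the (pretend) ring will take */
        while (started < bufs && started - completed < 64)
            started++;

        /* pretend the device completed everything outstanding */
        completed = started;

        assert(started <= bufs);
        assert(completed <= bufs);
    }

    printf("completed %d of %d buffers\n", completed, bufs);
    return completed == bufs ? 0 : 1;
}
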
/linux/drivers/scsi/arm/
arm_scsi.h 35 int bufs = SCp->buffers_residual; in copy_SCp_to_sg() local
40 BUG_ON(bufs + 1 > max); in copy_SCp_to_sg()
44 if (bufs) { in copy_SCp_to_sg()
48 for_each_sg(sg_next(SCp->buffer), src_sg, bufs, i) in copy_SCp_to_sg()
53 return bufs + 1; in copy_SCp_to_sg()
/linux/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c 81 struct prestera_sdma_buf *bufs; member
86 struct prestera_sdma_buf *bufs; member
297 buf = &ring->bufs[buf_idx]; in prestera_sdma_rx_poll()
345 if (!ring->bufs) in prestera_sdma_rx_fini()
383 if (!ring->bufs) in prestera_sdma_rx_init()
388 tail = &ring->bufs[bnum - 1]; in prestera_sdma_rx_init()
389 head = &ring->bufs[0]; in prestera_sdma_rx_init()
533 if (!tx_ring->bufs) in prestera_sdma_tx_init()
536 tail = &tx_ring->bufs[bnum - 1]; in prestera_sdma_tx_init()
537 head = &tx_ring->bufs[0]; in prestera_sdma_tx_init()
[all …]
/linux/tools/virtio/
vhost_net_test.c 276 bool delayed, int bufs) in run_tx_test() argument
289 while (vq->started < bufs && in run_tx_test()
306 if (vq->started >= bufs) in run_tx_test()
325 assert(vq->completed <= bufs); in run_tx_test()
326 assert(vq->started <= bufs); in run_tx_test()
327 if (vq->completed == bufs) in run_tx_test()
343 bool delayed, int bufs) in run_rx_test() argument
355 while (vq->started < bufs && in run_rx_test()
375 if (vq->started >= bufs) in run_rx_test()
401 assert(vq->started <= bufs); in run_rx_test()
[all …]
virtio_test.c 170 bool delayed, int batch, int reset_n, int bufs) in run_test() argument
195 while (started < bufs && in run_test()
218 if (started >= bufs) in run_test()
258 assert(completed <= bufs); in run_test()
259 assert(started <= bufs); in run_test()
260 if (completed == bufs) in run_test()
/linux/drivers/soc/fsl/qbman/
bman.c 113 struct bm_buffer bufs[8]; member
151 struct bm_buffer bufs[8]; member
738 int bman_release(struct bman_pool *pool, const struct bm_buffer *bufs, u8 num) in bman_release() argument
772 bm_buffer_set64(r->bufs, bm_buffer_get64(bufs)); in bman_release()
773 bm_buffer_set_bpid(r->bufs, pool->bpid); in bman_release()
775 memcpy(&r->bufs[1], &bufs[1], i * sizeof(bufs[0])); in bman_release()
786 int bman_acquire(struct bman_pool *pool, struct bm_buffer *bufs, u8 num) in bman_acquire() argument
805 if (bufs) in bman_acquire()
806 memcpy(&bufs[0], &mcr->bufs[0], num * sizeof(bufs[0])); in bman_acquire()
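
bman_release() and bman_acquire() are the public entry points here (their prototypes appear under include/soc/fsl/bman.h below): release pushes up to eight bm_buffer entries into a pool, acquire pulls up to eight back out and copies them into the caller's array. A hedged sketch of a caller, assuming a kernel context and a struct bman_pool obtained elsewhere (e.g. from bman_new_pool()); error handling is trimmed and this is not meant as a complete driver:

#include <soc/fsl/bman.h>

/* Pull up to eight buffers out of a pool and push them straight back. */
static int recycle_buffers(struct bman_pool *pool)
{
    struct bm_buffer bufs[8];
    int acquired, released;

    acquired = bman_acquire(pool, bufs, 8);     /* at most 8 per command */
    if (acquired <= 0)
        return acquired;                        /* pool empty or error */

    released = bman_release(pool, bufs, acquired);
    return released < 0 ? released : acquired;
}
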
/linux/fs/
pipe.c 818 if (pipe->bufs) { in alloc_pipe_info()
861 kfree(pipe->bufs); in free_pipe_info()
1267 struct pipe_buffer *bufs; in pipe_resize_ring() local
1270 bufs = kcalloc(nr_slots, sizeof(*bufs), in pipe_resize_ring()
1272 if (unlikely(!bufs)) in pipe_resize_ring()
1283 kfree(bufs); in pipe_resize_ring()
1295 memcpy(bufs, pipe->bufs + t, in pipe_resize_ring()
1300 memcpy(bufs + tsize, pipe->bufs, in pipe_resize_ring()
1302 memcpy(bufs, pipe->bufs + t, in pipe_resize_ring()
1310 kfree(pipe->bufs); in pipe_resize_ring()
[all …]
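
pipe_resize_ring() allocates a new, larger bufs array with kcalloc(), then copies the occupied slots across: if the live region does not wrap, one memcpy from the old tail suffices; if it wraps past the end of the old array, two memcpys stitch the tail-to-end part and the start-to-head part back together contiguously, and the old array is kfree()d. A userspace sketch of that unwrap-on-resize step for a power-of-two ring of ints (head and tail are free-running counters, as in the pipe code; new_size is assumed to be a power of two):

#include <stdlib.h>
#include <string.h>

struct ring {
    int *bufs;
    unsigned int size;          /* power of two */
    unsigned int head, tail;    /* free-running; used = head - tail */
};

int ring_resize(struct ring *r, unsigned int new_size)
{
    unsigned int mask = r->size - 1;
    unsigned int used = r->head - r->tail;
    unsigned int t = r->tail & mask;
    int *bufs;

    if (used > new_size)
        return -1;                      /* contents would not fit */

    bufs = calloc(new_size, sizeof(*bufs));
    if (!bufs)
        return -1;

    if (used) {
        if (t + used <= r->size) {
            /* occupied region is contiguous in the old array */
            memcpy(bufs, r->bufs + t, used * sizeof(*bufs));
        } else {
            /* occupied region wraps: copy it in two pieces */
            unsigned int tsize = r->size - t;

            memcpy(bufs, r->bufs + t, tsize * sizeof(*bufs));
            memcpy(bufs + tsize, r->bufs, (used - tsize) * sizeof(*bufs));
        }
    }

    free(r->bufs);
    r->bufs = bufs;
    r->size = new_size;
    r->tail = 0;
    r->head = used;
    return 0;
}

Resetting tail to 0 and head to the occupied count keeps the counters consistent with the newly linearised contents, which is the same trick the pipe code uses.
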
/linux/tools/testing/selftests/powerpc/papr_vpd/
papr_vpd.c 198 char *bufs[2]; in papr_vpd_reread() local
200 for (size_t i = 0; i < ARRAY_SIZE(bufs); ++i) { in papr_vpd_reread()
201 bufs[i] = malloc(size); in papr_vpd_reread()
202 FAIL_IF(!bufs[i]); in papr_vpd_reread()
203 ssize_t consumed = pread(fd, bufs[i], size, 0); in papr_vpd_reread()
207 FAIL_IF(memcmp(bufs[0], bufs[1], size)); in papr_vpd_reread()
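
The papr_vpd selftest reads the same file descriptor twice into two freshly malloc()ed buffers and memcmp()s them to check that rereading yields identical data. The same check works for any file descriptor, for example:

#include <stdlib.h>
#include <string.h>
#include <unistd.h>

/* Return 0 if two pread()s of 'size' bytes at offset 0 match, nonzero otherwise. */
int reread_matches(int fd, size_t size)
{
    char *bufs[2] = { NULL, NULL };
    int ret = -1;

    for (size_t i = 0; i < 2; i++) {
        bufs[i] = malloc(size);
        if (!bufs[i])
            goto out;
        if (pread(fd, bufs[i], size, 0) != (ssize_t)size)
            goto out;
    }

    ret = memcmp(bufs[0], bufs[1], size) ? 1 : 0;
out:
    free(bufs[0]);
    free(bufs[1]);
    return ret;
}
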
/linux/drivers/net/ethernet/fungible/funeth/
funeth_rx.c 55 c->bufs[c->prod_cnt & c->mask] = *buf; in cache_offer()
75 buf = &c->bufs[c->cons_cnt & c->mask]; in cache_get()
538 struct funeth_rxbuf *b = q->bufs; in fun_rxq_free_bufs()
551 struct funeth_rxbuf *b = q->bufs; in fun_rxq_alloc_bufs()
561 q->cur_buf = q->bufs; in fun_rxq_alloc_bufs()
570 c->bufs = kvzalloc_node(depth * sizeof(*c->bufs), GFP_KERNEL, node); in fun_rxq_init_cache()
571 return c->bufs ? 0 : -ENOMEM; in fun_rxq_init_cache()
583 kvfree(q->cache.bufs); in fun_rxq_free_cache()
584 q->cache.bufs = NULL; in fun_rxq_free_cache()
669 q->rq_dma_addr, q->bufs); in fun_rxq_create_sw()
[all …]
/linux/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.c 122 conn->qp.rq.bufs[ix] = buf; in mlx5_fpga_conn_post_recv()
171 conn->qp.sq.bufs[ix] = buf; in mlx5_fpga_conn_post_send()
257 buf = conn->qp.rq.bufs[ix]; in mlx5_fpga_conn_rq_cqe()
258 conn->qp.rq.bufs[ix] = NULL; in mlx5_fpga_conn_rq_cqe()
300 buf = conn->qp.sq.bufs[ix]; in mlx5_fpga_conn_sq_cqe()
539 if (!conn->qp.rq.bufs) { in mlx5_fpga_conn_create_qp()
547 if (!conn->qp.sq.bufs) { in mlx5_fpga_conn_create_qp()
595 kvfree(conn->qp.sq.bufs); in mlx5_fpga_conn_create_qp()
597 kvfree(conn->qp.rq.bufs); in mlx5_fpga_conn_create_qp()
652 kvfree(conn->qp.sq.bufs); in mlx5_fpga_conn_destroy_qp()
[all …]
conn.h 74 struct mlx5_fpga_dma_buf **bufs; member
81 struct mlx5_fpga_dma_buf **bufs; member
/linux/arch/mips/include/asm/octeon/
cvmx-wqe.h 62 uint64_t bufs:8; member
193 uint64_t bufs:8;
198 uint64_t bufs:8; member
240 uint64_t bufs:8;
268 uint64_t bufs:8; member
399 uint64_t bufs:8;
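
In cvmx-wqe.h, bufs:8 is an 8-bit bitfield inside a 64-bit hardware work-queue-entry word: the number of buffers attached to a packet is packed alongside the other descriptor fields. A generic illustration of that packing style follows; the field names and widths are invented for the example and are not the Octeon layout (bitfield ordering is also implementation-defined, so real descriptor code pins it per-endianness as cvmx-wqe.h does):

#include <stdint.h>
#include <stdio.h>

/* a 64-bit descriptor word with packed fields; layout is illustrative only */
union desc_word {
    uint64_t u64;
    struct {
        uint64_t len:16;        /* total packet length */
        uint64_t bufs:8;        /* number of attached buffers */
        uint64_t port:6;
        uint64_t unused:34;
    } s;
};

int main(void)
{
    union desc_word w = { .u64 = 0 };

    w.s.bufs = 3;
    w.s.len = 1514;
    printf("bufs=%u len=%u raw=%#llx\n",
           (unsigned)w.s.bufs, (unsigned)w.s.len,
           (unsigned long long)w.u64);
    return 0;
}
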
/linux/drivers/infiniband/hw/hns/
hns_roce_alloc.c 132 int hns_roce_get_kmem_bufs(struct hns_roce_dev *hr_dev, dma_addr_t *bufs, in hns_roce_get_kmem_bufs() argument
149 bufs[total++] = hns_roce_buf_dma_addr(buf, offset); in hns_roce_get_kmem_bufs()
156 int hns_roce_get_umem_bufs(dma_addr_t *bufs, int buf_cnt, struct ib_umem *umem, in hns_roce_get_umem_bufs() argument
164 bufs[total++] = rdma_block_iter_dma_address(&biter); in hns_roce_get_umem_bufs()
/linux/drivers/md/
dm-verity-fec.c 109 return &fio->bufs[i][j * v->fec->rsn]; in fec_buffer_rs_block()
317 if (fio->bufs[n]) in fec_alloc_bufs()
320 fio->bufs[n] = mempool_alloc(&v->fec->prealloc_pool, GFP_NOWAIT); in fec_alloc_bufs()
321 if (unlikely(!fio->bufs[n])) { in fec_alloc_bufs()
329 if (fio->bufs[n]) in fec_alloc_bufs()
332 fio->bufs[n] = mempool_alloc(&v->fec->extra_pool, GFP_NOWAIT); in fec_alloc_bufs()
334 if (unlikely(!fio->bufs[n])) in fec_alloc_bufs()
354 memset(fio->bufs[n], 0, v->fec->rsn << DM_VERITY_FEC_BUF_RS_BITS); in fec_init_bufs()
501 mempool_free(fio->bufs[n], &f->prealloc_pool); in verity_fec_finish_io()
504 mempool_free(fio->bufs[n], &f->extra_pool); in verity_fec_finish_io()
[all …]
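
fec_alloc_bufs() tries a preallocated mempool first with GFP_NOWAIT and, when that fails, falls back to a second "extra" pool, skipping slots that are already populated; verity_fec_finish_io() later returns each buffer to whichever pool it came from. A hedged userspace analogue of the two-tier allocation (the pools here are plain fixed-size free lists, not kernel mempools):

#include <stddef.h>

struct pool {
    void *slots[8];
    int count;
};

static void *pool_alloc(struct pool *p)
{
    return p->count ? p->slots[--p->count] : NULL;
}

/* Fill bufs[]: prefer the preallocated pool, fall back to the extra pool,
 * and leave a slot NULL if both pools are exhausted (as the FEC code tolerates). */
int alloc_bufs(void **bufs, int n, struct pool *prealloc, struct pool *extra)
{
    int got = 0;

    for (int i = 0; i < n; i++) {
        if (bufs[i]) {
            got++;
            continue;           /* already allocated on an earlier pass */
        }

        bufs[i] = pool_alloc(prealloc);
        if (!bufs[i])
            bufs[i] = pool_alloc(extra);
        if (bufs[i])
            got++;
    }
    return got;
}
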
/linux/fs/jbd2/
commit.c 355 int bufs; in jbd2_journal_commit_transaction() local
574 bufs = 0; in jbd2_journal_commit_transaction()
606 J_ASSERT (bufs == 0); in jbd2_journal_commit_transaction()
627 wbuf[bufs++] = descriptor; in jbd2_journal_commit_transaction()
664 jh, &wbuf[bufs], blocknr); in jbd2_journal_commit_transaction()
669 jbd2_file_log_bh(&io_bufs, wbuf[bufs]); in jbd2_journal_commit_transaction()
687 bufs++; in jbd2_journal_commit_transaction()
699 if (bufs == journal->j_wbufsize || in jbd2_journal_commit_transaction()
703 jbd2_debug(4, "JBD2: Submit %d IOs\n", bufs); in jbd2_journal_commit_transaction()
715 for (i = 0; i < bufs; i++) { in jbd2_journal_commit_transaction()
[all …]
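
In the jbd2 commit path, bufs counts how many buffer heads have been staged into the wbuf[] array: descriptors and data blocks are appended with wbuf[bufs++], and the whole batch is submitted once bufs reaches journal->j_wbufsize or the transaction runs out of blocks, after which bufs is reset to 0. The batching skeleton, stripped of all journalling detail (WBUFSIZE and submit_batch() are stand-ins):

#include <stdio.h>

#define WBUFSIZE 64

/* stand-in for submitting one batch of staged blocks */
static void submit_batch(void **wbuf, int bufs)
{
    (void)wbuf;
    printf("submitting %d blocks\n", bufs);
}

void commit_blocks(void **blocks, int nblocks)
{
    void *wbuf[WBUFSIZE];
    int bufs = 0;

    for (int i = 0; i < nblocks; i++) {
        wbuf[bufs++] = blocks[i];               /* stage one block */

        /* flush when the batch array is full or we are out of blocks */
        if (bufs == WBUFSIZE || i == nblocks - 1) {
            submit_batch(wbuf, bufs);
            bufs = 0;
        }
    }
}
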
/linux/include/linux/
pipe_fs_i.h 78 struct pipe_buffer *bufs; member
183 return &pipe->bufs[slot & (pipe->ring_size - 1)]; in pipe_buf()
254 pipe_buf_release(pipe, &pipe->bufs[--pipe->head & mask]); in pipe_discard_from()
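
pipe_buf() in pipe_fs_i.h shows the indexing idiom used throughout the pipe code (and by the funeth cache_offer()/cache_get() hits above): head and tail are free-running counters, and a slot is mapped into the bufs array with slot & (ring_size - 1), which is valid because ring_size is a power of two. In plain C, with types simplified:

#include <assert.h>

struct ringbuf {
    void **bufs;
    unsigned int ring_size;     /* must be a power of two */
};

/* map a free-running slot counter onto an array index */
static inline void **ring_slot(struct ringbuf *r, unsigned int slot)
{
    assert((r->ring_size & (r->ring_size - 1)) == 0);
    return &r->bufs[slot & (r->ring_size - 1)];
}

Because the counters are only masked at lookup time, empty (head == tail) and full (head - tail == ring_size) remain unambiguous even after the counters wrap.
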
/linux/include/soc/fsl/
bman.h 114 int bman_release(struct bman_pool *pool, const struct bm_buffer *bufs, u8 num);
127 int bman_acquire(struct bman_pool *pool, struct bm_buffer *bufs, u8 num);
/linux/drivers/staging/rtl8723bs/os_dep/
osdep_service.c 192 cbuf->bufs[cbuf->write] = buf; in rtw_cbuf_push()
211 buf = cbuf->bufs[cbuf->read]; in rtw_cbuf_pop()
227 cbuf = rtw_malloc(struct_size(cbuf, bufs, size)); in rtw_cbuf_alloc()
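
The rtl8723bs helpers keep a circular pointer buffer whose bufs[] storage is a flexible array member allocated in one shot with struct_size(); push stores at the write index, pop reads at the read index. A userspace sketch of the same layout (the full/empty checks are simplified to the usual one-slot-wasted scheme, and struct_size() is replaced by an equivalent sizeof expression):

#include <stdlib.h>

struct cbuf {
    unsigned int write, read, size;
    void *bufs[];               /* flexible array member */
};

struct cbuf *cbuf_alloc(unsigned int size)
{
    struct cbuf *c = calloc(1, sizeof(*c) + size * sizeof(c->bufs[0]));

    if (c)
        c->size = size;
    return c;
}

int cbuf_push(struct cbuf *c, void *buf)
{
    unsigned int next = (c->write + 1) % c->size;

    if (next == c->read)
        return -1;              /* full */
    c->bufs[c->write] = buf;
    c->write = next;
    return 0;
}

void *cbuf_pop(struct cbuf *c)
{
    void *buf;

    if (c->read == c->write)
        return NULL;            /* empty */
    buf = c->bufs[c->read];
    c->read = (c->read + 1) % c->size;
    return buf;
}
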
