/linux/drivers/net/ethernet/huawei/hinic/

hinic_hw_qp.c
    635: void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size,  in hinic_sq_write_db()
    658: unsigned int wqe_size, u16 *prod_idx)  in hinic_sq_get_wqe()
    674: void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size)  in hinic_sq_return_wqe()
    689: struct sk_buff *skb, unsigned int wqe_size)  in hinic_sq_write_wqe()
    713: unsigned int *wqe_size, u16 *cons_idx)  in hinic_sq_read_wqebb()
    751: unsigned int wqe_size, u16 *cons_idx)  in hinic_sq_read_wqe()
    766: void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size)  in hinic_sq_put_wqe()
    797: unsigned int wqe_size, u16 *prod_idx)  in hinic_rq_get_wqe()
    838: unsigned int wqe_size,  in hinic_rq_read_wqe()
    873: unsigned int wqe_size,  in hinic_rq_read_next_wqe()
    [all …]

hinic_hw_wq.c
    740: struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,  in hinic_get_wqe()
    789: void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size)  in hinic_return_wqe()
    803: void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size)  in hinic_put_wqe()
    821: struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,  in hinic_read_wqe()
    889: unsigned int wqe_size)  in hinic_write_wqe()

hinic_tx.c
    498: unsigned int wqe_size;  in hinic_lb_xmit_frame() (local)
    559: unsigned int wqe_size;  in hinic_xmit_frame() (local)
    670: unsigned int wqe_size;  in free_all_tx_skbs() (local)
    706: unsigned int wqe_size;  in free_tx_poll() (local)

hinic_hw_cmdq.c
    155: unsigned int wqe_size = 0;  in cmdq_wqe_size_from_bdlen() (local)
    532: unsigned int bufdesc_len, wqe_size;  in clear_wqe_complete_bit() (local)
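Read together, the hinic entries trace one send-queue life cycle: the transmit path claims a WQE (hinic_sq_get_wqe), fills it (hinic_sq_write_wqe), rings the doorbell (hinic_sq_write_db), and the completion path later reads and releases it (hinic_sq_read_wqe, hinic_sq_put_wqe), with wqe_size deciding how many basic blocks each step consumes. Below is a minimal user-space model of that claim/release index arithmetic; the WQEBB size, queue depth, and all types are illustrative stand-ins, not the driver's actual values:

#include <stdint.h>
#include <stdio.h>

/* Illustrative ring geometry; the real driver derives both values from
 * hardware capabilities (these are NOT hinic's actual numbers). */
#define WQEBB_SIZE 64u
#define Q_DEPTH    256u

struct sq_model {
        uint16_t prod_idx;      /* next basic block the producer claims */
        uint16_t cons_idx;      /* next basic block the consumer frees  */
};

/* Round a WQE's byte size up to whole basic blocks, the unit by which
 * the get/put pairs above advance the ring indices. */
static unsigned int wqe_size_to_wqebbs(unsigned int wqe_size)
{
        return (wqe_size + WQEBB_SIZE - 1) / WQEBB_SIZE;
}

/* Claim room for one WQE; fails when not enough basic blocks are free. */
static int sq_get_wqe(struct sq_model *sq, unsigned int wqe_size,
                      uint16_t *prod_idx)
{
        unsigned int used = (uint16_t)(sq->prod_idx - sq->cons_idx);

        if (used + wqe_size_to_wqebbs(wqe_size) > Q_DEPTH)
                return -1;                      /* ring full */
        *prod_idx = sq->prod_idx % Q_DEPTH;
        sq->prod_idx += wqe_size_to_wqebbs(wqe_size);
        return 0;
}

/* Completion path: hand the WQE's basic blocks back to the ring. */
static void sq_put_wqe(struct sq_model *sq, unsigned int wqe_size)
{
        sq->cons_idx += wqe_size_to_wqebbs(wqe_size);
}

int main(void)
{
        struct sq_model sq = { 0, 0 };
        uint16_t pi;

        if (!sq_get_wqe(&sq, 200, &pi))         /* 200 bytes -> 4 WQEBBs */
                printf("claimed WQE at prod_idx %u\n", (unsigned)pi);
        sq_put_wqe(&sq, 200);
        return 0;
}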
/linux/drivers/infiniband/sw/rxe/

rxe_srq.c
    49: int wqe_size;  in rxe_srq_from_init() (local)
    157: int wqe_size;  in rxe_srq_from_attr() (local)

rxe_qp.c
    191: int wqe_size;  in rxe_init_sq() (local)
    283: int wqe_size;  in rxe_init_rq() (local)
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/

txrx.h
    49: #define MLX5E_KSM_MAX_ENTRIES_PER_WQE(wqe_size) \  (argument)
    52: #define MLX5E_KSM_ENTRIES_PER_WQE(wqe_size) \  (argument)
    130: static inline void *mlx5e_fetch_wqe(struct mlx5_wq_cyc *wq, u16 pi, size_t wqe_size)  in mlx5e_fetch_wqe()
    521: static inline u16 mlx5e_stop_room_for_wqe(struct mlx5_core_dev *mdev, u16 wqe_size)  in mlx5e_stop_room_for_wqe()
    552: static inline bool mlx5e_icosq_can_post_wqe(struct mlx5e_icosq *sq, u16 wqe_size)  in mlx5e_icosq_can_post_wqe()

params.c
    835: int wqe_size = BIT(log_stride_sz) * num_strides;  in mlx5e_shampo_get_log_cq_size() (local)
    1043: int wqe_size = BIT(log_stride_sz) * num_strides;  in mlx5e_shampo_hd_per_wqe() (local)
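Both params.c hits compute the same quantity: with a striding RQ, one receive WQE spans num_strides strides of 2^log_stride_sz bytes. A worked example of that arithmetic follows; the stride count and size chosen here are illustrative, not mlx5 defaults:

#include <stdio.h>

#define BIT(n) (1u << (n))

int main(void)
{
        /* Illustrative values only; the driver derives both from the
         * device capabilities and the configured MTU. */
        unsigned int log_stride_sz = 11;        /* 2 KiB strides */
        unsigned int num_strides   = BIT(5);    /* 32 strides    */
        unsigned int wqe_size      = BIT(log_stride_sz) * num_strides;

        /* 2048 B * 32 = 64 KiB of receive buffer described by one WQE. */
        printf("wqe_size = %u bytes\n", wqe_size);
        return 0;
}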
/linux/drivers/infiniband/hw/qedr/

qedr_hsi_rdma.h
    u8 wqe_size;  (member, at lines 310, 338, 374, 420, 475, 498, 548, 602, 628, 663)
    [all …]

qedr.h
    434: u8 wqe_size;  (member)
    445: u8 wqe_size;  (member)

verbs.c
    3291: struct qedr_qp *qp, u8 *wqe_size,  in qedr_prepare_sq_inline_data()
    3376: static u32 qedr_prepare_sq_sges(struct qedr_qp *qp, u8 *wqe_size,  in qedr_prepare_sq_sges()
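The two verbs.c helpers grow *wqe_size along different paths: small payloads are copied straight into the WQE (inline data), while larger ones are described by SGEs the device fetches later. A hypothetical sketch of that split follows; the 96-byte threshold and the struct layout are invented for illustration (the real limits are per-device):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Assumed inline cutoff; the real cap comes from device attributes. */
#define INLINE_THRESHOLD 96u

struct sge_model {              /* hypothetical gather-entry layout */
        uint64_t addr;
        uint32_t len;
        uint32_t lkey;
};

/* Append one buffer to a WQE under construction and return how many
 * bytes of WQE payload it consumed, mirroring the inline-vs-SGE split
 * between the two qedr helpers listed above. */
static uint32_t append_buffer(uint8_t *wqe_payload, const void *buf,
                              uint32_t len, uint64_t addr, uint32_t lkey)
{
        struct sge_model sge = { addr, len, lkey };

        if (len <= INLINE_THRESHOLD) {          /* data travels in the WQE */
                memcpy(wqe_payload, buf, len);
                return len;
        }
        memcpy(wqe_payload, &sge, sizeof(sge)); /* device DMAs it later */
        return sizeof(sge);
}

int main(void)
{
        uint8_t payload[256];
        uint8_t small[16] = { 0 };

        printf("small buffer consumed %u WQE bytes\n",
               (unsigned)append_buffer(payload, small, sizeof(small),
                                       0x1000, 0x42));
        return 0;
}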
/linux/drivers/infiniband/hw/ocrdma/

ocrdma_verbs.c
    1922: const struct ib_send_wr *wr, u32 wqe_size)  in ocrdma_build_inline_sges()
    1963: u32 wqe_size = sizeof(*hdr);  in ocrdma_build_send() (local)
    1982: u32 wqe_size = sizeof(*hdr) + sizeof(*ext_rw);  in ocrdma_build_write() (local)
    1999: u32 wqe_size = ((wr->num_sge + 1) * sizeof(struct ocrdma_sge)) +  in ocrdma_build_read() (local)
    2032: u32 wqe_size = sizeof(*fast_reg) + sizeof(*hdr);  in ocrdma_build_reg() (local)
    2201: u32 wqe_size = 0;  in ocrdma_build_rqe() (local)
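The ocrdma builders size each WQE additively: a fixed header, an optional extended RDMA segment, then one SGE per scatter/gather element, exactly as the initializers above suggest. A self-contained sketch of that accounting follows; the stand-in struct sizes are hypothetical (the real layouts live in the driver's hardware headers):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical descriptor sizes standing in for the real hardware
 * structs (hdr, ext_rw, ocrdma_sge in the driver's headers). */
struct hdr_model    { uint32_t w[4]; };  /* 16-byte WQE header          */
struct ext_rw_model { uint32_t w[4]; };  /* 16-byte remote-address part */
struct sge_model    { uint32_t w[4]; };  /* 16-byte gather entry        */

/* RDMA WRITE sizing in the style of ocrdma_build_write(): fixed header,
 * extended segment, then one SGE per source buffer. */
static uint32_t write_wqe_size(unsigned int num_sge)
{
        return sizeof(struct hdr_model) + sizeof(struct ext_rw_model) +
               num_sge * sizeof(struct sge_model);
}

int main(void)
{
        /* 16 + 16 + 3 * 16 = 80 bytes before any alignment rounding. */
        printf("write WQE with 3 SGEs: %u bytes\n",
               (unsigned)write_wqe_size(3));
        return 0;
}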
/linux/drivers/infiniband/hw/erdma/

erdma_qp.c
    288: u32 wqe_size, wqebb_cnt, hw_op, flags, sgl_offset;  in erdma_push_one_sqe() (local)
/linux/drivers/infiniband/hw/vmw_pvrdma/

pvrdma.h
    155: int wqe_size;  (member)
    170: int wqe_size;  (member)
/linux/include/uapi/rdma/

ocrdma-abi.h
    55: __u32 wqe_size;  (member)

ib_user_verbs.h
    842: __u32 wqe_size;  (member)
    861: __u32 wqe_size;  (member)
    874: __u32 wqe_size;  (member)
/linux/drivers/infiniband/hw/bnxt_re/

qplib_fp.h
    95: u16 wqe_size;  (member)
    256: u16 wqe_size;  (member)
    615: static inline u32 bnxt_qplib_set_rq_max_slot(u32 wqe_size)  in bnxt_qplib_set_rq_max_slot()
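bnxt_re expresses queue capacity in fixed-size slots, so bnxt_qplib_set_rq_max_slot() has to turn a byte-sized wqe_size into a slot count. A hedged sketch of that conversion follows; the 16-byte slot granularity is an assumption for illustration:

#include <stdint.h>
#include <stdio.h>

/* Assumed slot granularity, for illustration only. */
#define SLOT_SIZE 16u

/* Convert a WQE byte size into the number of fixed-size slots it
 * occupies, rounding up, in the spirit of bnxt_qplib_set_rq_max_slot(). */
static uint32_t rq_max_slot(uint32_t wqe_size)
{
        return (wqe_size + SLOT_SIZE - 1) / SLOT_SIZE;
}

int main(void)
{
        printf("a 192-byte RQ WQE spans %u slots\n",
               (unsigned)rq_max_slot(192));
        return 0;
}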
/linux/drivers/infiniband/hw/mlx5/

qp.c
    274: size_t wqe_size = 1 << wq->wqe_shift;  in mlx5_ib_read_wqe_rq() (local)
    306: size_t wqe_size = 1 << srq->msrq.wqe_shift;  in mlx5_ib_read_wqe_srq() (local)
    436: int wqe_size;  in set_rq_size() (local)
    568: static int get_send_sge(struct ib_qp_init_attr *attr, int wqe_size)  in get_send_sge()
    594: int wqe_size;  in calc_sq_size() (local)

umr.c
    262: unsigned int wqe_size =  in mlx5r_umr_post_send() (local)

odp.c
    1252: int wqe_size = 1 << srq->msrq.wqe_shift;  in mlx5_ib_mr_responder_pfault_handler_srq() (local)
    1271: int wqe_size = 1 << wq->wqe_shift;  in mlx5_ib_mr_responder_pfault_handler_rq() (local)

wr.c
    431: int wqe_size;  in set_sig_data_segment() (local)
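Throughout the mlx5 entries the WQE stride is a power of two (wqe_size = 1 << wqe_shift), so locating WQE n in the ring is a shift plus a mask rather than a multiply and a modulo. A small model of that lookup follows; the queue geometry is illustrative:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative geometry: 64-byte strides (shift 6), 16 WQEs in the ring. */
#define WQE_SHIFT 6u
#define WQE_CNT   16u

/* Byte offset of WQE number idx in the queue buffer. Because both the
 * stride and the count are powers of two, wrap-around is a mask and the
 * multiply is a shift, matching the "wqe_size = 1 << wqe_shift" idiom. */
static size_t wqe_offset(uint32_t idx)
{
        return (size_t)(idx & (WQE_CNT - 1)) << WQE_SHIFT;
}

int main(void)
{
        printf("WQE 18 wraps to offset %zu\n", wqe_offset(18)); /* 128 */
        return 0;
}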
/linux/drivers/net/ethernet/ibm/ehea/

ehea_qmr.c
    358: int nr_pages, int wqe_size, int act_nr_sges,  in ehea_qp_alloc_register()
/linux/drivers/infiniband/hw/irdma/

uk.c
    1615: int irdma_fragcnt_to_wqesize_rq(u32 frag_cnt, u16 *wqe_size)  in irdma_fragcnt_to_wqesize_rq()
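irdma_fragcnt_to_wqesize_rq() maps a receive WR's fragment count to the fixed WQE footprint the hardware expects, failing when no bucket fits. The 32/64/128/256-byte ladder below is an assumed illustration of the pattern, not the driver's exact table:

#include <stdint.h>
#include <stdio.h>

/* Map a fragment (SGE) count to a fixed WQE size bucket, returning
 * nonzero when the count exceeds the largest bucket. The boundaries
 * here are assumptions, not irdma's actual table. */
static int fragcnt_to_wqesize_rq(uint32_t frag_cnt, uint16_t *wqe_size)
{
        if (frag_cnt <= 1)
                *wqe_size = 32;
        else if (frag_cnt <= 3)
                *wqe_size = 64;
        else if (frag_cnt <= 7)
                *wqe_size = 128;
        else if (frag_cnt <= 14)
                *wqe_size = 256;
        else
                return -1;
        return 0;
}

int main(void)
{
        uint16_t size;

        if (!fragcnt_to_wqesize_rq(5, &size))
                printf("5 fragments need a %u-byte RQ WQE\n", (unsigned)size);
        return 0;
}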
/linux/drivers/net/ethernet/microsoft/mana/

gdma_main.c
    1100: u32 wqe_size;  in mana_gd_post_work_request() (local)
/linux/drivers/infiniband/core/

uverbs_cmd.c
    2190: u32 wqe_size, u32 sge_count)  in ib_uverbs_unmarshall_recv()
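ib_uverbs_unmarshall_recv() copies an array of receive WRs out of userspace, where both wqe_size and sge_count are untrusted, so the size multiplication must be bounds- and overflow-checked before anything is allocated. A hedged sketch of that validation pattern follows; the cap, the minimum size, and the function shape are stand-ins, not the kernel's actual code:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Illustrative cap on the total copied region. */
#define MAX_WR_TOTAL (1u << 20)

/* Validate untrusted sizes before trusting wr_count * wqe_size
 * arithmetic; a real implementation would also check sge_count
 * against what fits inside wqe_size. */
static void *unmarshall_recv(uint32_t wr_count, uint32_t wqe_size,
                             uint32_t min_wqe_size)
{
        if (wqe_size < min_wqe_size)
                return NULL;            /* too small to hold one WR header */
        if (wr_count && wqe_size > MAX_WR_TOTAL / wr_count)
                return NULL;            /* wr_count * wqe_size would overflow the cap */
        return calloc(wr_count, wqe_size);
}

int main(void)
{
        void *buf = unmarshall_recv(8, 64, 16);

        printf(buf ? "accepted\n" : "rejected\n");
        free(buf);
        return 0;
}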