
Searched refs:blk_mq_rq_to_pdu (Results 1 – 25 of 46) sorted by relevance

/linux/block/
bsg-lib.c
49 job = blk_mq_rq_to_pdu(rq); in bsg_transport_sg_io_fn()
207 struct bsg_job *job = blk_mq_rq_to_pdu(rq); in bsg_complete()
234 struct bsg_job *job = blk_mq_rq_to_pdu(req); in bsg_prepare_job()
291 ret = bset->job_fn(blk_mq_rq_to_pdu(req)); in bsg_queue_rq()
304 struct bsg_job *job = blk_mq_rq_to_pdu(req); in bsg_init_rq()
315 struct bsg_job *job = blk_mq_rq_to_pdu(req); in bsg_exit_rq()
/linux/include/scsi/
scsi_tcq.h
39 return blk_mq_rq_to_pdu(req); in scsi_host_find_tag()
/linux/drivers/nvme/host/
apple.c
337 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_iod_list()
345 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_free_prps()
360 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_unmap_data()
397 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_setup_prps()
492 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_setup_prp_simple()
511 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_map_data()
551 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_unmap_rq()
739 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_queue_rq()
788 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_init_request()
879 struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_timeout()
rdma.c
288 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_exit_request()
298 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_init_request()
1223 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_dma_unmap_req()
1240 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_unmap_data()
1475 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_dma_map_req()
1535 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_map_data()
1711 req = blk_mq_rq_to_pdu(rq); in nvme_rdma_process_nvme_rsp()
1951 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_complete_timed_out()
1960 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_timeout()
2003 struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_queue_rq()
[all …]
tcp.c
507 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_exit_request()
517 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_init_request()
598 req = blk_mq_rq_to_pdu(rq); in nvme_tcp_process_nvme_cqe()
714 req = blk_mq_rq_to_pdu(rq); in nvme_tcp_handle_r2t()
812 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_recv_data()
899 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_recv_ddgst()
912 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_recv_ddgst()
2437 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_complete_timed_out()
2446 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_timeout()
2486 struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_map_data()
[all …]
pci.c
423 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_init_request()
527 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_free_prps()
542 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_unmap_data()
582 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_setup_prps()
688 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_setup_sgls()
734 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_setup_prp_simple()
755 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_setup_sgl_simple()
772 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_map_data()
828 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_map_metadata()
840 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_prep_rq()
[all …]
fc.c
1836 struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq); in nvme_fc_exit_request()
2145 struct nvme_fcp_op_w_sgl *op = blk_mq_rq_to_pdu(rq); in nvme_fc_init_request()
2457 struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(req); in nvme_fc_terminate_exchange()
2568 struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq); in nvme_fc_timeout()
2794 struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq); in nvme_fc_queue_rq()
2851 struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq); in nvme_fc_complete_rq()
/linux/drivers/nvme/target/
loop.c
76 struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req); in nvme_loop_complete_rq()
137 struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req); in nvme_loop_queue_rq()
208 struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req); in nvme_loop_init_request()
212 return nvme_loop_init_iod(ctrl, blk_mq_rq_to_pdu(req), in nvme_loop_init_request()
/linux/drivers/md/
dm-rq.c
124 return blk_mq_rq_to_pdu(rq); in tio_from_request()
461 struct dm_rq_target_io *tio = blk_mq_rq_to_pdu(rq); in dm_mq_init_request()
481 struct dm_rq_target_io *tio = blk_mq_rq_to_pdu(rq); in dm_mq_queue_rq()
/linux/drivers/scsi/
scsi_lib.c
314 scmd = blk_mq_rq_to_pdu(req); in scsi_execute_cmd()
627 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_end_request()
1214 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_initialize_rq()
1243 scsi_mq_uninit_cmd(blk_mq_rq_to_pdu(rq)); in scsi_cleanup_rq()
1266 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_setup_scsi_cmnd()
1513 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_complete()
1636 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_prepare_cmd()
1785 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_mq_set_rq_budget_token()
1792 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_mq_get_rq_budget_token()
1902 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_mq_init_request()
[all …]
scsi_debugfs.c
56 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_show_rq()
hosts.c
595 struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_host_check_in_flight()
689 struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq); in complete_all_cmds_iter()
725 struct scsi_cmnd *sc = blk_mq_rq_to_pdu(req); in __scsi_host_busy_iter_fn()
scsi_ioctl.c
351 struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq); in scsi_fill_sghdr_rq()
375 struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq); in scsi_complete_sghdr_rq()
445 scmd = blk_mq_rq_to_pdu(rq); in sg_io()
541 scmd = blk_mq_rq_to_pdu(rq); in sg_scsi_ioctl()
scsi_bsg.c
34 scmd = blk_mq_rq_to_pdu(rq); in scsi_bsg_sg_io_fn()
/linux/drivers/mmc/core/
queue.h
27 return blk_mq_rq_to_pdu(rq); in req_to_mmc_queue_req()
/linux/drivers/block/
virtio_blk.c
337 struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtblk_request_done()
432 struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtio_queue_rq()
469 struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtblk_prep_rq_batch()
485 struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtblk_add_req_batch()
575 vbr = blk_mq_rq_to_pdu(req); in virtblk_submit_zone_report()
821 vbr = blk_mq_rq_to_pdu(req); in virtblk_get_id()
1199 virtblk_unmap_data(req, blk_mq_rq_to_pdu(req)); in virtblk_complete_batch()
nbd.c
372 struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req); in nbd_complete_rq()
436 struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req); in nbd_xmit_timeout()
803 cmd = blk_mq_rq_to_pdu(req); in nbd_handle_reply()
940 struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req); in nbd_clear_req()
1095 struct nbd_cmd *cmd = blk_mq_rq_to_pdu(bd->rq); in nbd_queue_rq()
1786 struct nbd_cmd *cmd = blk_mq_rq_to_pdu(rq); in nbd_init_request()
ublk_drv.c
321 pdu = blk_mq_rq_to_pdu(req); in ublk_report_zones()
368 struct ublk_rq_data *pdu = blk_mq_rq_to_pdu(req); in ublk_setup_iod_zoned()
572 struct ublk_rq_data *data = blk_mq_rq_to_pdu(req); in ublk_init_req_ref()
582 struct ublk_rq_data *data = blk_mq_rq_to_pdu(req); in ublk_get_req_ref()
594 struct ublk_rq_data *data = blk_mq_rq_to_pdu(req); in ublk_put_req_ref()
1177 struct ublk_rq_data *data = blk_mq_rq_to_pdu(rq); in ublk_queue_cmd()
/linux/drivers/mtd/ubi/
block.c
182 struct ubiblock_pdu *pdu = blk_mq_rq_to_pdu(req); in ubiblock_read()
317 struct ubiblock_pdu *pdu = blk_mq_rq_to_pdu(req); in ubiblock_init_request()
/linux/drivers/block/mtip32xx/
mtip32xx.c
155 return blk_mq_rq_to_pdu(blk_mq_tag_to_rq(dd->tags.tags[0], tag)); in mtip_cmd_from_tag()
999 int_cmd = blk_mq_rq_to_pdu(rq); in mtip_exec_internal_command()
2431 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_softirq_done_fn()
2446 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(req); in mtip_abort_cmd()
3253 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_check_unal_depth()
3277 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_issue_reserved_cmd()
3315 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_queue_rq()
3336 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_free_cmd()
3349 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_init_cmd()
3365 struct mtip_cmd *cmd = blk_mq_rq_to_pdu(req); in mtip_cmd_timeout()
/linux/drivers/block/null_blk/
main.c
820 struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in null_complete_rq()
1424 struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in should_timeout_request()
1432 struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in should_requeue_request()
1534 cmd = blk_mq_rq_to_pdu(req); in null_poll()
1549 struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in null_timeout_rq()
1583 struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in null_queue_rq()
/linux/drivers/s390/block/
scm_blk.c
257 error = blk_mq_rq_to_pdu(scmrq->request[i]); in scm_request_finish()
420 blk_status_t *error = blk_mq_rq_to_pdu(req); in scm_blk_request_done()
dasd_fba.c
347 blk_mq_rq_to_pdu(req)); in dasd_fba_build_cp_discard()
482 blk_mq_rq_to_pdu(req)); in dasd_fba_build_cp_regular()
/linux/drivers/target/
target_core_pscsi.c
958 scmd = blk_mq_rq_to_pdu(req); in pscsi_execute_cmd()
1009 struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(req); in pscsi_req_done()
/linux/drivers/block/aoe/
aoedev.c
171 req = blk_mq_rq_to_pdu(rq); in aoe_failip()
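The matches above all follow the same blk-mq convention: the driver asks the block layer to reserve cmd_size extra bytes of private per-request data (the "PDU") directly behind each struct request, and blk_mq_rq_to_pdu() returns a pointer to that area. Below is a minimal sketch of that pattern; the mydrv_* names are hypothetical stand-ins for any of the drivers listed, while the blk-mq structures and callbacks are the real in-tree API.

#include <linux/blk-mq.h>

/* Driver-private per-request data: the "PDU" the hits above cast to. */
struct mydrv_cmd {
	int status;
};

/* Called once per request when the tag set is set up. */
static int mydrv_init_request(struct blk_mq_tag_set *set, struct request *rq,
			      unsigned int hctx_idx, unsigned int numa_node)
{
	struct mydrv_cmd *cmd = blk_mq_rq_to_pdu(rq);	/* memory right after *rq */

	cmd->status = 0;
	return 0;
}

/* Called for every I/O; the PDU travels with the request at no extra cost. */
static blk_status_t mydrv_queue_rq(struct blk_mq_hw_ctx *hctx,
				   const struct blk_mq_queue_data *bd)
{
	struct mydrv_cmd *cmd = blk_mq_rq_to_pdu(bd->rq);

	blk_mq_start_request(bd->rq);
	/* ... issue bd->rq to hardware, tracking it through cmd ... */
	cmd->status = 0;
	return BLK_STS_OK;
}

static const struct blk_mq_ops mydrv_mq_ops = {
	.queue_rq	= mydrv_queue_rq,
	.init_request	= mydrv_init_request,
};

/* The PDU exists only because cmd_size reserves room for it per request. */
static struct blk_mq_tag_set mydrv_tag_set = {
	.ops		= &mydrv_mq_ops,
	.nr_hw_queues	= 1,
	.queue_depth	= 64,
	.cmd_size	= sizeof(struct mydrv_cmd),
	.numa_node	= NUMA_NO_NODE,
};

A real driver would then call blk_mq_alloc_tag_set(&mydrv_tag_set) and attach the queue to a gendisk; after that the per-request area is valid for the lifetime of each request, which is why nearly all of the hits in this search sit in init_request/exit_request, queue_rq, completion, and timeout handlers.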
