/linux-6.14.4/block/
    bsg-lib.c
        49   job = blk_mq_rq_to_pdu(rq); in bsg_transport_sg_io_fn()
        207  struct bsg_job *job = blk_mq_rq_to_pdu(rq); in bsg_complete()
        234  struct bsg_job *job = blk_mq_rq_to_pdu(req); in bsg_prepare_job()
        291  ret = bset->job_fn(blk_mq_rq_to_pdu(req)); in bsg_queue_rq()
        304  struct bsg_job *job = blk_mq_rq_to_pdu(req); in bsg_init_rq()
        315  struct bsg_job *job = blk_mq_rq_to_pdu(req); in bsg_exit_rq()
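For context: blk_mq_rq_to_pdu() returns the driver-private per-request data (the "PDU") that blk-mq allocates directly behind each struct request, sized by the cmd_size field of the driver's tag set. A minimal sketch of that pattern, with hypothetical driver and struct names (mydrv_*), is below; it is a sketch of the general blk-mq usage, not any one driver's implementation.

    #include <linux/blk-mq.h>
    #include <linux/numa.h>
    #include <linux/string.h>

    struct mydrv_cmd {                      /* hypothetical per-request PDU */
        int status;
    };

    static blk_status_t mydrv_queue_rq(struct blk_mq_hw_ctx *hctx,
                                       const struct blk_mq_queue_data *bd)
    {
        struct request *rq = bd->rq;
        struct mydrv_cmd *cmd = blk_mq_rq_to_pdu(rq);   /* area behind rq */

        cmd->status = 0;
        blk_mq_start_request(rq);
        /* ... hand off to hardware; the completion path ends the request ... */
        return BLK_STS_OK;
    }

    static const struct blk_mq_ops mydrv_mq_ops = {
        .queue_rq = mydrv_queue_rq,
    };

    static int mydrv_setup_tag_set(struct blk_mq_tag_set *set)
    {
        memset(set, 0, sizeof(*set));
        set->ops = &mydrv_mq_ops;
        set->nr_hw_queues = 1;
        set->queue_depth = 64;
        set->numa_node = NUMA_NO_NODE;
        set->cmd_size = sizeof(struct mydrv_cmd);       /* size of the PDU */
        return blk_mq_alloc_tag_set(set);
    }

The bsg-lib.c hits above follow the same shape: struct bsg_job serves as that file's PDU, set up in bsg_init_rq()/bsg_prepare_job() and handed to the transport's job_fn in bsg_queue_rq().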
/linux-6.14.4/rust/helpers/
    blk.c
        8  return blk_mq_rq_to_pdu(rq); in rust_helper_blk_mq_rq_to_pdu()
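blk_mq_rq_to_pdu() is a static inline in include/linux/blk-mq.h, so there is no symbol the Rust bindings can link against; rust/helpers/blk.c exports a thin out-of-line wrapper instead. Roughly, matching the single hit above:

    #include <linux/blk-mq.h>

    /* Out-of-line wrapper so Rust code can reach the static inline helper. */
    void *rust_helper_blk_mq_rq_to_pdu(struct request *rq)
    {
        return blk_mq_rq_to_pdu(rq);
    }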
/linux-6.14.4/include/scsi/
    scsi_tcq.h
        39  return blk_mq_rq_to_pdu(req); in scsi_host_find_tag()
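scsi_host_find_tag() is the reverse mapping: from a blk-mq tag back to the scsi_cmnd stored in the request's PDU. A sketch of that lookup pattern follows; the helper name is hypothetical and this is not the verbatim implementation.

    #include <linux/blk-mq.h>
    #include <linux/types.h>
    #include <scsi/scsi_cmnd.h>
    #include <scsi/scsi_host.h>

    /* Hypothetical helper: unique tag -> hw queue -> request -> PDU (scsi_cmnd). */
    static struct scsi_cmnd *my_tag_to_cmnd(struct Scsi_Host *shost, u32 unique_tag)
    {
        u16 hwq = blk_mq_unique_tag_to_hwq(unique_tag);
        struct request *rq;

        if (hwq >= shost->tag_set.nr_hw_queues)
            return NULL;

        rq = blk_mq_tag_to_rq(shost->tag_set.tags[hwq],
                              blk_mq_unique_tag_to_tag(unique_tag));
        return rq ? blk_mq_rq_to_pdu(rq) : NULL;
    }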
/linux-6.14.4/drivers/nvme/host/
    apple.c
        337  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_iod_list()
        345  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_free_prps()
        360  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_unmap_data()
        397  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_setup_prps()
        492  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_setup_prp_simple()
        511  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_map_data()
        551  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_unmap_rq()
        740  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_queue_rq()
        789  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_init_request()
        880  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req); in apple_nvme_timeout()
    rdma.c
        288   struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_exit_request()
        298   struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_init_request()
        1217  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_dma_unmap_req()
        1234  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_unmap_data()
        1469  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_dma_map_req()
        1529  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_map_data()
        1705  req = blk_mq_rq_to_pdu(rq); in nvme_rdma_process_nvme_rsp()
        1947  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_complete_timed_out()
        1956  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_timeout()
        1999  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq); in nvme_rdma_queue_rq()
        [all …]
    pci.c
        428  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_init_request()
        543  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_free_prps()
        558  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_unmap_data()
        598  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_setup_prps()
        704  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_setup_sgls()
        750  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_setup_prp_simple()
        771  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_setup_sgl_simple()
        788  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_map_data()
        845  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_setup_meta_sgls()
        902  struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_setup_meta_mptr()
        [all …]
    tcp.c
        538   struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_exit_request()
        548   struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_init_request()
        629   req = blk_mq_rq_to_pdu(rq); in nvme_tcp_process_nvme_cqe()
        745   req = blk_mq_rq_to_pdu(rq); in nvme_tcp_handle_r2t()
        899   struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_recv_data()
        986   struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_recv_ddgst()
        999   struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_recv_ddgst()
        2565  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_complete_timed_out()
        2574  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_timeout()
        2614  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq); in nvme_tcp_map_data()
        [all …]
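A pattern common to the NVMe hits above (apple.c, rdma.c, pci.c, tcp.c) is initializing the PDU in the tag set's .init_request callback, once per request when the tag set is allocated, rather than on every I/O. A hedged sketch with hypothetical names (my_*); the real drivers keep their own iod/request state in the PDU.

    #include <linux/blk-mq.h>
    #include <linux/slab.h>

    struct my_iod {                 /* hypothetical per-request state */
        void *bounce;               /* e.g. a lazily allocated buffer */
        unsigned int hctx_idx;
    };

    static int my_init_request(struct blk_mq_tag_set *set, struct request *rq,
                               unsigned int hctx_idx, unsigned int numa_node)
    {
        struct my_iod *iod = blk_mq_rq_to_pdu(rq);

        iod->bounce = NULL;
        iod->hctx_idx = hctx_idx;
        return 0;
    }

    static void my_exit_request(struct blk_mq_tag_set *set, struct request *rq,
                                unsigned int hctx_idx)
    {
        struct my_iod *iod = blk_mq_rq_to_pdu(rq);

        kfree(iod->bounce);         /* release anything the PDU owns */
    }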
/linux-6.14.4/drivers/scsi/
    scsi_lib.c
        321   scmd = blk_mq_rq_to_pdu(req); in scsi_execute_cmd()
        634   struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_end_request()
        1219  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_initialize_rq()
        1257  scsi_mq_uninit_cmd(blk_mq_rq_to_pdu(rq)); in scsi_cleanup_rq()
        1280  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_setup_scsi_cmnd()
        1527  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_complete()
        1650  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_prepare_cmd()
        1792  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_mq_set_rq_budget_token()
        1799  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_mq_get_rq_budget_token()
        1811  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req); in scsi_queue_rq()
        [all …]
    scsi_debugfs.c
        56  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_show_rq()
    hosts.c
        594  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq); in scsi_host_check_in_flight()
        688  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq); in complete_all_cmds_iter()
        724  struct scsi_cmnd *sc = blk_mq_rq_to_pdu(req); in __scsi_host_busy_iter_fn()
    scsi_ioctl.c
        367  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq); in scsi_fill_sghdr_rq()
        391  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq); in scsi_complete_sghdr_rq()
        461  scmd = blk_mq_rq_to_pdu(rq); in sg_io()
        557  scmd = blk_mq_rq_to_pdu(rq); in sg_scsi_ioctl()
    scsi_bsg.c
        34  scmd = blk_mq_rq_to_pdu(rq); in scsi_bsg_sg_io_fn()
/linux-6.14.4/drivers/md/
    dm-rq.c
        124  return blk_mq_rq_to_pdu(rq); in tio_from_request()
        461  struct dm_rq_target_io *tio = blk_mq_rq_to_pdu(rq); in dm_mq_init_request()
        481  struct dm_rq_target_io *tio = blk_mq_rq_to_pdu(rq); in dm_mq_queue_rq()
/linux-6.14.4/drivers/nvme/target/
    loop.c
        76   struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req); in nvme_loop_complete_rq()
        137  struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req); in nvme_loop_queue_rq()
        208  struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req); in nvme_loop_init_request()
        212  return nvme_loop_init_iod(ctrl, blk_mq_rq_to_pdu(req), in nvme_loop_init_request()
/linux-6.14.4/drivers/mmc/core/
    queue.h
        27  return blk_mq_rq_to_pdu(rq); in req_to_mmc_queue_req()
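The MMC core illustrates the usual typed-accessor idiom: subsystems wrap the void * returned by blk_mq_rq_to_pdu() in a small static inline so callers never cast by hand. Roughly what the hit above points at; struct mmc_queue_req is the MMC core's PDU type, defined in the same header.

    #include <linux/blk-mq.h>

    struct mmc_queue_req;           /* the MMC core's per-request PDU */

    static inline struct mmc_queue_req *req_to_mmc_queue_req(struct request *rq)
    {
        return blk_mq_rq_to_pdu(rq);
    }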
/linux-6.14.4/drivers/block/
    virtio_blk.c
        336   struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtblk_request_done()
        431   struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtio_queue_rq()
        468   struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtblk_prep_rq_batch()
        483   struct virtblk_req *vbr = blk_mq_rq_to_pdu(req); in virtblk_add_req_batch()
        569   vbr = blk_mq_rq_to_pdu(req); in virtblk_submit_zone_report()
        815   vbr = blk_mq_rq_to_pdu(req); in virtblk_get_id()
        1191  virtblk_unmap_data(req, blk_mq_rq_to_pdu(req)); in virtblk_complete_batch()
    nbd.c
        387   struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req); in nbd_complete_rq()
        453   struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req); in nbd_xmit_timeout()
        809   struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req); in nbd_pending_cmd_work()
        892   cmd = blk_mq_rq_to_pdu(req); in nbd_handle_reply()
        1029  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req); in nbd_clear_req()
        1184  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(bd->rq); in nbd_queue_rq()
        1872  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(rq); in nbd_init_request()
/linux-6.14.4/rust/kernel/block/mq/
    request.rs
        145  unsafe { bindings::blk_mq_rq_to_pdu(request_ptr).cast::<RequestDataWrapper>() }; in wrapper_ptr()
    operations.rs
        211  let pdu = unsafe { bindings::blk_mq_rq_to_pdu(rq) }.cast::<RequestDataWrapper>(); in exit_request_callback()
/linux-6.14.4/drivers/mtd/ubi/
    block.c
        182  struct ubiblock_pdu *pdu = blk_mq_rq_to_pdu(req); in ubiblock_read()
        317  struct ubiblock_pdu *pdu = blk_mq_rq_to_pdu(req); in ubiblock_init_request()
/linux-6.14.4/drivers/block/mtip32xx/
    mtip32xx.c
        155   return blk_mq_rq_to_pdu(blk_mq_tag_to_rq(dd->tags.tags[0], tag)); in mtip_cmd_from_tag()
        999   int_cmd = blk_mq_rq_to_pdu(rq); in mtip_exec_internal_command()
        2416  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_softirq_done_fn()
        2431  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(req); in mtip_abort_cmd()
        3243  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_check_unal_depth()
        3267  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_issue_reserved_cmd()
        3305  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_queue_rq()
        3326  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_free_cmd()
        3339  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq); in mtip_init_cmd()
        3355  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(req); in mtip_cmd_timeout()
/linux-6.14.4/drivers/s390/block/
    scm_blk.c
        257  error = blk_mq_rq_to_pdu(scmrq->request[i]); in scm_request_finish()
        420  blk_status_t *error = blk_mq_rq_to_pdu(req); in scm_blk_request_done()
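The scm_blk.c hits show that the PDU need not be a struct at all: here it is a single blk_status_t used to carry the per-request error from the submission/interrupt side to the completion handler. A sketch of that idea; the function names are illustrative, and it assumes the tag set declares cmd_size = sizeof(blk_status_t).

    #include <linux/blk-mq.h>

    /* Illustrative only: stash the error in the PDU and defer completion. */
    static void my_store_error(struct request *rq, blk_status_t err)
    {
        blk_status_t *error = blk_mq_rq_to_pdu(rq);

        *error = err;
        blk_mq_complete_request(rq);
    }

    /* Completion side: end the request with the status saved in its PDU. */
    static void my_request_done(struct request *rq)
    {
        blk_status_t *error = blk_mq_rq_to_pdu(rq);

        blk_mq_end_request(rq, *error);
    }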
    dasd_fba.c
        348  blk_mq_rq_to_pdu(req)); in dasd_fba_build_cp_discard()
        483  blk_mq_rq_to_pdu(req)); in dasd_fba_build_cp_regular()
/linux-6.14.4/drivers/block/null_blk/
    main.c
        835   struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in null_complete_rq()
        1438  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in should_timeout_request()
        1446  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in should_requeue_request()
        1548  cmd = blk_mq_rq_to_pdu(req); in null_poll()
        1563  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in null_timeout_rq()
        1597  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq); in null_queue_rq()
/linux-6.14.4/drivers/target/
    target_core_pscsi.c
        956   scmd = blk_mq_rq_to_pdu(req); in pscsi_execute_cmd()
        1007  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(req); in pscsi_req_done()