
Searched for refs:nvme_sq (results 1 - 14 of 14), sorted by relevance.

/kernel/linux/linux-5.10/drivers/nvme/target/
loop.c
53 struct nvmet_sq nvme_sq; member
94 container_of(req->sq, struct nvme_loop_queue, nvme_sq); in nvme_loop_queue_response()
152 &queue->nvme_sq, &nvme_loop_ops)) in nvme_loop_queue_rq()
184 if (!nvmet_req_init(&iod->req, &queue->nvme_cq, &queue->nvme_sq, in nvme_loop_submit_async_event()
266 nvmet_sq_destroy(&ctrl->queues[0].nvme_sq); in nvme_loop_destroy_admin_queue()
299 nvmet_sq_destroy(&ctrl->queues[i].nvme_sq); in nvme_loop_destroy_io_queues()
319 ret = nvmet_sq_init(&ctrl->queues[i].nvme_sq); in nvme_loop_init_io_queues()
364 error = nvmet_sq_init(&ctrl->queues[0].nvme_sq); in nvme_loop_configure_admin_queue()
417 nvmet_sq_destroy(&ctrl->queues[0].nvme_sq); in nvme_loop_configure_admin_queue()
tcp.c
99 struct nvmet_sq nvme_sq; member
332 if (queue->nvme_sq.ctrl) in nvmet_tcp_fatal_error()
333 nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl); in nvmet_tcp_fatal_error()
420 pdu->hdr.flags = NVME_TCP_F_DATA_LAST | (queue->nvme_sq.sqhd_disabled ? in nvmet_setup_c2h_data_pdu()
595 queue->data_digest || !queue->nvme_sq.sqhd_disabled) in nvmet_try_send_data()
617 if (queue->nvme_sq.sqhd_disabled) { in nvmet_try_send_data()
625 if (queue->nvme_sq.sqhd_disabled) { in nvmet_try_send_data()
716 if (queue->nvme_sq.sqhd_disabled) { in nvmet_try_send_ddgst()
1005 &queue->nvme_sq, &nvmet_tcp_ops))) { in nvmet_tcp_done_recv_pdu()
1436 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_tcp_release_queue_work()
[all...]
rdma.c
96 struct nvmet_sq nvme_sq; member
687 if (queue->nvme_sq.ctrl) { in nvmet_rdma_error_comp()
688 nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl); in nvmet_rdma_error_comp()
959 queue->nvme_sq.ctrl->cntlid); in nvmet_rdma_execute_command()
988 &queue->nvme_sq, &nvmet_rdma_ops)) in nvmet_rdma_handle_command()
1350 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_rdma_free_queue()
1434 ret = nvmet_sq_init(&queue->nvme_sq); in nvmet_rdma_alloc_queue()
1515 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_rdma_alloc_queue()
1811 if (queue->nvme_sq.ctrl == ctrl) { in nvmet_rdma_delete_ctrl()
fc.c
141 struct nvmet_sq nvme_sq; member
827 ret = nvmet_sq_init(&queue->nvme_sq); in nvmet_fc_alloc_target_queue()
955 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_fc_delete_target_queue()
1574 if (queue && queue->nvme_sq.ctrl == ctrl) { in nvmet_fc_delete_ctrl()
2542 &fod->queue->nvme_sq, in nvmet_fc_handle_fcp_rqst()
nvmet.h
304 u16 (*install_queue)(struct nvmet_sq *nvme_sq);
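
Taken together, the target-side hits show one pattern, repeated in both the 5.10 and 6.6 trees: every fabrics transport (loop, tcp, rdma, fc) embeds a struct nvmet_sq in its per-queue state, initialises it with nvmet_sq_init() when the queue is created, binds each arriving command to it through nvmet_req_init(), and tears it down with nvmet_sq_destroy(); the install_queue hook in nvmet.h is the optional per-transport callback invoked when a fabrics Connect command installs that queue. The following is a minimal, hypothetical sketch of that lifecycle, not code from the tree; all foo_* names and the mostly-empty ops table are placeholders.

/* Hypothetical sketch only: foo_* names are placeholders, not kernel code. */
#include "nvmet.h"	/* drivers/nvme/target/nvmet.h */

/* Per-queue state: each transport embeds a struct nvmet_sq (the
 * "nvme_sq; member" hits in loop.c, tcp.c, rdma.c and fc.c above). */
struct foo_queue {
	struct nvmet_cq		nvme_cq;
	struct nvmet_sq		nvme_sq;
	/* transport-specific fields ... */
};

/* Per-command state wrapping the core's request structure. */
struct foo_cmd {
	struct nvmet_req	req;
	/* wire-format capsule, data descriptors ... */
};

/* A real transport must at least provide .queue_response here, which the
 * core calls to deliver completions; left empty in this sketch. */
static const struct nvmet_fabrics_ops foo_nvmet_ops;

/* Queue creation: set up the SQ (percpu reference and bookkeeping),
 * as in nvme_loop_init_io_queues() and nvmet_rdma_alloc_queue(). */
static int foo_create_queue(struct foo_queue *queue)
{
	return nvmet_sq_init(&queue->nvme_sq);
}

/* Command arrival: nvmet_req_init() ties the request to this CQ/SQ pair
 * and validates the command; on failure it has already queued an error
 * completion, so the caller just returns. */
static void foo_handle_command(struct foo_queue *queue, struct foo_cmd *cmd)
{
	if (!nvmet_req_init(&cmd->req, &queue->nvme_cq, &queue->nvme_sq,
			    &foo_nvmet_ops))
		return;

	/* ... map data, then call cmd->req.execute(&cmd->req) ... */
}

/* Queue teardown: wait for in-flight requests and drop the controller
 * reference, as in nvmet_tcp_release_queue_work() and
 * nvmet_rdma_free_queue(). */
static void foo_destroy_queue(struct foo_queue *queue)
{
	nvmet_sq_destroy(&queue->nvme_sq);
}
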
/kernel/linux/linux-6.6/drivers/nvme/target/
loop.c
53 struct nvmet_sq nvme_sq; member
94 container_of(req->sq, struct nvme_loop_queue, nvme_sq); in nvme_loop_queue_response()
152 &queue->nvme_sq, &nvme_loop_ops)) in nvme_loop_queue_rq()
184 if (!nvmet_req_init(&iod->req, &queue->nvme_cq, &queue->nvme_sq, in nvme_loop_submit_async_event()
268 nvmet_sq_destroy(&ctrl->queues[0].nvme_sq); in nvme_loop_destroy_admin_queue()
297 nvmet_sq_destroy(&ctrl->queues[i].nvme_sq); in nvme_loop_destroy_io_queues()
317 ret = nvmet_sq_init(&ctrl->queues[i].nvme_sq); in nvme_loop_init_io_queues()
350 error = nvmet_sq_init(&ctrl->queues[0].nvme_sq); in nvme_loop_configure_admin_queue()
390 nvmet_sq_destroy(&ctrl->queues[0].nvme_sq); in nvme_loop_configure_admin_queue()
tcp.c
135 struct nvmet_sq nvme_sq; member
368 if (queue->nvme_sq.ctrl) in nvmet_tcp_fatal_error()
369 nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl); in nvmet_tcp_fatal_error()
439 pdu->hdr.flags = NVME_TCP_F_DATA_LAST | (queue->nvme_sq.sqhd_disabled ? in nvmet_setup_c2h_data_pdu()
621 queue->data_digest || !queue->nvme_sq.sqhd_disabled) in nvmet_try_send_data()
644 if (queue->nvme_sq.sqhd_disabled) { in nvmet_try_send_data()
652 if (queue->nvme_sq.sqhd_disabled) in nvmet_try_send_data()
744 if (queue->nvme_sq.sqhd_disabled) { in nvmet_try_send_ddgst()
1047 &queue->nvme_sq, &nvmet_tcp_ops))) { in nvmet_tcp_done_recv_pdu()
1504 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_tcp_release_queue_work()
[all...]
rdma.c
97 struct nvmet_sq nvme_sq; member
688 if (queue->nvme_sq.ctrl) { in nvmet_rdma_error_comp()
689 nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl); in nvmet_rdma_error_comp()
960 queue->nvme_sq.ctrl->cntlid); in nvmet_rdma_execute_command()
989 &queue->nvme_sq, &nvmet_rdma_ops)) in nvmet_rdma_handle_command()
1350 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_rdma_free_queue()
1434 ret = nvmet_sq_init(&queue->nvme_sq); in nvmet_rdma_alloc_queue()
1515 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_rdma_alloc_queue()
1811 if (queue->nvme_sq.ctrl == ctrl) { in nvmet_rdma_delete_ctrl()
fc.c
143 struct nvmet_sq nvme_sq; member
835 ret = nvmet_sq_init(&queue->nvme_sq); in nvmet_fc_alloc_target_queue()
952 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_fc_delete_target_queue()
1597 if (queue && queue->nvme_sq.ctrl == ctrl) { in nvmet_fc_delete_ctrl()
2566 &fod->queue->nvme_sq, in nvmet_fc_handle_fcp_rqst()
nvmet.h
340 u16 (*install_queue)(struct nvmet_sq *nvme_sq);
/kernel/linux/linux-5.10/drivers/nvme/host/
trace.c
256 EXPORT_TRACEPOINT_SYMBOL_GPL(nvme_sq); variable
trace.h
141 TRACE_EVENT(nvme_sq,
/kernel/linux/linux-6.6/drivers/nvme/host/
trace.h
141 TRACE_EVENT(nvme_sq,
trace.c
357 EXPORT_TRACEPOINT_SYMBOL_GPL(nvme_sq); variable
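
The host-side hits are unrelated to the target's nvme_sq queue member: in drivers/nvme/host/, trace.h defines a TRACE_EVENT named nvme_sq (a tracepoint for submission-queue activity) and trace.c exports the resulting tracepoint symbol with EXPORT_TRACEPOINT_SYMBOL_GPL() so that host-driver modules outside nvme-core can emit it; the identifier merely collides with the target side's struct member.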
