
Searched refs:cur_seg (Results 1 - 25 of 25) sorted by relevance

/kernel/linux/linux-6.6/drivers/usb/cdns3/
cdnsp-ring.c
537 struct cdnsp_segment *cur_seg; in cdnsp_trb_in_td() local
544 cur_seg = start_seg; in cdnsp_trb_in_td()
550 temp_trb = &cur_seg->trbs[TRBS_PER_SEGMENT - 1]; in cdnsp_trb_in_td()
552 end_seg_dma = cdnsp_trb_virt_to_dma(cur_seg, temp_trb); in cdnsp_trb_in_td()
554 end_trb_dma = cdnsp_trb_virt_to_dma(cur_seg, end_trb); in cdnsp_trb_in_td()
557 end_trb_dma, cur_seg->dma, in cdnsp_trb_in_td()
568 return cur_seg; in cdnsp_trb_in_td()
577 (suspect_dma >= cur_seg->dma && in cdnsp_trb_in_td()
579 return cur_seg; in cdnsp_trb_in_td()
588 return cur_seg; in cdnsp_trb_in_td()
[all...]
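
The cdnsp_trb_in_td() hits above, and the xhci-ring.c trb_in_td() hits further down, share the same shape: cur_seg starts at the segment holding the TD's first TRB and advances around the ring until the suspect DMA address falls inside the current segment. A minimal standalone sketch of that walk (struct seg and find_seg are made-up names; the real functions also handle start/end TRBs that straddle a segment boundary):

#include <stddef.h>
#include <stdint.h>

struct seg {
	uint64_t dma;        /* bus address of the segment's first TRB */
	size_t nr_trbs;      /* TRBs per segment (TRBS_PER_SEGMENT) */
	size_t trb_size;     /* bytes per TRB */
	struct seg *next;    /* segments form a circular list */
};

/*
 * Walk the ring starting at 'start' and return the segment whose DMA range
 * contains suspect_dma, or NULL after one full loop without a match.
 */
static struct seg *find_seg(struct seg *start, uint64_t suspect_dma)
{
	struct seg *cur_seg = start;

	do {
		uint64_t end_dma = cur_seg->dma +
				   cur_seg->nr_trbs * cur_seg->trb_size;

		if (suspect_dma >= cur_seg->dma && suspect_dma < end_dma)
			return cur_seg;
		cur_seg = cur_seg->next;
	} while (cur_seg != start);

	return NULL;
}
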
/kernel/linux/linux-6.6/drivers/scsi/elx/efct/
efct_lio.c
434 ocp->cur_seg = 0; in efct_lio_write_pending()
440 curcnt = (ocp->seg_map_cnt - ocp->cur_seg); in efct_lio_write_pending()
443 for (cnt = 0, sg = cmd->t_data_sg; cnt < ocp->cur_seg; cnt++, in efct_lio_write_pending()
452 ocp->cur_seg++; in efct_lio_write_pending()
455 if (ocp->cur_seg == ocp->seg_cnt) in efct_lio_write_pending()
478 ocp->cur_seg = 0; in efct_lio_queue_data_in()
496 curcnt = min(ocp->seg_map_cnt - ocp->cur_seg, io->sgl_allocated); in efct_lio_queue_data_in()
499 sg = &cmd->t_data_sg[ocp->cur_seg]; in efct_lio_queue_data_in()
509 ocp->cur_seg++; in efct_lio_queue_data_in()
517 ocp->seg_cnt = ocp->cur_seg; in efct_lio_queue_data_in()
[all...]
efct_lio.h
118 u32 cur_seg; member
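
In efct_lio.c, ocp->cur_seg is a plain index into the command's scatter-gather list: each pass through efct_lio_write_pending()/efct_lio_queue_data_in() maps at most a batch of entries starting at cur_seg and advances it until it reaches seg_cnt. A simplified standalone sketch of that cursor (batch_map and MAX_BATCH are hypothetical names; MAX_BATCH stands in for io->sgl_allocated):

#include <stddef.h>
#include <stdint.h>

#define MAX_BATCH 8	/* stand-in for io->sgl_allocated */

struct sg_ent { uint64_t addr; uint32_t len; };

/*
 * Map the next batch of segments starting at *cur_seg and advance the
 * cursor; the caller keeps calling until *cur_seg == nseg.
 */
static size_t batch_map(const struct sg_ent *sgl, size_t nseg, size_t *cur_seg)
{
	size_t count = nseg - *cur_seg;
	size_t i;

	if (count > MAX_BATCH)
		count = MAX_BATCH;

	for (i = 0; i < count; i++) {
		const struct sg_ent *ent = &sgl[*cur_seg];

		/* in the driver, ent->addr/ent->len would be programmed
		 * into the hardware SGL here */
		(void)ent;
		(*cur_seg)++;
	}
	return count;
}
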
/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/
tid_rdma.c
253 * Therefore, when priv->pkts_ps is used to calculate req->cur_seg in tid_rdma_conn_reply()
254 * during retry, it will lead to req->cur_seg = 0, which is exactly in tid_rdma_conn_reply()
1077 * into the sg_list. This is done by the cur_seg cursor in the tid_rdma_request
1742 req->cur_seg * req->seg_len + flow->sent); in hfi1_build_tid_rdma_read_packet()
1763 req->cur_seg++; in hfi1_build_tid_rdma_read_packet()
1964 req->cur_seg = 0; in tid_rdma_rcv_read_request()
2085 (req->state == TID_REQUEST_SYNC && !req->cur_seg)) { in tid_rdma_rcv_error()
2139 req->cur_seg = delta_psn(psn, e->psn); in tid_rdma_rcv_error()
2158 req->cur_seg == req->comp_seg || in tid_rdma_rcv_error()
2171 req->cur_seg in tid_rdma_rcv_error()
[all...]
rc.c
227 qp->s_ack_rdma_psn = mask_psn(e->psn + req->cur_seg); in make_rc_ack()
322 req->cur_seg == req->alloc_seg) { in make_rc_ack()
339 if (req->cur_seg != req->total_segs) in make_rc_ack()
845 req->cur_seg = 0; in hfi1_make_rc_req()
854 wqe->length - req->seg_len * req->cur_seg); in hfi1_make_rc_req()
867 if (req->cur_seg >= req->total_segs && in hfi1_make_rc_req()
1089 req->cur_seg = delta_psn(qp->s_psn, wqe->psn) / priv->pkts_ps; in hfi1_make_rc_req()
1112 wqe->length - req->seg_len * req->cur_seg); in hfi1_make_rc_req()
1125 if (req->cur_seg >= req->total_segs && in hfi1_make_rc_req()
1158 wqe->length - req->seg_len * req->cur_seg); in hfi1_make_rc_req()
1473 u32 cur_seg; update_num_rd_atomic() local
[all...]
tid_rdma.h
121 u32 cur_seg; /* index of current segment */ member
trace_tid.h
59 "cur_seg %u comp_seg %u ack_seg %u alloc_seg %u " \
1054 __field(u32, cur_seg)
1076 __entry->cur_seg = req->cur_seg;
1099 __entry->cur_seg,
/kernel/linux/linux-6.6/drivers/infiniband/hw/hfi1/
tid_rdma.c
253 * Therefore, when priv->pkts_ps is used to calculate req->cur_seg in tid_rdma_conn_reply()
254 * during retry, it will lead to req->cur_seg = 0, which is exactly in tid_rdma_conn_reply()
1084 * into the sg_list. This is done by the cur_seg cursor in the tid_rdma_request
1749 req->cur_seg * req->seg_len + flow->sent); in hfi1_build_tid_rdma_read_packet()
1770 req->cur_seg++; in hfi1_build_tid_rdma_read_packet()
1971 req->cur_seg = 0; in tid_rdma_rcv_read_request()
2092 (req->state == TID_REQUEST_SYNC && !req->cur_seg)) { in tid_rdma_rcv_error()
2146 req->cur_seg = delta_psn(psn, e->psn); in tid_rdma_rcv_error()
2165 req->cur_seg == req->comp_seg || in tid_rdma_rcv_error()
2178 req->cur_seg in tid_rdma_rcv_error()
[all...]
rc.c
185 qp->s_ack_rdma_psn = mask_psn(e->psn + req->cur_seg); in make_rc_ack()
280 req->cur_seg == req->alloc_seg) { in make_rc_ack()
297 if (req->cur_seg != req->total_segs) in make_rc_ack()
804 req->cur_seg = 0; in hfi1_make_rc_req()
813 wqe->length - req->seg_len * req->cur_seg); in hfi1_make_rc_req()
826 if (req->cur_seg >= req->total_segs && in hfi1_make_rc_req()
1048 req->cur_seg = delta_psn(qp->s_psn, wqe->psn) / priv->pkts_ps; in hfi1_make_rc_req()
1071 wqe->length - req->seg_len * req->cur_seg); in hfi1_make_rc_req()
1084 if (req->cur_seg >= req->total_segs && in hfi1_make_rc_req()
1117 wqe->length - req->seg_len * req->cur_seg); in hfi1_make_rc_req()
1431 u32 cur_seg; update_num_rd_atomic() local
[all...]
tid_rdma.h
121 u32 cur_seg; /* index of current segment */ member
trace_tid.h
59 "cur_seg %u comp_seg %u ack_seg %u alloc_seg %u " \
1054 __field(u32, cur_seg)
1076 __entry->cur_seg = req->cur_seg;
1099 __entry->cur_seg,
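
Across both the 5.10 and 6.6 hfi1 trees above, req->cur_seg is the index of the segment currently being worked on in a TID RDMA request: the byte offset into the WQE is cur_seg * seg_len plus what the active flow has already sent, and on a retry the index is rederived from the PSN delta divided by the packets-per-segment value. A simplified sketch of those two relationships (the struct and function names here are illustrative stand-ins, not the driver's types):

#include <stdint.h>

struct tid_req_sketch {
	uint32_t cur_seg;	/* index of current segment */
	uint32_t total_segs;	/* number of segments in the request */
	uint32_t seg_len;	/* bytes per segment */
	uint32_t pkts_ps;	/* packets per segment */
};

/* Byte offset within the WQE at which the current segment's data starts. */
static uint64_t seg_offset(const struct tid_req_sketch *req, uint32_t flow_sent)
{
	return (uint64_t)req->cur_seg * req->seg_len + flow_sent;
}

/* On retransmit, recover the segment index from the PSN delta. */
static uint32_t seg_from_psn_delta(const struct tid_req_sketch *req,
				   uint32_t delta_psn)
{
	return delta_psn / req->pkts_ps;
}
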
/kernel/linux/linux-5.10/drivers/usb/host/
xhci-ring.c
1954 struct xhci_segment *cur_seg; in trb_in_td() local
1957 cur_seg = start_seg; in trb_in_td()
1963 end_seg_dma = xhci_trb_virt_to_dma(cur_seg, in trb_in_td()
1964 &cur_seg->trbs[TRBS_PER_SEGMENT - 1]); in trb_in_td()
1966 end_trb_dma = xhci_trb_virt_to_dma(cur_seg, end_trb); in trb_in_td()
1974 (unsigned long long)cur_seg->dma, in trb_in_td()
1981 return cur_seg; in trb_in_td()
1988 (suspect_dma >= cur_seg->dma && in trb_in_td()
1990 return cur_seg; in trb_in_td()
1996 return cur_seg; in trb_in_td()
[all...]
/kernel/linux/linux-6.6/drivers/usb/host/
xhci-ring.c
2052 struct xhci_segment *cur_seg; in trb_in_td() local
2055 cur_seg = start_seg; in trb_in_td()
2061 end_seg_dma = xhci_trb_virt_to_dma(cur_seg, in trb_in_td()
2062 &cur_seg->trbs[TRBS_PER_SEGMENT - 1]); in trb_in_td()
2064 end_trb_dma = xhci_trb_virt_to_dma(cur_seg, end_trb); in trb_in_td()
2072 (unsigned long long)cur_seg->dma, in trb_in_td()
2079 return cur_seg; in trb_in_td()
2086 (suspect_dma >= cur_seg->dma && in trb_in_td()
2088 return cur_seg; in trb_in_td()
2094 return cur_seg; in trb_in_td()
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/core/
mad_priv.h
142 struct ib_rmpp_segment *cur_seg; member
mad.c
828 send_wr->cur_seg = container_of(send_wr->rmpp_list.next, in alloc_send_rmpp_list()
830 send_wr->last_ack_seg = send_wr->cur_seg; in alloc_send_rmpp_list()
960 list = &mad_send_wr->cur_seg->list; in ib_get_rmpp_segment()
962 if (mad_send_wr->cur_seg->num < seg_num) { in ib_get_rmpp_segment()
963 list_for_each_entry(mad_send_wr->cur_seg, list, list) in ib_get_rmpp_segment()
964 if (mad_send_wr->cur_seg->num == seg_num) in ib_get_rmpp_segment()
966 } else if (mad_send_wr->cur_seg->num > seg_num) { in ib_get_rmpp_segment()
967 list_for_each_entry_reverse(mad_send_wr->cur_seg, list, list) in ib_get_rmpp_segment()
968 if (mad_send_wr->cur_seg->num == seg_num) in ib_get_rmpp_segment()
971 return mad_send_wr->cur_seg in ib_get_rmpp_segment()
[all...]
mad_rmpp.c
953 mad_send_wr->cur_seg = mad_send_wr->last_ack_seg; in ib_retry_rmpp()
/kernel/linux/linux-6.6/drivers/infiniband/core/
mad_priv.h
141 struct ib_rmpp_segment *cur_seg; member
mad.c
821 send_wr->cur_seg = container_of(send_wr->rmpp_list.next, in alloc_send_rmpp_list()
823 send_wr->last_ack_seg = send_wr->cur_seg; in alloc_send_rmpp_list()
952 list = &mad_send_wr->cur_seg->list; in ib_get_rmpp_segment()
954 if (mad_send_wr->cur_seg->num < seg_num) { in ib_get_rmpp_segment()
955 list_for_each_entry(mad_send_wr->cur_seg, list, list) in ib_get_rmpp_segment()
956 if (mad_send_wr->cur_seg->num == seg_num) in ib_get_rmpp_segment()
958 } else if (mad_send_wr->cur_seg->num > seg_num) { in ib_get_rmpp_segment()
959 list_for_each_entry_reverse(mad_send_wr->cur_seg, list, list) in ib_get_rmpp_segment()
960 if (mad_send_wr->cur_seg->num == seg_num) in ib_get_rmpp_segment()
963 return mad_send_wr->cur_seg in ib_get_rmpp_segment()
[all...]
mad_rmpp.c
953 mad_send_wr->cur_seg = mad_send_wr->last_ack_seg; in ib_retry_rmpp()
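
In the RMPP code (both kernel versions above), mad_send_wr->cur_seg caches the most recently used segment of rmpp_list; ib_get_rmpp_segment() walks forward or backward from that cursor to reach the requested segment number, and ib_retry_rmpp() rewinds it to last_ack_seg. A simplified standalone sketch of the cursor lookup (plain prev/next pointers here instead of the kernel's list_head; names are made up):

#include <stddef.h>

struct rmpp_seg_sketch {
	int num;			/* 1-based segment number */
	struct rmpp_seg_sketch *next;
	struct rmpp_seg_sketch *prev;
};

/*
 * Return the segment with the requested number, walking from the cached
 * cursor rather than from the head of the list, and update the cursor.
 */
static struct rmpp_seg_sketch *get_segment(struct rmpp_seg_sketch **cur_seg,
					   int seg_num)
{
	struct rmpp_seg_sketch *seg = *cur_seg;

	while (seg && seg->num < seg_num)	/* walk forward */
		seg = seg->next;
	while (seg && seg->num > seg_num)	/* walk backward */
		seg = seg->prev;

	if (seg)
		*cur_seg = seg;			/* cache for the next lookup */
	return seg;
}
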
/kernel/linux/linux-5.10/drivers/scsi/aic7xxx/
aic7xxx_osm.c
1505 struct scatterlist *cur_seg; in ahc_linux_run_command() local
1514 scsi_for_each_sg(cmd, cur_seg, nseg, i) { in ahc_linux_run_command()
1519 addr = sg_dma_address(cur_seg); in ahc_linux_run_command()
1520 len = sg_dma_len(cur_seg); in ahc_linux_run_command()
aic79xx_osm.c
1622 struct scatterlist *cur_seg; in ahd_linux_run_command() local
1627 scsi_for_each_sg(cmd, cur_seg, nseg, i) { in ahd_linux_run_command()
1631 addr = sg_dma_address(cur_seg); in ahd_linux_run_command()
1632 len = sg_dma_len(cur_seg); in ahd_linux_run_command()
/kernel/linux/linux-6.6/drivers/scsi/aic7xxx/
aic7xxx_osm.c
1504 struct scatterlist *cur_seg; in ahc_linux_run_command() local
1513 scsi_for_each_sg(cmd, cur_seg, nseg, i) { in ahc_linux_run_command()
1518 addr = sg_dma_address(cur_seg); in ahc_linux_run_command()
1519 len = sg_dma_len(cur_seg); in ahc_linux_run_command()
aic79xx_osm.c
1616 struct scatterlist *cur_seg; in ahd_linux_run_command() local
1621 scsi_for_each_sg(cmd, cur_seg, nseg, i) { in ahd_linux_run_command()
1625 addr = sg_dma_address(cur_seg); in ahd_linux_run_command()
1626 len = sg_dma_len(cur_seg); in ahd_linux_run_command()
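
The aic7xxx/aic79xx hits use cur_seg only as the loop cursor of scsi_for_each_sg(): each mapped scatter-gather entry contributes one DMA address/length pair to the controller's own SG list. A minimal sketch of that translation step (struct dma_ent, struct hw_sg and build_hw_sglist are made-up names):

#include <stddef.h>
#include <stdint.h>

struct dma_ent { uint64_t addr; uint32_t len; };
struct hw_sg   { uint64_t addr; uint32_t len; };

/* Copy each mapped segment's (address, length) pair into the hardware list. */
static void build_hw_sglist(const struct dma_ent *sgl, size_t nseg,
			    struct hw_sg *out)
{
	size_t i;

	for (i = 0; i < nseg; i++) {
		/* sg_dma_address()/sg_dma_len() per cur_seg in the real driver */
		out[i].addr = sgl[i].addr;
		out[i].len  = sgl[i].len;
	}
}
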
/kernel/linux/linux-5.10/drivers/scsi/qla2xxx/
qla_iocb.c
590 struct scatterlist *cur_seg; in qla24xx_build_scsi_type_6_iocbs() local
624 cur_seg = scsi_sglist(cmd); in qla24xx_build_scsi_type_6_iocbs()
655 append_dsd64(&cur_dsd, cur_seg); in qla24xx_build_scsi_type_6_iocbs()
656 cur_seg = sg_next(cur_seg); in qla24xx_build_scsi_type_6_iocbs()
/kernel/linux/linux-6.6/drivers/scsi/qla2xxx/
qla_iocb.c
595 struct scatterlist *cur_seg; in qla24xx_build_scsi_type_6_iocbs() local
626 cur_seg = scsi_sglist(cmd); in qla24xx_build_scsi_type_6_iocbs()
656 append_dsd64(&cur_dsd, cur_seg); in qla24xx_build_scsi_type_6_iocbs()
657 cur_seg = sg_next(cur_seg); in qla24xx_build_scsi_type_6_iocbs()
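
The qla2xxx hits show the pointer-walk variant of the same idea: cur_seg starts at scsi_sglist(cmd) and is advanced with sg_next() while each segment is appended to the command IOCB as a 64-bit data segment descriptor. A simplified sketch of that walk over a singly linked segment list (struct seg_ent, struct dsd64 and append_dsd_sketch are illustrative names):

#include <stddef.h>
#include <stdint.h>

struct seg_ent {
	uint64_t addr;
	uint32_t len;
	struct seg_ent *next;	/* sg_next() in the real scatterlist */
};

struct dsd64 { uint64_t addr; uint32_t len; };

/* Append one descriptor per segment, advancing both cursors as we go. */
static size_t append_dsd_sketch(struct seg_ent *cur_seg, struct dsd64 *cur_dsd,
				size_t max_dsd)
{
	size_t n = 0;

	while (cur_seg && n < max_dsd) {
		cur_dsd->addr = cur_seg->addr;
		cur_dsd->len  = cur_seg->len;
		cur_dsd++;
		cur_seg = cur_seg->next;
		n++;
	}
	return n;
}
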

Completed in 57 milliseconds