
Searched refs:sde (Results 1 - 25 of 54) sorted by relevance


/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/
sdma.c
243 struct sdma_engine *sde,
246 struct sdma_engine *sde,
248 static void dump_sdma_state(struct sdma_engine *sde);
249 static void sdma_make_progress(struct sdma_engine *sde, u64 status);
250 static void sdma_desc_avail(struct sdma_engine *sde, uint avail);
251 static void sdma_flush_descq(struct sdma_engine *sde);
287 struct sdma_engine *sde, in write_sde_csr()
291 write_kctxt_csr(sde->dd, sde->this_idx, offset0, value); in write_sde_csr()
295 struct sdma_engine *sde, in read_sde_csr()
286 write_sde_csr( struct sdma_engine *sde, u32 offset0, u64 value) write_sde_csr() argument
294 read_sde_csr( struct sdma_engine *sde, u32 offset0) read_sde_csr() argument
305 sdma_wait_for_packet_egress(struct sdma_engine *sde, int pause) sdma_wait_for_packet_egress() argument
346 struct sdma_engine *sde = &dd->per_sdma[i]; sdma_wait() local
352 sdma_set_desc_cnt(struct sdma_engine *sde, unsigned cnt) sdma_set_desc_cnt() argument
364 complete_tx(struct sdma_engine *sde, struct sdma_txreq *tx, int res) complete_tx() argument
404 sdma_flush(struct sdma_engine *sde) sdma_flush() argument
451 struct sdma_engine *sde = sdma_field_flush() local
462 struct sdma_engine *sde = container_of(work, struct sdma_engine, sdma_err_halt_wait() local
489 sdma_err_progress_check_schedule(struct sdma_engine *sde) sdma_err_progress_check_schedule() argument
512 struct sdma_engine *sde = from_timer(sde, t, err_progress_check_timer); sdma_err_progress_check() local
550 struct sdma_engine *sde = from_tasklet(sde, t, sdma_hw_clean_up_task() local
570 get_txhead(struct sdma_engine *sde) get_txhead() argument
578 sdma_flush_descq(struct sdma_engine *sde) sdma_flush_descq() argument
610 struct sdma_engine *sde = from_tasklet(sde, t, sdma_sw_clean_up_task); sdma_sw_clean_up_task() local
655 sdma_sw_tear_down(struct sdma_engine *sde) sdma_sw_tear_down() argument
667 sdma_start_hw_clean_up(struct sdma_engine *sde) sdma_start_hw_clean_up() argument
672 sdma_set_state(struct sdma_engine *sde, enum sdma_states next_state) sdma_set_state() argument
750 sdma_engine_get_vl(struct sdma_engine *sde) sdma_engine_get_vl() argument
837 struct sdma_engine *sde[]; global() member
873 struct sdma_engine *sde = NULL; sdma_select_user_engine() local
910 sdma_cleanup_sde_map(struct sdma_rht_map_elem *map, struct sdma_engine *sde) sdma_cleanup_sde_map() argument
934 sdma_set_cpu_to_sde_map(struct sdma_engine *sde, const char *buf, size_t count) sdma_set_cpu_to_sde_map() argument
1087 sdma_get_cpu_to_sde_map(struct sdma_engine *sde, char *buf) sdma_get_cpu_to_sde_map() argument
1298 struct sdma_engine *sde; sdma_clean() local
1364 struct sdma_engine *sde; sdma_init() local
1555 struct sdma_engine *sde; sdma_all_running() local
1573 struct sdma_engine *sde; sdma_all_idle() local
1594 struct sdma_engine *sde; sdma_start() local
1610 struct sdma_engine *sde; sdma_exit() local
1702 sdma_gethead(struct sdma_engine *sde) sdma_gethead() argument
1765 sdma_desc_avail(struct sdma_engine *sde, uint avail) sdma_desc_avail() argument
1826 sdma_make_progress(struct sdma_engine *sde, u64 status) sdma_make_progress() argument
1895 sdma_engine_interrupt(struct sdma_engine *sde, u64 status) sdma_engine_interrupt() argument
1915 sdma_engine_error(struct sdma_engine *sde, u64 status) sdma_engine_error() argument
1941 sdma_sendctrl(struct sdma_engine *sde, unsigned op) sdma_sendctrl() argument
1990 sdma_setlengen(struct sdma_engine *sde) sdma_setlengen() argument
2009 sdma_update_tail(struct sdma_engine *sde, u16 tail) sdma_update_tail() argument
2020 sdma_hw_start_up(struct sdma_engine *sde) sdma_hw_start_up() argument
2043 set_sdma_integrity(struct sdma_engine *sde) set_sdma_integrity() argument
2051 init_sdma_regs( struct sdma_engine *sde, u32 credits, uint idle_cnt) init_sdma_regs() argument
2102 sdma_dumpstate(struct sdma_engine *sde) sdma_dumpstate() argument
2143 dump_sdma_state(struct sdma_engine *sde) dump_sdma_state() argument
2211 sdma_seqfile_dump_sde(struct seq_file *s, struct sdma_engine *sde) sdma_seqfile_dump_sde() argument
2281 add_gen(struct sdma_engine *sde, u64 qw1) add_gen() argument
2307 submit_tx(struct sdma_engine *sde, struct sdma_txreq *tx) submit_tx() argument
2354 sdma_check_progress( struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *tx, bool pkts_sent) sdma_check_progress() argument
2395 sdma_send_txreq(struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *tx, bool pkts_sent) sdma_send_txreq() argument
2474 sdma_send_txlist(struct sdma_engine *sde, struct iowait_work *wait, struct list_head *tx_list, u16 *count_out) sdma_send_txlist() argument
2544 sdma_process_event(struct sdma_engine *sde, enum sdma_events event) sdma_process_event() argument
2560 __sdma_process_event(struct sdma_engine *sde, enum sdma_events event) __sdma_process_event() argument
3181 struct sdma_engine *sde; sdma_update_lmc() local
3290 sdma_ahg_alloc(struct sdma_engine *sde) sdma_ahg_alloc() argument
3321 sdma_ahg_free(struct sdma_engine *sde, int ahg_index) sdma_ahg_free() argument
3415 _sdma_engine_progress_schedule( struct sdma_engine *sde) _sdma_engine_progress_schedule() argument
[all...]
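
Several of the sdma.c hits above (the err_progress_check timer and the hardware/software clean-up tasklets) recover the engine from an embedded timer or tasklet via from_timer()/from_tasklet(). A minimal, hedged sketch of that pattern follows; the struct and field names are illustrative stand-ins, not the real hfi1 definitions, and it only builds in a kernel-tree context.

    #include <linux/timer.h>
    #include <linux/interrupt.h>

    struct demo_engine {
    	struct timer_list err_progress_check_timer;
    	struct tasklet_struct sdma_sw_clean_up_task;
    };

    static void demo_err_progress_check(struct timer_list *t)
    {
    	/* recover the engine that embeds this timer */
    	struct demo_engine *e = from_timer(e, t, err_progress_check_timer);

    	(void)e;	/* the real callback inspects the engine's progress state */
    }

    static void demo_sw_clean_up(struct tasklet_struct *t)
    {
    	/* same idea for a tasklet embedded in the engine */
    	struct demo_engine *e = from_tasklet(e, t, sdma_sw_clean_up_task);

    	(void)e;	/* the real tasklet flushes and restarts the descriptor queue */
    }

    static void demo_engine_init(struct demo_engine *e)
    {
    	timer_setup(&e->err_progress_check_timer, demo_err_progress_check, 0);
    	tasklet_setup(&e->sdma_sw_clean_up_task, demo_sw_clean_up);
    }
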
sdma.h
432 static inline int sdma_empty(struct sdma_engine *sde) in sdma_empty() argument
434 return sde->descq_tail == sde->descq_head; in sdma_empty()
437 static inline u16 sdma_descq_freecnt(struct sdma_engine *sde) in sdma_descq_freecnt() argument
439 return sde->descq_cnt - in sdma_descq_freecnt()
440 (sde->descq_tail - in sdma_descq_freecnt()
441 READ_ONCE(sde->descq_head)) - 1; in sdma_descq_freecnt()
444 static inline u16 sdma_descq_inprocess(struct sdma_engine *sde) in sdma_descq_inprocess() argument
446 return sde->descq_cnt - sdma_descq_freecnt(sde); in sdma_descq_inprocess()
921 sdma_progress(struct sdma_engine *sde, unsigned seq, struct sdma_txreq *tx) sdma_progress() argument
942 sdma_iowait_schedule( struct sdma_engine *sde, struct iowait *wait) sdma_iowait_schedule() argument
1028 struct sdma_engine *sde[]; global() member
1069 sdma_engine_progress_schedule( struct sdma_engine *sde) sdma_engine_progress_schedule() argument
[all...]
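
The sdma.h hits above show the ring-accounting helpers in full: sdma_descq_freecnt() treats descq_tail and descq_head as free-running 16-bit indices, so unsigned subtraction gives the in-flight distance even across wraparound, and the trailing "- 1" reserves one slot so a full ring never looks empty; the READ_ONCE() on descq_head is there because the head is advanced concurrently by the completion path. A small userspace sketch of the same arithmetic, using stand-in types rather than the real struct sdma_engine:

    #include <stdio.h>
    #include <stdint.h>

    struct demo_ring {
    	uint16_t descq_cnt;	/* number of descriptor slots */
    	uint16_t descq_tail;	/* producer index, free-running */
    	uint16_t descq_head;	/* consumer index, free-running */
    };

    static uint16_t demo_freecnt(const struct demo_ring *r)
    {
    	/* mirrors sdma_descq_freecnt(): free = cnt - (tail - head) - 1 */
    	return r->descq_cnt - (uint16_t)(r->descq_tail - r->descq_head) - 1;
    }

    int main(void)
    {
    	struct demo_ring r = { .descq_cnt = 256, .descq_tail = 10, .descq_head = 5 };

    	printf("in process %u, free %u\n",
    	       (unsigned)(r.descq_cnt - demo_freecnt(&r)), (unsigned)demo_freecnt(&r));

    	/* wraparound: 16-bit subtraction still yields the distance (tail - head = 6) */
    	r.descq_tail = 3;
    	r.descq_head = 65533;
    	printf("after wrap, free %u\n", (unsigned)demo_freecnt(&r));
    	return 0;
    }
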
trace_tx.h
152 TP_PROTO(struct sdma_engine *sde,
157 TP_ARGS(sde, desc0, desc1, e, descp),
158 TP_STRUCT__entry(DD_DEV_ENTRY(sde->dd)
165 TP_fast_assign(DD_DEV_ASSIGN(sde->dd);
168 __entry->idx = sde->this_idx;
346 TP_PROTO(struct sdma_engine *sde, u64 status),
347 TP_ARGS(sde, status),
348 TP_STRUCT__entry(DD_DEV_ENTRY(sde->dd)
352 TP_fast_assign(DD_DEV_ASSIGN(sde->dd);
354 __entry->idx = sde
[all...]
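
The trace_tx.h hits show the usual TRACE_EVENT() anatomy: TP_PROTO/TP_ARGS declare the call signature, TP_STRUCT__entry lays out the ring-buffer record, and TP_fast_assign copies fields such as sde->this_idx into it. A hedged sketch of a comparable event definition; the event name and fields are illustrative (the driver's DD_DEV_* helpers are not reproduced), and it only builds inside a kernel tree with the standard trace-header boilerplate around it.

    TRACE_EVENT(demo_sdma_engine_status,
    	TP_PROTO(struct sdma_engine *sde, u64 status),
    	TP_ARGS(sde, status),
    	TP_STRUCT__entry(
    		__field(u8, idx)		/* engine number, from sde->this_idx */
    		__field(u64, status)		/* raw status value passed in */
    	),
    	TP_fast_assign(
    		__entry->idx = sde->this_idx;
    		__entry->status = status;
    	),
    	TP_printk("engine %u status 0x%llx",
    		  __entry->idx, (unsigned long long)__entry->status)
    );
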
vnic_sdma.c
91 static noinline int build_vnic_ulp_payload(struct sdma_engine *sde, in build_vnic_ulp_payload() argument
97 sde->dd, in build_vnic_ulp_payload()
108 ret = sdma_txadd_page(sde->dd, in build_vnic_ulp_payload()
119 ret = sdma_txadd_kvaddr(sde->dd, &tx->txreq, in build_vnic_ulp_payload()
127 static int build_vnic_tx_desc(struct sdma_engine *sde, in build_vnic_tx_desc() argument
149 sde->dd, in build_vnic_tx_desc()
157 ret = build_vnic_ulp_payload(sde, tx); in build_vnic_tx_desc()
173 struct sdma_engine *sde = vnic_sdma->sde; in hfi1_vnic_send_dma() local
180 if (unlikely(!sde || !sdma_runnin in hfi1_vnic_send_dma()
228 hfi1_vnic_sdma_sleep(struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *txreq, uint seq, bool pkts_sent) hfi1_vnic_sdma_sleep() argument
[all...]
msix.c
225 * @sde: valid sdma engine
228 int msix_request_sdma_irq(struct sdma_engine *sde) in msix_request_sdma_irq() argument
234 sde->dd->unit, sde->this_idx); in msix_request_sdma_irq()
235 nr = msix_request_irq(sde->dd, sde, sdma_interrupt, NULL, in msix_request_sdma_irq()
239 sde->msix_intr = nr; in msix_request_sdma_irq()
240 remap_sdma_interrupts(sde->dd, sde->this_idx, nr); in msix_request_sdma_irq()
302 struct sdma_engine *sde in msix_request_irqs() local
[all...]
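
msix_request_sdma_irq() above names a vector after the device unit and engine index, requests it with the engine as the handler argument, and records the vector in sde->msix_intr for later remapping and teardown. A hedged sketch of the same flow using the stock PCI/IRQ API rather than the hfi1-internal msix_request_irq()/remap_sdma_interrupts() helpers; the structure and handler names are illustrative.

    #include <linux/pci.h>
    #include <linux/interrupt.h>

    struct demo_engine {
    	struct pci_dev *pdev;
    	unsigned int this_idx;	/* engine number, used in the IRQ name */
    	int msix_intr;		/* vector index kept for remap/teardown */
    	char irq_name[32];
    };

    static irqreturn_t demo_sdma_interrupt(int irq, void *arg)
    {
    	/* the real handler kicks the engine's interrupt/progress path */
    	return IRQ_HANDLED;
    }

    static int demo_request_sdma_irq(struct demo_engine *e, unsigned int vec)
    {
    	int irq = pci_irq_vector(e->pdev, vec);
    	int ret;

    	if (irq < 0)
    		return irq;

    	snprintf(e->irq_name, sizeof(e->irq_name), "demo sdma%u", e->this_idx);
    	ret = request_irq(irq, demo_sdma_interrupt, 0, e->irq_name, e);
    	if (ret)
    		return ret;

    	e->msix_intr = vec;	/* remember which vector serves this engine */
    	return 0;
    }
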
qp.c
68 struct sdma_engine *sde,
478 struct sdma_engine *sde, in iowait_sleep()
502 write_seqlock(&sde->waitlock); in iowait_sleep()
503 if (sdma_progress(sde, seq, stx)) in iowait_sleep()
513 &sde->dmawait); in iowait_sleep()
514 priv->s_iowait.lock = &sde->waitlock; in iowait_sleep()
518 write_sequnlock(&sde->waitlock); in iowait_sleep()
528 write_sequnlock(&sde->waitlock); in iowait_sleep()
583 struct sdma_engine *sde; in qp_to_sdma_engine() local
593 sde in qp_to_sdma_engine()
477 iowait_sleep( struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *stx, uint seq, bool pkts_sent) iowait_sleep() argument
644 struct sdma_engine *sde; qp_iter_print() local
[all...]
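
The iowait_sleep() hits show the deferral pattern that also appears in ipoib_tx.c and user_sdma.c below: take the engine's waitlock, re-check sdma_progress() in case descriptors freed up after the submit path decided to sleep, and only then park the waiter on sde->dmawait so the descriptor-available path can wake it. A hedged sketch of that shape, assuming the hfi1 sdma.h context; the waiter structure is illustrative (not the driver's struct iowait), and the return convention here (0 = parked, -EAGAIN = retry) is a simplification.

    struct demo_waiter {
    	struct list_head node;		/* linked onto sde->dmawait */
    	seqlock_t *lock;		/* records which waitlock protects us */
    };

    static int demo_sleep(struct sdma_engine *sde, struct demo_waiter *w,
    		      struct sdma_txreq *tx, unsigned int seq)
    {
    	int ret = 0;

    	write_seqlock(&sde->waitlock);
    	if (sdma_progress(sde, seq, tx)) {
    		/* ring advanced since the caller sampled it: ask for a retry */
    		ret = -EAGAIN;
    		goto unlock;
    	}
    	/* no progress: queue on the engine so the descriptor-available
    	 * path can wake us once space opens up */
    	list_add_tail(&w->node, &sde->dmawait);
    	w->lock = &sde->waitlock;
    unlock:
    	write_sequnlock(&sde->waitlock);
    	return ret;
    }
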
ipoib_tx.c
135 "%s: Status = 0x%x pbc 0x%llx txq = %d sde = %d\n", in hfi1_ipoib_free_tx()
138 tx->txq->sde->this_idx); in hfi1_ipoib_free_tx()
412 txp->txq->sde = in hfi1_ipoib_send_dma_common()
434 ret = sdma_send_txlist(txq->sde, in hfi1_ipoib_submit_tx_list()
468 ret = sdma_send_txreq(txq->sde, in hfi1_ipoib_submit_tx()
627 static int hfi1_ipoib_sdma_sleep(struct sdma_engine *sde, in hfi1_ipoib_sdma_sleep() argument
636 write_seqlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep()
639 if (sdma_progress(sde, seq, txreq)) { in hfi1_ipoib_sdma_sleep()
640 write_sequnlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep()
652 iowait_queue(pkts_sent, wait->iow, &sde in hfi1_ipoib_sdma_sleep()
[all...]
sysfs.c
754 ssize_t (*show)(struct sdma_engine *sde, char *buf);
755 ssize_t (*store)(struct sdma_engine *sde, const char *buf, size_t cnt);
762 struct sdma_engine *sde = in sde_show() local
768 return sde_attr->show(sde, buf); in sde_show()
776 struct sdma_engine *sde = in sde_store() local
785 return sde_attr->store(sde, buf, count); in sde_store()
801 static ssize_t sde_show_cpu_to_sde_map(struct sdma_engine *sde, char *buf) in sde_show_cpu_to_sde_map() argument
803 return sdma_get_cpu_to_sde_map(sde, buf); in sde_show_cpu_to_sde_map()
806 static ssize_t sde_store_cpu_to_sde_map(struct sdma_engine *sde, in sde_store_cpu_to_sde_map() argument
809 return sdma_set_cpu_to_sde_map(sde, bu in sde_store_cpu_to_sde_map()
812 sde_show_vl(struct sdma_engine *sde, char *buf) sde_show_vl() argument
[all...]
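
The sysfs.c hits show the standard kobject attribute indirection: a wrapper attribute type carries per-attribute show/store callbacks, and sde_show()/sde_store() recover both the engine and the wrapper with container_of() before dispatching. A hedged sketch of that dispatch; the wrapper names are illustrative, and it assumes (as the driver's code does) that the engine embeds a struct kobject named kobj.

    #include <linux/kobject.h>
    #include <linux/sysfs.h>

    struct demo_sde_attribute {
    	struct attribute attr;
    	ssize_t (*show)(struct sdma_engine *sde, char *buf);
    	ssize_t (*store)(struct sdma_engine *sde, const char *buf, size_t count);
    };

    static ssize_t demo_sde_show(struct kobject *kobj, struct attribute *attr,
    			     char *buf)
    {
    	struct demo_sde_attribute *a =
    		container_of(attr, struct demo_sde_attribute, attr);
    	struct sdma_engine *sde =
    		container_of(kobj, struct sdma_engine, kobj);

    	return a->show ? a->show(sde, buf) : -EINVAL;
    }

    static ssize_t demo_sde_store(struct kobject *kobj, struct attribute *attr,
    			      const char *buf, size_t count)
    {
    	struct demo_sde_attribute *a =
    		container_of(attr, struct demo_sde_attribute, attr);
    	struct sdma_engine *sde =
    		container_of(kobj, struct sdma_engine, kobj);

    	return a->store ? a->store(sde, buf, count) : -EINVAL;
    }

    static const struct sysfs_ops demo_sde_sysfs_ops = {
    	.show  = demo_sde_show,
    	.store = demo_sde_store,
    };
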
affinity.c
785 struct sdma_engine *sde = msix->arg; in hfi1_update_sdma_affinity() local
786 struct hfi1_devdata *dd = sde->dd; in hfi1_update_sdma_affinity()
791 if (cpu > num_online_cpus() || cpu == sde->cpu) in hfi1_update_sdma_affinity()
799 old_cpu = sde->cpu; in hfi1_update_sdma_affinity()
800 sde->cpu = cpu; in hfi1_update_sdma_affinity()
805 sde->this_idx, cpu); in hfi1_update_sdma_affinity()
883 struct sdma_engine *sde = NULL; in get_irq_affinity() local
895 sde = (struct sdma_engine *)msix->arg; in get_irq_affinity()
896 scnprintf(extra, 64, "engine %u", sde->this_idx); in get_irq_affinity()
946 sde in get_irq_affinity()
[all...]
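
hfi1_update_sdma_affinity() above reacts to an MSI-X affinity change by validating the CPU and swapping sde->cpu from old_cpu to the new value. A hedged sketch of the surrounding mechanism, the generic irq_affinity_notify callback, with illustrative field names on the engine side.

    #include <linux/interrupt.h>
    #include <linux/cpumask.h>

    struct demo_engine {
    	int cpu;				/* CPU currently serving this engine */
    	struct irq_affinity_notify notify;
    };

    static void demo_affinity_notify(struct irq_affinity_notify *notify,
    				 const cpumask_t *mask)
    {
    	struct demo_engine *e = container_of(notify, struct demo_engine, notify);
    	int cpu = cpumask_first(mask);

    	if (cpu >= nr_cpu_ids || cpu == e->cpu)
    		return;			/* invalid or unchanged: nothing to do */

    	e->cpu = cpu;			/* remember where the IRQ now lands */
    }

    static void demo_affinity_release(struct kref *ref)
    {
    	/* required by the notifier API; nothing extra to free in this sketch */
    }

    static int demo_register_notifier(struct demo_engine *e, int irq)
    {
    	e->notify.notify = demo_affinity_notify;
    	e->notify.release = demo_affinity_release;
    	return irq_set_affinity_notifier(irq, &e->notify);
    }
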
verbs_txreq.h
65 struct sdma_engine *sde; member
92 tx->sde = priv->s_sde;
user_sdma.c
98 struct sdma_engine *sde,
122 struct sdma_engine *sde, in defer_packet_queue()
131 write_seqlock(&sde->waitlock); in defer_packet_queue()
132 trace_hfi1_usdma_defer(pq, sde, &pq->busy); in defer_packet_queue()
133 if (sdma_progress(sde, seq, txreq)) in defer_packet_queue()
142 pq->busy.lock = &sde->waitlock; in defer_packet_queue()
144 iowait_queue(pkts_sent, &pq->busy, &sde->dmawait); in defer_packet_queue()
146 write_sequnlock(&sde->waitlock); in defer_packet_queue()
149 write_sequnlock(&sde->waitlock); in defer_packet_queue()
575 req->sde in hfi1_user_sdma_process_request()
121 defer_packet_queue( struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *txreq, uint seq, bool pkts_sent) defer_packet_queue() argument
[all...]
msix.h
59 int msix_request_sdma_irq(struct sdma_engine *sde);
iowait.h
142 struct sdma_engine *sde,
174 int (*sleep)(struct sdma_engine *sde,
/kernel/linux/linux-6.6/drivers/infiniband/hw/hfi1/
sdma.c
201 struct sdma_engine *sde,
204 struct sdma_engine *sde,
206 static void dump_sdma_state(struct sdma_engine *sde);
207 static void sdma_make_progress(struct sdma_engine *sde, u64 status);
208 static void sdma_desc_avail(struct sdma_engine *sde, uint avail);
209 static void sdma_flush_descq(struct sdma_engine *sde);
245 struct sdma_engine *sde, in write_sde_csr()
249 write_kctxt_csr(sde->dd, sde->this_idx, offset0, value); in write_sde_csr()
253 struct sdma_engine *sde, in read_sde_csr()
244 write_sde_csr( struct sdma_engine *sde, u32 offset0, u64 value) write_sde_csr() argument
252 read_sde_csr( struct sdma_engine *sde, u32 offset0) read_sde_csr() argument
263 sdma_wait_for_packet_egress(struct sdma_engine *sde, int pause) sdma_wait_for_packet_egress() argument
304 struct sdma_engine *sde = &dd->per_sdma[i]; sdma_wait() local
310 sdma_set_desc_cnt(struct sdma_engine *sde, unsigned cnt) sdma_set_desc_cnt() argument
322 complete_tx(struct sdma_engine *sde, struct sdma_txreq *tx, int res) complete_tx() argument
362 sdma_flush(struct sdma_engine *sde) sdma_flush() argument
409 struct sdma_engine *sde = sdma_field_flush() local
420 struct sdma_engine *sde = container_of(work, struct sdma_engine, sdma_err_halt_wait() local
447 sdma_err_progress_check_schedule(struct sdma_engine *sde) sdma_err_progress_check_schedule() argument
470 struct sdma_engine *sde = from_timer(sde, t, err_progress_check_timer); sdma_err_progress_check() local
508 struct sdma_engine *sde = from_tasklet(sde, t, sdma_hw_clean_up_task() local
528 get_txhead(struct sdma_engine *sde) get_txhead() argument
536 sdma_flush_descq(struct sdma_engine *sde) sdma_flush_descq() argument
568 struct sdma_engine *sde = from_tasklet(sde, t, sdma_sw_clean_up_task); sdma_sw_clean_up_task() local
613 sdma_sw_tear_down(struct sdma_engine *sde) sdma_sw_tear_down() argument
625 sdma_start_hw_clean_up(struct sdma_engine *sde) sdma_start_hw_clean_up() argument
630 sdma_set_state(struct sdma_engine *sde, enum sdma_states next_state) sdma_set_state() argument
708 sdma_engine_get_vl(struct sdma_engine *sde) sdma_engine_get_vl() argument
795 struct sdma_engine *sde[]; global() member
831 struct sdma_engine *sde = NULL; sdma_select_user_engine() local
868 sdma_cleanup_sde_map(struct sdma_rht_map_elem *map, struct sdma_engine *sde) sdma_cleanup_sde_map() argument
892 sdma_set_cpu_to_sde_map(struct sdma_engine *sde, const char *buf, size_t count) sdma_set_cpu_to_sde_map() argument
1045 sdma_get_cpu_to_sde_map(struct sdma_engine *sde, char *buf) sdma_get_cpu_to_sde_map() argument
1256 struct sdma_engine *sde; sdma_clean() local
1322 struct sdma_engine *sde; sdma_init() local
1513 struct sdma_engine *sde; sdma_all_running() local
1531 struct sdma_engine *sde; sdma_all_idle() local
1552 struct sdma_engine *sde; sdma_start() local
1568 struct sdma_engine *sde; sdma_exit() local
1660 sdma_gethead(struct sdma_engine *sde) sdma_gethead() argument
1723 sdma_desc_avail(struct sdma_engine *sde, uint avail) sdma_desc_avail() argument
1784 sdma_make_progress(struct sdma_engine *sde, u64 status) sdma_make_progress() argument
1853 sdma_engine_interrupt(struct sdma_engine *sde, u64 status) sdma_engine_interrupt() argument
1873 sdma_engine_error(struct sdma_engine *sde, u64 status) sdma_engine_error() argument
1899 sdma_sendctrl(struct sdma_engine *sde, unsigned op) sdma_sendctrl() argument
1948 sdma_setlengen(struct sdma_engine *sde) sdma_setlengen() argument
1967 sdma_update_tail(struct sdma_engine *sde, u16 tail) sdma_update_tail() argument
1978 sdma_hw_start_up(struct sdma_engine *sde) sdma_hw_start_up() argument
2001 set_sdma_integrity(struct sdma_engine *sde) set_sdma_integrity() argument
2009 init_sdma_regs( struct sdma_engine *sde, u32 credits, uint idle_cnt) init_sdma_regs() argument
2060 sdma_dumpstate(struct sdma_engine *sde) sdma_dumpstate() argument
2101 dump_sdma_state(struct sdma_engine *sde) dump_sdma_state() argument
2169 sdma_seqfile_dump_sde(struct seq_file *s, struct sdma_engine *sde) sdma_seqfile_dump_sde() argument
2239 add_gen(struct sdma_engine *sde, u64 qw1) add_gen() argument
2265 submit_tx(struct sdma_engine *sde, struct sdma_txreq *tx) submit_tx() argument
2312 sdma_check_progress( struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *tx, bool pkts_sent) sdma_check_progress() argument
2353 sdma_send_txreq(struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *tx, bool pkts_sent) sdma_send_txreq() argument
2432 sdma_send_txlist(struct sdma_engine *sde, struct iowait_work *wait, struct list_head *tx_list, u16 *count_out) sdma_send_txlist() argument
2502 sdma_process_event(struct sdma_engine *sde, enum sdma_events event) sdma_process_event() argument
2518 __sdma_process_event(struct sdma_engine *sde, enum sdma_events event) __sdma_process_event() argument
3139 struct sdma_engine *sde; sdma_update_lmc() local
3248 sdma_ahg_alloc(struct sdma_engine *sde) sdma_ahg_alloc() argument
3279 sdma_ahg_free(struct sdma_engine *sde, int ahg_index) sdma_ahg_free() argument
3373 _sdma_engine_progress_schedule( struct sdma_engine *sde) _sdma_engine_progress_schedule() argument
[all...]
trace_tx.h
110 TP_PROTO(struct sdma_engine *sde,
115 TP_ARGS(sde, desc0, desc1, e, descp),
116 TP_STRUCT__entry(DD_DEV_ENTRY(sde->dd)
123 TP_fast_assign(DD_DEV_ASSIGN(sde->dd);
126 __entry->idx = sde->this_idx;
304 TP_PROTO(struct sdma_engine *sde, u64 status),
305 TP_ARGS(sde, status),
306 TP_STRUCT__entry(DD_DEV_ENTRY(sde->dd)
310 TP_fast_assign(DD_DEV_ASSIGN(sde->dd);
312 __entry->idx = sde
[all...]
sdma.h
391 static inline int sdma_empty(struct sdma_engine *sde) in sdma_empty() argument
393 return sde->descq_tail == sde->descq_head; in sdma_empty()
396 static inline u16 sdma_descq_freecnt(struct sdma_engine *sde) in sdma_descq_freecnt() argument
398 return sde->descq_cnt - in sdma_descq_freecnt()
399 (sde->descq_tail - in sdma_descq_freecnt()
400 READ_ONCE(sde->descq_head)) - 1; in sdma_descq_freecnt()
403 static inline u16 sdma_descq_inprocess(struct sdma_engine *sde) in sdma_descq_inprocess() argument
405 return sde->descq_cnt - sdma_descq_freecnt(sde); in sdma_descq_inprocess()
880 sdma_progress(struct sdma_engine *sde, unsigned seq, struct sdma_txreq *tx) sdma_progress() argument
969 struct sdma_engine *sde[]; global() member
1010 sdma_engine_progress_schedule( struct sdma_engine *sde) sdma_engine_progress_schedule() argument
[all...]
vnic_sdma.c
49 static noinline int build_vnic_ulp_payload(struct sdma_engine *sde, in build_vnic_ulp_payload() argument
55 sde->dd, in build_vnic_ulp_payload()
66 ret = sdma_txadd_page(sde->dd, in build_vnic_ulp_payload()
77 ret = sdma_txadd_kvaddr(sde->dd, &tx->txreq, in build_vnic_ulp_payload()
85 static int build_vnic_tx_desc(struct sdma_engine *sde, in build_vnic_tx_desc() argument
107 sde->dd, in build_vnic_tx_desc()
115 ret = build_vnic_ulp_payload(sde, tx); in build_vnic_tx_desc()
131 struct sdma_engine *sde = vnic_sdma->sde; in hfi1_vnic_send_dma() local
138 if (unlikely(!sde || !sdma_runnin in hfi1_vnic_send_dma()
186 hfi1_vnic_sdma_sleep(struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *txreq, uint seq, bool pkts_sent) hfi1_vnic_sdma_sleep() argument
[all...]
msix.c
182 * @sde: valid sdma engine
185 int msix_request_sdma_irq(struct sdma_engine *sde) in msix_request_sdma_irq() argument
191 sde->dd->unit, sde->this_idx); in msix_request_sdma_irq()
192 nr = msix_request_irq(sde->dd, sde, sdma_interrupt, NULL, in msix_request_sdma_irq()
196 sde->msix_intr = nr; in msix_request_sdma_irq()
197 remap_sdma_interrupts(sde->dd, sde->this_idx, nr); in msix_request_sdma_irq()
259 struct sdma_engine *sde in msix_request_irqs() local
[all...]
qp.c
26 struct sdma_engine *sde,
437 struct sdma_engine *sde, in iowait_sleep()
461 write_seqlock(&sde->waitlock); in iowait_sleep()
462 if (sdma_progress(sde, seq, stx)) in iowait_sleep()
472 &sde->dmawait); in iowait_sleep()
473 priv->s_iowait.lock = &sde->waitlock; in iowait_sleep()
477 write_sequnlock(&sde->waitlock); in iowait_sleep()
487 write_sequnlock(&sde->waitlock); in iowait_sleep()
542 struct sdma_engine *sde; in qp_to_sdma_engine() local
552 sde in qp_to_sdma_engine()
436 iowait_sleep( struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *stx, uint seq, bool pkts_sent) iowait_sleep() argument
603 struct sdma_engine *sde; qp_iter_print() local
[all...]
ipoib_tx.c
123 "%s: Status = 0x%x pbc 0x%llx txq = %d sde = %d\n", in hfi1_ipoib_free_tx()
126 tx->txq->sde->this_idx); in hfi1_ipoib_free_tx()
389 txq->sde = in hfi1_ipoib_send_dma_common()
410 ret = sdma_send_txlist(txq->sde, in hfi1_ipoib_submit_tx_list()
444 ret = sdma_send_txreq(txq->sde, in hfi1_ipoib_submit_tx()
613 static int hfi1_ipoib_sdma_sleep(struct sdma_engine *sde, in hfi1_ipoib_sdma_sleep() argument
622 write_seqlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep()
625 if (sdma_progress(sde, seq, txreq)) { in hfi1_ipoib_sdma_sleep()
626 write_sequnlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep()
634 struct hfi1_ibport *ibp = &sde in hfi1_ipoib_sdma_sleep()
[all...]
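
The ipoib_tx.c hits use both submit entry points whose signatures appear in the sdma.c results: sdma_send_txreq() for a single request and sdma_send_txlist() for a batch, with count_out reporting how many list entries actually reached the ring. A hedged sketch of the two call shapes; the wrapper functions are illustrative, the txreqs are assumed to have been built elsewhere, and the return-value comments are assumptions about the driver's conventions rather than anything shown above.

    static int demo_submit_one(struct sdma_engine *sde, struct iowait_work *wait,
    			   struct sdma_txreq *tx)
    {
    	/* pkts_sent = false: nothing from this batch has hit the ring yet */
    	int ret = sdma_send_txreq(sde, wait, tx, false);

    	if (ret == -EIOCBQUEUED)
    		return 0;	/* assumed: deferred onto the iowait, resubmitted later */
    	return ret;		/* assumed: 0 on success, negative errno otherwise */
    }

    static int demo_submit_batch(struct sdma_engine *sde, struct iowait_work *wait,
    			     struct list_head *tx_list)
    {
    	u16 sent = 0;
    	int ret = sdma_send_txlist(sde, wait, tx_list, &sent);

    	/* even when the batch is cut short, sent reports how many
    	 * requests made it onto the descriptor ring */
    	return ret ? ret : sent;
    }
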
sysfs.c
571 ssize_t (*show)(struct sdma_engine *sde, char *buf);
572 ssize_t (*store)(struct sdma_engine *sde, const char *buf, size_t cnt);
579 struct sdma_engine *sde = in sde_show() local
585 return sde_attr->show(sde, buf); in sde_show()
593 struct sdma_engine *sde = in sde_store() local
602 return sde_attr->store(sde, buf, count); in sde_store()
618 static ssize_t sde_show_cpu_to_sde_map(struct sdma_engine *sde, char *buf) in sde_show_cpu_to_sde_map() argument
620 return sdma_get_cpu_to_sde_map(sde, buf); in sde_show_cpu_to_sde_map()
623 static ssize_t sde_store_cpu_to_sde_map(struct sdma_engine *sde, in sde_store_cpu_to_sde_map() argument
626 return sdma_set_cpu_to_sde_map(sde, bu in sde_store_cpu_to_sde_map()
629 sde_show_vl(struct sdma_engine *sde, char *buf) sde_show_vl() argument
[all...]
user_sdma.c
57 struct sdma_engine *sde,
65 struct sdma_engine *sde, in defer_packet_queue()
74 write_seqlock(&sde->waitlock); in defer_packet_queue()
75 trace_hfi1_usdma_defer(pq, sde, &pq->busy); in defer_packet_queue()
76 if (sdma_progress(sde, seq, txreq)) in defer_packet_queue()
85 pq->busy.lock = &sde->waitlock; in defer_packet_queue()
87 iowait_queue(pkts_sent, &pq->busy, &sde->dmawait); in defer_packet_queue()
89 write_sequnlock(&sde->waitlock); in defer_packet_queue()
92 write_sequnlock(&sde->waitlock); in defer_packet_queue()
514 req->sde in hfi1_user_sdma_process_request()
64 defer_packet_queue( struct sdma_engine *sde, struct iowait_work *wait, struct sdma_txreq *txreq, uint seq, bool pkts_sent) defer_packet_queue() argument
[all...]
affinity.c
741 struct sdma_engine *sde = msix->arg; in hfi1_update_sdma_affinity() local
742 struct hfi1_devdata *dd = sde->dd; in hfi1_update_sdma_affinity()
747 if (cpu > num_online_cpus() || cpu == sde->cpu) in hfi1_update_sdma_affinity()
755 old_cpu = sde->cpu; in hfi1_update_sdma_affinity()
756 sde->cpu = cpu; in hfi1_update_sdma_affinity()
761 sde->this_idx, cpu); in hfi1_update_sdma_affinity()
839 struct sdma_engine *sde = NULL; in get_irq_affinity() local
851 sde = (struct sdma_engine *)msix->arg; in get_irq_affinity()
852 scnprintf(extra, 64, "engine %u", sde->this_idx); in get_irq_affinity()
902 sde in get_irq_affinity()
[all...]
verbs_txreq.h
23 struct sdma_engine *sde; member
50 tx->sde = priv->s_sde;
msix.h
17 int msix_request_sdma_irq(struct sdma_engine *sde);
