Searched refs: md_ctrl (Results 1 - 6 of 6), sorted by relevance

/kernel/linux/linux-6.6/drivers/net/wwan/t7xx/
t7xx_hif_cldma.c
62 static void md_cd_queue_struct_reset(struct cldma_queue *queue, struct cldma_ctrl *md_ctrl, in md_cd_queue_struct_reset() argument
67 queue->md_ctrl = md_ctrl; in md_cd_queue_struct_reset()
73 static void md_cd_queue_struct_init(struct cldma_queue *queue, struct cldma_ctrl *md_ctrl, in md_cd_queue_struct_init() argument
76 md_cd_queue_struct_reset(queue, md_ctrl, tx_rx, index); in md_cd_queue_struct_init()
93 static int t7xx_cldma_alloc_and_map_skb(struct cldma_ctrl *md_ctrl, struct cldma_request *req, in t7xx_cldma_alloc_and_map_skb() argument
100 req->mapped_buff = dma_map_single(md_ctrl->dev, req->skb->data, size, DMA_FROM_DEVICE); in t7xx_cldma_alloc_and_map_skb()
101 if (dma_mapping_error(md_ctrl->dev, req->mapped_buff)) { in t7xx_cldma_alloc_and_map_skb()
105 dev_err(md_ctrl->dev, "DMA mapping failed\n"); in t7xx_cldma_alloc_and_map_skb()
114 struct cldma_ctrl *md_ctrl in t7xx_cldma_gpd_rx_from_q() local
199 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_gpd_rx_collect() local
242 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_rx_done() local
260 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_gpd_tx_collect() local
301 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_txq_empty_hndl() local
338 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_tx_done() local
370 t7xx_cldma_ring_free(struct cldma_ctrl *md_ctrl, struct cldma_ring *ring, enum dma_data_direction tx_rx) t7xx_cldma_ring_free() argument
392 t7xx_alloc_rx_request(struct cldma_ctrl *md_ctrl, size_t pkt_size) t7xx_alloc_rx_request() argument
420 t7xx_cldma_rx_ring_init(struct cldma_ctrl *md_ctrl, struct cldma_ring *ring) t7xx_cldma_rx_ring_init() argument
453 t7xx_alloc_tx_request(struct cldma_ctrl *md_ctrl) t7xx_alloc_tx_request() argument
470 t7xx_cldma_tx_ring_init(struct cldma_ctrl *md_ctrl, struct cldma_ring *ring) t7xx_cldma_tx_ring_init() argument
523 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_rxq_init() local
532 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_txq_init() local
539 t7xx_cldma_enable_irq(struct cldma_ctrl *md_ctrl) t7xx_cldma_enable_irq() argument
544 t7xx_cldma_disable_irq(struct cldma_ctrl *md_ctrl) t7xx_cldma_disable_irq() argument
549 t7xx_cldma_irq_work_cb(struct cldma_ctrl *md_ctrl) t7xx_cldma_irq_work_cb() argument
610 t7xx_cldma_qs_are_active(struct cldma_ctrl *md_ctrl) t7xx_cldma_qs_are_active() argument
636 t7xx_cldma_stop(struct cldma_ctrl *md_ctrl) t7xx_cldma_stop() argument
669 t7xx_cldma_late_release(struct cldma_ctrl *md_ctrl) t7xx_cldma_late_release() argument
687 t7xx_cldma_reset(struct cldma_ctrl *md_ctrl) t7xx_cldma_reset() argument
724 t7xx_cldma_start(struct cldma_ctrl *md_ctrl) t7xx_cldma_start() argument
759 t7xx_cldma_clear_txq(struct cldma_ctrl *md_ctrl, int qnum) t7xx_cldma_clear_txq() argument
779 t7xx_cldma_clear_rxq(struct cldma_ctrl *md_ctrl, int qnum) t7xx_cldma_clear_rxq() argument
815 t7xx_cldma_clear_all_qs(struct cldma_ctrl *md_ctrl, enum mtk_txrx tx_rx) t7xx_cldma_clear_all_qs() argument
828 t7xx_cldma_stop_all_qs(struct cldma_ctrl *md_ctrl, enum mtk_txrx tx_rx) t7xx_cldma_stop_all_qs() argument
847 struct cldma_ctrl *md_ctrl = queue->md_ctrl; t7xx_cldma_gpd_handle_tx_request() local
876 t7xx_cldma_hw_start_send(struct cldma_ctrl *md_ctrl, int qno, struct cldma_request *prev_req) t7xx_cldma_hw_start_send() argument
903 t7xx_cldma_set_recv_skb(struct cldma_ctrl *md_ctrl, int (*recv_skb)(struct cldma_queue *queue, struct sk_buff *skb)) t7xx_cldma_set_recv_skb() argument
922 t7xx_cldma_send_skb(struct cldma_ctrl *md_ctrl, int qno, struct sk_buff *skb) t7xx_cldma_send_skb() argument
996 t7xx_cldma_late_init(struct cldma_ctrl *md_ctrl) t7xx_cldma_late_init() argument
1063 t7xx_hw_info_init(struct cldma_ctrl *md_ctrl) t7xx_hw_info_init() argument
1096 struct cldma_ctrl *md_ctrl; t7xx_cldma_alloc() local
1113 struct cldma_ctrl *md_ctrl = entity_param; t7xx_cldma_resume_early() local
1138 struct cldma_ctrl *md_ctrl = entity_param; t7xx_cldma_resume() local
1155 struct cldma_ctrl *md_ctrl = entity_param; t7xx_cldma_suspend_late() local
1173 struct cldma_ctrl *md_ctrl = entity_param; t7xx_cldma_suspend() local
1193 t7xx_cldma_pm_init(struct cldma_ctrl *md_ctrl) t7xx_cldma_pm_init() argument
1214 t7xx_cldma_pm_uninit(struct cldma_ctrl *md_ctrl) t7xx_cldma_pm_uninit() argument
1225 t7xx_cldma_hif_hw_init(struct cldma_ctrl *md_ctrl) t7xx_cldma_hif_hw_init() argument
1241 struct cldma_ctrl *md_ctrl = data; t7xx_cldma_isr_handler() local
1252 t7xx_cldma_destroy_wqs(struct cldma_ctrl *md_ctrl) t7xx_cldma_destroy_wqs() argument
1283 t7xx_cldma_init(struct cldma_ctrl *md_ctrl) t7xx_cldma_init() argument
1335 t7xx_cldma_switch_cfg(struct cldma_ctrl *md_ctrl) t7xx_cldma_switch_cfg() argument
1341 t7xx_cldma_exit(struct cldma_ctrl *md_ctrl) t7xx_cldma_exit() argument
[all...]
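
Nearly every hit in t7xx_hif_cldma.c follows the same pattern: md_cd_queue_struct_reset() stores a back pointer from each cldma_queue to its owning cldma_ctrl, and the per-queue handlers (rx/tx collect, done, empty) recover the controller through queue->md_ctrl before touching md_ctrl->dev for DMA mapping or logging. Below is a minimal sketch of that back-pointer pattern, using simplified stand-in structs (demo_*) rather than the real driver definitions.

#include <linux/device.h>

/* Simplified stand-ins; the real struct cldma_ctrl and struct cldma_queue
 * live in t7xx_hif_cldma.h and carry far more state.
 */
struct demo_ctrl {
	struct device *dev;		/* used for dev_err()/dma_map_single() in the real code */
};

struct demo_queue {
	struct demo_ctrl *md_ctrl;	/* back pointer, set once at queue init */
	unsigned int index;
};

/* Mirrors the role of md_cd_queue_struct_reset(): record the owning controller. */
static void demo_queue_struct_reset(struct demo_queue *queue, struct demo_ctrl *md_ctrl,
				    unsigned int index)
{
	queue->md_ctrl = md_ctrl;
	queue->index = index;
}

/* Mirrors the rx/tx collect handlers: per-queue work begins by recovering
 * the controller from the queue's back pointer.
 */
static void demo_queue_service(struct demo_queue *queue)
{
	struct demo_ctrl *md_ctrl = queue->md_ctrl;

	dev_dbg(md_ctrl->dev, "servicing queue %u\n", queue->index);
}
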
t7xx_hif_cldma.h
73 struct cldma_ctrl *md_ctrl; member
114 void t7xx_cldma_hif_hw_init(struct cldma_ctrl *md_ctrl);
115 int t7xx_cldma_init(struct cldma_ctrl *md_ctrl);
116 void t7xx_cldma_exit(struct cldma_ctrl *md_ctrl);
117 void t7xx_cldma_switch_cfg(struct cldma_ctrl *md_ctrl);
118 void t7xx_cldma_start(struct cldma_ctrl *md_ctrl);
119 int t7xx_cldma_stop(struct cldma_ctrl *md_ctrl);
120 void t7xx_cldma_reset(struct cldma_ctrl *md_ctrl);
121 void t7xx_cldma_set_recv_skb(struct cldma_ctrl *md_ctrl,
123 int t7xx_cldma_send_skb(struct cldma_ctrl *md_ctrl, in
[all...]
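
The t7xx_hif_cldma.h hits list the exported CLDMA interface: init/exit, hardware programming via t7xx_cldma_hif_hw_init(), start/stop/reset, a receive-callback hook, and skb transmission. The following is a hedged sketch of how a caller might drive that interface; the cldma_ctrl instance is assumed to come from the driver's own allocation path (t7xx_cldma_alloc() appears in the .c hits but is not expanded here), and the call order is illustrative rather than copied from the driver.

#include <linux/skbuff.h>

#include "t7xx_hif_cldma.h"

/* Hypothetical receive callback; the prototype matches the recv_skb
 * parameter of t7xx_cldma_set_recv_skb() in the header hits above.
 */
static int demo_recv_skb(struct cldma_queue *queue, struct sk_buff *skb)
{
	dev_kfree_skb(skb);	/* a real consumer would route the skb to a port */
	return 0;
}

/* Sketch only: md_ctrl is assumed to be an already-allocated controller. */
static int demo_cldma_bring_up(struct cldma_ctrl *md_ctrl, struct sk_buff *skb)
{
	int ret;

	ret = t7xx_cldma_init(md_ctrl);		/* software-side setup (queues, work items, ISR hookup) */
	if (ret)
		return ret;

	t7xx_cldma_set_recv_skb(md_ctrl, demo_recv_skb);
	t7xx_cldma_hif_hw_init(md_ctrl);	/* program the CLDMA hardware block */
	t7xx_cldma_start(md_ctrl);		/* enable queues and interrupts */

	/* Transmit on queue 0 as an example. On teardown, t7xx_cldma_stop()
	 * and t7xx_cldma_exit() are the matching counterparts per the header.
	 */
	return t7xx_cldma_send_skb(md_ctrl, 0, skb);
}
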
t7xx_modem_ops.c
229 * @md_ctrl: modem control struct.
260 static void t7xx_cldma_exception(struct cldma_ctrl *md_ctrl, enum hif_ex_stage stage) in t7xx_cldma_exception() argument
264 t7xx_cldma_stop_all_qs(md_ctrl, MTK_TX); in t7xx_cldma_exception()
265 t7xx_cldma_clear_all_qs(md_ctrl, MTK_TX); in t7xx_cldma_exception()
272 t7xx_cldma_stop_all_qs(md_ctrl, MTK_RX); in t7xx_cldma_exception()
273 t7xx_cldma_stop(md_ctrl); in t7xx_cldma_exception()
275 if (md_ctrl->hif_id == CLDMA_ID_MD) in t7xx_cldma_exception()
276 t7xx_cldma_hw_reset(md_ctrl->t7xx_dev->base_addr.infracfg_ao_base); in t7xx_cldma_exception()
278 t7xx_cldma_clear_all_qs(md_ctrl, MTK_RX); in t7xx_cldma_exception()
282 t7xx_cldma_hw_init(&md_ctrl in t7xx_cldma_exception()
[all...]
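
The t7xx_modem_ops.c hits trace t7xx_cldma_exception(): TX queues are stopped and cleared first, then RX queues are stopped, the controller is halted, the CLDMA block is reset through the infracfg_ao register base for the modem-side instance only, and finally RX is cleared and the hardware re-initialised. A condensed, hedged restatement of that sequence follows, assuming the driver's own headers are in scope; the HIF_EX stage handling is collapsed into a boolean, and the t7xx_cldma_hw_init() argument, truncated in the hit above, is left as a comment.

#include <linux/types.h>

#include "t7xx_cldma.h"		/* t7xx_cldma_hw_reset(); header name assumed from the in-tree layout */
#include "t7xx_hif_cldma.h"
#include "t7xx_pci.h"		/* struct t7xx_pci_dev, for base_addr.infracfg_ao_base */

/* Sketch only: the real function switches on enum hif_ex_stage; the two
 * branches below correspond to the TX-side and RX-side stages visible above.
 */
static void demo_cldma_exception(struct cldma_ctrl *md_ctrl, bool rx_stage)
{
	if (!rx_stage) {
		/* Early stage: silence the host-to-device direction. */
		t7xx_cldma_stop_all_qs(md_ctrl, MTK_TX);
		t7xx_cldma_clear_all_qs(md_ctrl, MTK_TX);
		return;
	}

	/* Later stage: quiesce RX, halt the controller, reset the CLDMA block
	 * only for the modem-side instance, then clear RX.
	 */
	t7xx_cldma_stop_all_qs(md_ctrl, MTK_RX);
	t7xx_cldma_stop(md_ctrl);

	if (md_ctrl->hif_id == CLDMA_ID_MD)
		t7xx_cldma_hw_reset(md_ctrl->t7xx_dev->base_addr.infracfg_ao_base);

	t7xx_cldma_clear_all_qs(md_ctrl, MTK_RX);
	/* The driver then calls t7xx_cldma_hw_init(&md_ctrl->...); the exact
	 * argument is truncated in the search output above.
	 */
}
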
t7xx_port_proxy.c
220 struct cldma_ctrl *md_ctrl; in t7xx_port_send_raw_skb() local
223 md_ctrl = port->t7xx_dev->md->md_ctrl[path_id]; in t7xx_port_send_raw_skb()
225 ret = t7xx_cldma_send_skb(md_ctrl, tx_qno, skb); in t7xx_port_send_raw_skb()
345 if (queue->md_ctrl->hif_id == port_conf->path_id && in t7xx_port_proxy_find_port()
365 struct t7xx_pci_dev *t7xx_dev = queue->md_ctrl->t7xx_dev; in t7xx_port_proxy_recv_skb()
367 struct device *dev = queue->md_ctrl->dev; in t7xx_port_proxy_recv_skb()
495 t7xx_cldma_set_recv_skb(md->md_ctrl[CLDMA_ID_AP], t7xx_port_proxy_recv_skb); in t7xx_port_proxy_init()
496 t7xx_cldma_set_recv_skb(md->md_ctrl[CLDMA_ID_MD], t7xx_port_proxy_recv_skb); in t7xx_port_proxy_init()
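
The port proxy hits show the consumer side of the interface: t7xx_port_send_raw_skb() looks up the controller for the port's configured path in md->md_ctrl[] and hands the skb to t7xx_cldma_send_skb(), while t7xx_port_proxy_init() registers a single shared receive callback on both the AP and MD controllers. Below is a hedged sketch of the send path, with the port/device plumbing reduced to bare parameters; the real code derives path_id and the TX queue number from the port's configuration.

#include <linux/skbuff.h>

#include "t7xx_hif_cldma.h"
#include "t7xx_modem_ops.h"	/* struct t7xx_modem, with md_ctrl[CLDMA_NUM] per the t7xx_modem_ops.h hit */

/* Sketch only: path_id and tx_qno are explicit parameters here instead of
 * being read from the port's configuration as in t7xx_port_send_raw_skb().
 */
static int demo_port_send(struct t7xx_modem *md, enum cldma_id path_id,
			  int tx_qno, struct sk_buff *skb)
{
	struct cldma_ctrl *md_ctrl = md->md_ctrl[path_id];

	return t7xx_cldma_send_skb(md_ctrl, tx_qno, skb);
}
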
t7xx_state_monitor.c
230 struct cldma_ctrl *md_ctrl; in fsm_routine_stopping() local
238 md_ctrl = ctl->md->md_ctrl[CLDMA_ID_MD]; in fsm_routine_stopping()
243 t7xx_cldma_stop(md_ctrl); in fsm_routine_stopping()
346 t7xx_cldma_hif_hw_init(md->md_ctrl[CLDMA_ID_AP]); in fsm_routine_start()
347 t7xx_cldma_hif_hw_init(md->md_ctrl[CLDMA_ID_MD]); in fsm_routine_start()
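
The state-monitor hits show the FSM handling the two controllers asymmetrically at shutdown (only the MD instance is stopped in the fsm_routine_stopping() fragment above) but symmetrically at start, where both are hardware-initialised. A hedged sketch of that pairing, assuming a struct t7xx_modem pointer is already at hand:

#include "t7xx_hif_cldma.h"
#include "t7xx_modem_ops.h"

/* Mirrors fsm_routine_start(): program both CLDMA instances. */
static void demo_fsm_start(struct t7xx_modem *md)
{
	t7xx_cldma_hif_hw_init(md->md_ctrl[CLDMA_ID_AP]);
	t7xx_cldma_hif_hw_init(md->md_ctrl[CLDMA_ID_MD]);
}

/* Mirrors the fsm_routine_stopping() fragment above: only the modem-side
 * controller is stopped here; the rest of the stop path is not in the hits.
 */
static void demo_fsm_stopping(struct t7xx_modem *md)
{
	t7xx_cldma_stop(md->md_ctrl[CLDMA_ID_MD]);
}
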
t7xx_modem_ops.h
66 struct cldma_ctrl *md_ctrl[CLDMA_NUM]; member
