/kernel/linux/linux-5.10/drivers/dma/ioat/
prep.c
    157  struct ioat_ring_ent *compl_desc;  in __ioat_prep_xor_lock() [local]
    230  compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);  in __ioat_prep_xor_lock()
    231  compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT;  in __ioat_prep_xor_lock()
    232  hw = compl_desc->hw;  in __ioat_prep_xor_lock()
    238  dump_desc_dbg(ioat_chan, compl_desc);  in __ioat_prep_xor_lock()
    241  return &compl_desc->txd;  in __ioat_prep_xor_lock()
    346  struct ioat_ring_ent *compl_desc;  in __ioat_prep_pq_lock() [local]
    443  compl_desc = desc;  in __ioat_prep_pq_lock()
    446  compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);  in __ioat_prep_pq_lock()
    447  compl_desc ...  in __ioat_prep_pq_lock()
    [all...]

/kernel/linux/linux-6.6/drivers/dma/ioat/
prep.c
    157  struct ioat_ring_ent *compl_desc;  in __ioat_prep_xor_lock() [local]
    230  compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);  in __ioat_prep_xor_lock()
    231  compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT;  in __ioat_prep_xor_lock()
    232  hw = compl_desc->hw;  in __ioat_prep_xor_lock()
    238  dump_desc_dbg(ioat_chan, compl_desc);  in __ioat_prep_xor_lock()
    241  return &compl_desc->txd;  in __ioat_prep_xor_lock()
    346  struct ioat_ring_ent *compl_desc;  in __ioat_prep_pq_lock() [local]
    443  compl_desc = desc;  in __ioat_prep_pq_lock()
    446  compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);  in __ioat_prep_pq_lock()
    447  compl_desc ...  in __ioat_prep_pq_lock()
    [all...]
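Across both trees the ioat hits trace the same pattern: __ioat_prep_xor_lock() and __ioat_prep_pq_lock() claim one ring entry beyond the payload descriptors, keep only DMA_PREP_INTERRUPT in its flags, and hand its transaction descriptor back to the client. A minimal sketch of that shape, with simplified stand-in types rather than the real ioat structures:

    struct txd { unsigned long flags; };
    struct ring_ent { struct txd txd; void *hw; };

    #define DMA_PREP_INTERRUPT (1UL << 0)   /* stand-in for the dmaengine flag */

    /* power-of-2 ring lookup, in the spirit of ioat_get_ring_ent() */
    static struct ring_ent *get_ring_ent(struct ring_ent *ring, int size, int idx)
    {
            return &ring[idx & (size - 1)];
    }

    static struct txd *prep_op(struct ring_ent *ring, int size, int idx,
                               int ndesc, unsigned long flags)
    {
            struct ring_ent *compl_desc;

            /* ... payload descriptors fill slots idx .. idx + ndesc - 1 ... */

            /* the entry after the payload becomes the completion descriptor */
            compl_desc = get_ring_ent(ring, size, idx + ndesc);
            compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT;
            return &compl_desc->txd;        /* caller submits and waits on this */
    }
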
/kernel/linux/linux-6.6/drivers/net/ethernet/google/gve/
gve_rx_dqo.c
    496  const struct gve_rx_compl_desc_dqo *compl_desc,  in gve_rx_skb_hash()
    506  skb_set_hash(skb, le32_to_cpu(compl_desc->hash), hash_type);  in gve_rx_skb_hash()
    608  const struct gve_rx_compl_desc_dqo *compl_desc,  in gve_rx_dqo()
    611  const u16 buffer_id = le16_to_cpu(compl_desc->buf_id);  in gve_rx_dqo()
    612  const bool eop = compl_desc->end_of_packet != 0;  in gve_rx_dqo()
    629  if (unlikely(compl_desc->rx_error)) {  in gve_rx_dqo()
    635  buf_len = compl_desc->packet_len;  in gve_rx_dqo()
    769  struct gve_rx_compl_desc_dqo *compl_desc =  in gve_rx_poll_dqo() [local]
    774  if (compl_desc->generation == complq->cur_gen_bit)  in gve_rx_poll_dqo()
    784  err = gve_rx_dqo(napi, rx, compl_desc, r ...  in gve_rx_poll_dqo()
    495  gve_rx_skb_hash(struct sk_buff *skb, const struct gve_rx_compl_desc_dqo *compl_desc, struct gve_ptype ptype)  gve_rx_skb_hash() [argument]
    607  gve_rx_dqo(struct napi_struct *napi, struct gve_rx_ring *rx, const struct gve_rx_compl_desc_dqo *compl_desc, int queue_idx)  gve_rx_dqo() [argument]
    [all...]
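The gve_rx_poll_dqo() hits show the DQO generation-bit handshake: the device flips a generation bit each time it wraps the completion ring, so an entry whose bit still equals the driver's current expectation has not been written on this pass. A minimal sketch, with illustrative names rather than the real gve layout (a real driver also needs a dma_rmb() between the generation check and the payload reads):

    #include <stdbool.h>
    #include <stdint.h>

    struct compl_desc {
            uint8_t generation;     /* written by the device */
            /* ... buf_id, packet_len, rx_error, ... */
    };

    struct complq {
            struct compl_desc *ring;
            uint32_t head;
            uint32_t mask;          /* ring size is a power of two */
            uint8_t cur_gen_bit;    /* value a stale entry still carries */
    };

    static bool poll_one(struct complq *q, struct compl_desc *out)
    {
            struct compl_desc *compl_desc = &q->ring[q->head];

            if (compl_desc->generation == q->cur_gen_bit)
                    return false;   /* not yet written on this pass */

            *out = *compl_desc;     /* consume the descriptor */
            q->head = (q->head + 1) & q->mask;
            if (q->head == 0)
                    q->cur_gen_bit ^= 1;    /* flip expectation on wrap */
            return true;
    }
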
gve_tx_dqo.c
    1180  struct gve_tx_compl_desc *compl_desc =  in gve_clean_tx_done_dqo() [local]
    1184  if (compl_desc->generation == tx->dqo_compl.cur_gen_bit)  in gve_clean_tx_done_dqo()
    1193  type = compl_desc->type;  in gve_clean_tx_done_dqo()
    1197  u16 tx_head = le16_to_cpu(compl_desc->tx_head);  in gve_clean_tx_done_dqo()
    1201  u16 compl_tag = le16_to_cpu(compl_desc->completion_tag);  in gve_clean_tx_done_dqo()
    1215  u16 compl_tag = le16_to_cpu(compl_desc->completion_tag);  in gve_clean_tx_done_dqo()
    1221  u16 compl_tag = le16_to_cpu(compl_desc->completion_tag);  in gve_clean_tx_done_dqo()
    1253  struct gve_tx_compl_desc *compl_desc;  in gve_tx_poll_dqo() [local]
    1272  compl_desc = &tx->dqo.compl_ring[tx->dqo_compl.head];  in gve_tx_poll_dqo()
    1273  return compl_desc ...  in gve_tx_poll_dqo()
    [all...]
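On the TX side, gve_clean_tx_done_dqo() dispatches on a completion type: descriptor completions report how far the device has consumed the TX ring (tx_head), while packet-style completions carry a completion_tag naming the finished packet; the le16_to_cpu() calls reflect the little-endian hardware layout. A hedged sketch of that dispatch, with illustrative type values and struct layout (le16toh() standing in for le16_to_cpu()):

    #include <endian.h>
    #include <stdint.h>

    #define COMPL_TYPE_DESC 1       /* illustrative values, not the real ones */
    #define COMPL_TYPE_PKT  2

    struct tx_compl_desc {
            uint8_t generation;
            uint8_t type;
            uint16_t tx_head;               /* valid when type == COMPL_TYPE_DESC */
            uint16_t completion_tag;        /* valid when type == COMPL_TYPE_PKT */
    };

    static void handle_tx_compl(const struct tx_compl_desc *compl_desc)
    {
            switch (compl_desc->type) {
            case COMPL_TYPE_DESC: {
                    uint16_t tx_head = le16toh(compl_desc->tx_head);
                    /* everything behind tx_head is done: unmap and free it */
                    (void)tx_head;
                    break;
            }
            case COMPL_TYPE_PKT: {
                    uint16_t compl_tag = le16toh(compl_desc->completion_tag);
                    /* look the packet up by its tag and complete it */
                    (void)compl_tag;
                    break;
            }
            }
    }
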
/kernel/linux/linux-5.10/drivers/crypto/ccree/
cc_request_mgr.c
    29  struct cc_hw_desc compl_desc;  [member]
    171  hw_desc_init(&req_mgr_h->compl_desc);  in cc_req_mgr_init()
    172  set_din_const(&req_mgr_h->compl_desc, 0, sizeof(u32));  in cc_req_mgr_init()
    173  set_dout_dlli(&req_mgr_h->compl_desc, req_mgr_h->dummy_comp_buff_dma,  in cc_req_mgr_init()
    175  set_flow_mode(&req_mgr_h->compl_desc, BYPASS);  in cc_req_mgr_init()
    176  set_queue_last_ind(drvdata, &req_mgr_h->compl_desc);  in cc_req_mgr_init()
    314  enqueue_seq(drvdata, &req_mgr_h->compl_desc, 1);  in cc_do_send_request()

/kernel/linux/linux-6.6/drivers/crypto/ccree/
cc_request_mgr.c
    29  struct cc_hw_desc compl_desc;  [member]
    170  hw_desc_init(&req_mgr_h->compl_desc);  in cc_req_mgr_init()
    171  set_din_const(&req_mgr_h->compl_desc, 0, sizeof(u32));  in cc_req_mgr_init()
    172  set_dout_dlli(&req_mgr_h->compl_desc, req_mgr_h->dummy_comp_buff_dma,  in cc_req_mgr_init()
    174  set_flow_mode(&req_mgr_h->compl_desc, BYPASS);  in cc_req_mgr_init()
    175  set_queue_last_ind(drvdata, &req_mgr_h->compl_desc);  in cc_req_mgr_init()
    313  enqueue_seq(drvdata, &req_mgr_h->compl_desc, 1);  in cc_do_send_request()
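In both trees the ccree hits assemble into one init-once, replay-per-request sequence: cc_req_mgr_init() pre-builds a BYPASS descriptor that writes a zero u32 into a dummy DMA buffer and is flagged as the queue's last entry, and cc_do_send_request() appends it after every request so its write-back doubles as the completion signal. A reconstruction of that sequence; the set_dout_dlli() arguments after dummy_comp_buff_dma are truncated in the listing, so the tail shown is an assumption, not a verified quote:

    /* cc_req_mgr_init(): build the completion descriptor once */
    hw_desc_init(&req_mgr_h->compl_desc);
    set_din_const(&req_mgr_h->compl_desc, 0, sizeof(u32));
    set_dout_dlli(&req_mgr_h->compl_desc, req_mgr_h->dummy_comp_buff_dma,
                  sizeof(u32), NS_BIT, 1);      /* tail args assumed */
    set_flow_mode(&req_mgr_h->compl_desc, BYPASS);
    set_queue_last_ind(drvdata, &req_mgr_h->compl_desc);

    /* cc_do_send_request(): replay it after every request */
    enqueue_seq(drvdata, &req_mgr_h->compl_desc, 1);
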
/kernel/linux/linux-5.10/drivers/usb/dwc2/
gadget.c
    1021  hs_ep->compl_desc = 0;  in dwc2_gadget_start_isoc_ddma()
    2185  desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;  in dwc2_gadget_complete_isoc_request_ddma()
    2220  hs_ep->compl_desc++;  in dwc2_gadget_complete_isoc_request_ddma()
    2221  if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1))  in dwc2_gadget_complete_isoc_request_ddma()
    2222  hs_ep->compl_desc = 0;  in dwc2_gadget_complete_isoc_request_ddma()
    2223  desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;  in dwc2_gadget_complete_isoc_request_ddma()
    2246  hs_ep->compl_desc = 0;  in dwc2_gadget_handle_isoc_bna()
    4111  hs_ep->compl_desc = 0;  in dwc2_hsotg_ep_enable()

core.h
    128  * @compl_desc: index of next descriptor to be completed by xFerComplete
    183  unsigned int compl_desc;  [member]

/kernel/linux/linux-6.6/drivers/usb/dwc2/
gadget.c
    1020  hs_ep->compl_desc = 0;  in dwc2_gadget_start_isoc_ddma()
    2185  desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;  in dwc2_gadget_complete_isoc_request_ddma()
    2220  hs_ep->compl_desc++;  in dwc2_gadget_complete_isoc_request_ddma()
    2221  if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1))  in dwc2_gadget_complete_isoc_request_ddma()
    2222  hs_ep->compl_desc = 0;  in dwc2_gadget_complete_isoc_request_ddma()
    2223  desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;  in dwc2_gadget_complete_isoc_request_ddma()
    2246  hs_ep->compl_desc = 0;  in dwc2_gadget_handle_isoc_bna()
    4112  hs_ep->compl_desc = 0;  in dwc2_hsotg_ep_enable()

core.h
    100  * @compl_desc: index of next descriptor to be completed by xFerComplete
    156  unsigned int compl_desc;  [member]
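The dwc2 hits describe index bookkeeping rather than a descriptor format: for isochronous DDMA, hs_ep->compl_desc tracks the next chain descriptor XferComplete is expected to retire; completion advances it with a wrap at MAX_DMA_DESC_NUM_HS_ISOC, and BNA handling or re-enabling the endpoint resets it to 0. A small sketch of that walk; the constant's value matches the dwc2 sources, while the descriptor layout and the done-predicate are illustrative stand-ins:

    #include <stdint.h>

    #define MAX_DMA_DESC_NUM_HS_ISOC 256

    struct dma_desc { uint32_t status; };

    /* stand-in: the real code checks the buffer-status bits of the word */
    static int desc_is_done(uint32_t sts)
    {
            return (sts >> 30) == 0;        /* illustrative encoding */
    }

    static unsigned int retire_completed(struct dma_desc *desc_list,
                                         unsigned int compl_desc)
    {
            uint32_t desc_sts = desc_list[compl_desc].status;

            while (desc_is_done(desc_sts)) {
                    /* ... hand the finished request back to the gadget ... */
                    compl_desc++;
                    /* wrap, as at gadget.c:2221-2222 in the listing */
                    if (compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1))
                            compl_desc = 0;
                    desc_sts = desc_list[compl_desc].status;
            }
            return compl_desc;
    }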