/kernel/linux/linux-5.10/drivers/ps3/ps3-vuart.c
    72   } tx_list;  member
    486  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    488  if (list_empty(&priv->tx_list.head)) {  in ps3_vuart_write()
    493  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    511  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    523  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    524  list_add_tail(&lb->link, &priv->tx_list.head);  in ps3_vuart_write()
    526  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    728  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_handle_interrupt_tx()
    730  list_for_each_entry_safe(lb, n, &priv->tx_list  in ps3_vuart_handle_interrupt_tx()
    [all...]

/kernel/linux/linux-6.6/drivers/ps3/ps3-vuart.c
    72   } tx_list;  member
    486  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    488  if (list_empty(&priv->tx_list.head)) {  in ps3_vuart_write()
    493  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    511  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    523  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    524  list_add_tail(&lb->link, &priv->tx_list.head);  in ps3_vuart_write()
    526  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    728  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_handle_interrupt_tx()
    730  list_for_each_entry_safe(lb, n, &priv->tx_list  in ps3_vuart_handle_interrupt_tx()
    [all...]
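
The ps3-vuart hits (identical in 5.10 and 6.6) show a spinlock paired with a list head in one anonymous struct member: ps3_vuart_write() appends buffers under tx_list.lock (trying a direct write first when the list is empty, per line 488) and the TX interrupt drains the list with list_for_each_entry_safe(). A minimal sketch of that pattern, with hypothetical demo_* names standing in for the driver's real types:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct demo_buf {
        struct list_head link;
        /* payload would live here */
};

struct demo_priv {
        struct {
                spinlock_t lock;        /* spin_lock_init() at probe time */
                struct list_head head;  /* INIT_LIST_HEAD() at probe time */
        } tx_list;                      /* mirrors the "} tx_list;" member above */
};

/* write path: queue a buffer for the TX interrupt to pick up */
static void demo_write(struct demo_priv *priv, struct demo_buf *lb)
{
        unsigned long flags;

        spin_lock_irqsave(&priv->tx_list.lock, flags);
        list_add_tail(&lb->link, &priv->tx_list.head);
        spin_unlock_irqrestore(&priv->tx_list.lock, flags);
}

/* interrupt path: walk and release queued buffers */
static void demo_handle_interrupt_tx(struct demo_priv *priv)
{
        struct demo_buf *lb, *n;
        unsigned long flags;

        spin_lock_irqsave(&priv->tx_list.lock, flags);
        list_for_each_entry_safe(lb, n, &priv->tx_list.head, link) {
                list_del(&lb->link);    /* the _safe iterator allows deletion */
                kfree(lb);
        }
        spin_unlock_irqrestore(&priv->tx_list.lock, flags);
}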

/kernel/linux/linux-5.10/sound/soc/intel/common/sst-ipc.c
    105  list_add_tail(&msg->list, &ipc->tx_list);  in ipc_tx_message()
    160  while (!list_empty(&ipc->tx_list) && !ipc->pending) {  in ipc_tx_msgs()
    170  msg = list_first_entry(&ipc->tx_list, struct ipc_message, list);  in ipc_tx_msgs()
    261  INIT_LIST_HEAD(&ipc->tx_list);  in sst_ipc_init()

/kernel/linux/linux-6.6/sound/soc/intel/common/sst-ipc.c
    105  list_add_tail(&msg->list, &ipc->tx_list);  in ipc_tx_message()
    160  while (!list_empty(&ipc->tx_list) && !ipc->pending) {  in ipc_tx_msgs()
    170  msg = list_first_entry(&ipc->tx_list, struct ipc_message, list);  in ipc_tx_msgs()
    261  INIT_LIST_HEAD(&ipc->tx_list);  in sst_ipc_init()
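
Both sst-ipc copies show a single-consumer TX queue gated by a pending flag: ipc_tx_msgs() pops the head with list_first_entry() until the list empties or a message is in flight. A sketch of that drain loop, assuming hypothetical demo_* types and with the driver's locking elided for brevity:

#include <linux/list.h>

struct demo_msg {
        struct list_head list;
};

struct demo_ipc {
        struct list_head tx_list;       /* INIT_LIST_HEAD() at init, as above */
        bool pending;                   /* cleared when the DSP acknowledges */
};

static void demo_tx_msgs(struct demo_ipc *ipc)
{
        struct demo_msg *msg;

        /* one message in flight at a time, FIFO order */
        while (!list_empty(&ipc->tx_list) && !ipc->pending) {
                msg = list_first_entry(&ipc->tx_list, struct demo_msg, list);
                list_del(&msg->list);
                ipc->pending = true;
                /* hand msg to the hardware here */
        }
}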

/kernel/linux/linux-5.10/drivers/hid/intel-ish-hid/ishtp/client-buffers.c
    148  /* release allocated memory - pass over tx_list */  in ishtp_cl_free_tx_ring()
    149  while (!list_empty(&cl->tx_list.list)) {  in ishtp_cl_free_tx_ring()
    150  tx_buf = list_entry(cl->tx_list.list.next,  in ishtp_cl_free_tx_ring()
    268  tx_list_empty = list_empty(&cl->tx_list.list);  in ishtp_cl_tx_empty()

/kernel/linux/linux-5.10/drivers/hid/intel-ish-hid/ishtp/client.c
    97   INIT_LIST_HEAD(&cl->tx_list.list);  in ishtp_cl_init()
    591  have_msg_to_send = !list_empty(&cl->tx_list.list);  in ishtp_cl_send()
    592  list_add_tail(&cl_msg->list, &cl->tx_list.list);  in ishtp_cl_send()
    660  if (list_empty(&cl->tx_list.list)) {  in ipc_tx_send()
    677  cl_msg = list_entry(cl->tx_list.list.next, struct ishtp_cl_tx_ring,  in ipc_tx_send()
    759  if (list_empty(&cl->tx_list.list)) {  in ishtp_cl_send_msg_dma()
    764  cl_msg = list_entry(cl->tx_list.list.next, struct ishtp_cl_tx_ring,  in ishtp_cl_send_msg_dma()

/kernel/linux/linux-6.6/drivers/hid/intel-ish-hid/ishtp/client-buffers.c
    148  /* release allocated memory - pass over tx_list */  in ishtp_cl_free_tx_ring()
    149  while (!list_empty(&cl->tx_list.list)) {  in ishtp_cl_free_tx_ring()
    150  tx_buf = list_entry(cl->tx_list.list.next,  in ishtp_cl_free_tx_ring()
    268  tx_list_empty = list_empty(&cl->tx_list.list);  in ishtp_cl_tx_empty()

/kernel/linux/linux-6.6/drivers/hid/intel-ish-hid/ishtp/client.c
    98   INIT_LIST_HEAD(&cl->tx_list.list);  in ishtp_cl_init()
    591  have_msg_to_send = !list_empty(&cl->tx_list.list);  in ishtp_cl_send()
    592  list_add_tail(&cl_msg->list, &cl->tx_list.list);  in ishtp_cl_send()
    660  if (list_empty(&cl->tx_list.list)) {  in ipc_tx_send()
    677  cl_msg = list_entry(cl->tx_list.list.next, struct ishtp_cl_tx_ring,  in ipc_tx_send()
    759  if (list_empty(&cl->tx_list.list)) {  in ishtp_cl_send_msg_dma()
    764  cl_msg = list_entry(cl->tx_list.list.next, struct ishtp_cl_tx_ring,  in ishtp_cl_send_msg_dma()
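
The ishtp client keeps its ring on a nested tx_list.list head, and teardown pops entries with list_entry() on ->next (the open-coded form of list_first_entry()) until the list is empty. A sketch of ishtp_cl_free_tx_ring()'s loop, with a hypothetical demo_tx_ring type:

#include <linux/list.h>
#include <linux/slab.h>

struct demo_tx_ring {
        struct list_head list;
};

static void demo_free_tx_ring(struct list_head *tx_list)
{
        struct demo_tx_ring *tx_buf;

        /* release allocated memory - pass over tx_list */
        while (!list_empty(tx_list)) {
                tx_buf = list_entry(tx_list->next, struct demo_tx_ring, list);
                list_del(&tx_buf->list);
                kfree(tx_buf);
        }
}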

/kernel/linux/linux-6.6/drivers/infiniband/hw/hfi1/ipoib_tx.c
    412  &txq->tx_list,  in hfi1_ipoib_submit_tx_list()
    427  if (!list_empty(&txq->tx_list)) {  in hfi1_ipoib_flush_tx_list()
    546  list_add_tail(&tx->txreq.list, &txq->tx_list);  in hfi1_ipoib_send_dma_list()
    599  if (netdev_xmit_more() || !list_empty(&txp.txq->tx_list))  in hfi1_ipoib_send()
    632  list_add_tail(&txreq->list, &txq->tx_list);  in hfi1_ipoib_sdma_sleep()
    719  INIT_LIST_HEAD(&txq->tx_list);  in hfi1_ipoib_txreq_init()
    775  list_for_each_entry_safe(txreq, txreq_tmp, &txq->tx_list, list) {  in hfi1_ipoib_drain_tx_list()
    865  dd_dev_info(priv->dd, "tx_list empty %u\n",  in hfi1_ipoib_tx_timeout()
    866  list_empty(&txq->tx_list));  in hfi1_ipoib_tx_timeout()
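
hfi1's IPoIB path (the 5.10 copy of ipoib_tx.c appears further down this list) uses tx_list as a staging area: requests park there when netdev_xmit_more() promises more packets or when the SDMA ring is busy (hfi1_ipoib_sdma_sleep()), and a flush submits the accumulated batch. A sketch of the defer/flush pair, hypothetical demo_* types:

#include <linux/list.h>

struct demo_txreq {
        struct list_head list;
};

struct demo_txq {
        struct list_head tx_list;       /* INIT_LIST_HEAD() at txq init */
};

/* ring busy or more packets coming: batch instead of submitting now */
static void demo_defer(struct demo_txq *txq, struct demo_txreq *tx)
{
        list_add_tail(&tx->list, &txq->tx_list);
}

/* submit everything accumulated on tx_list in one pass */
static void demo_flush_tx_list(struct demo_txq *txq)
{
        struct demo_txreq *tx, *tmp;

        if (list_empty(&txq->tx_list))
                return;
        list_for_each_entry_safe(tx, tmp, &txq->tx_list, list) {
                list_del(&tx->list);
                /* post tx to the SDMA engine here */
        }
}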

/kernel/linux/linux-5.10/drivers/dma/txx9dmac.c
    182  if (!list_empty(&desc->tx_list))  in txx9dmac_last_child()
    183  desc = list_entry(desc->tx_list.prev, typeof(*desc), desc_node);  in txx9dmac_last_child()
    198  INIT_LIST_HEAD(&desc->tx_list);  in txx9dmac_desc_alloc()
    247  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_sync_desc_for_cpu()
    269  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_desc_put()
    273  list_splice_init(&desc->tx_list, &dc->free_list);  in txx9dmac_desc_put()
    412  list_splice_init(&desc->tx_list, &dc->free_list);  in txx9dmac_descriptor_complete()
    530  list_for_each_entry(child, &bad_desc->tx_list, desc_node)  in txx9dmac_handle_error()
    572  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_scan_descriptors()
    769  * The descriptors on tx_list ar  in txx9dmac_prep_dma_memcpy()
    [all...]

/kernel/linux/linux-5.10/drivers/dma/altera-msgdma.c
    157  * @tx_list: transmit list node
    163  struct list_head tx_list;  member
    219  INIT_LIST_HEAD(&desc->tx_list);  in msgdma_get_descriptor()
    236  list_for_each_entry_safe(child, next, &desc->tx_list, node) {  in msgdma_free_descriptor()
    366  list_add_tail(&new->node, &first->tx_list);  in msgdma_prep_memcpy()
    440  list_add_tail(&new->node, &first->tx_list);  in msgdma_prep_slave_sg()
    540  list_for_each_entry_safe(sdesc, next, &desc->tx_list, node)  in msgdma_copy_desc_to_fifo()

/kernel/linux/linux-5.10/drivers/dma/mmp_pdma.c
    84   struct list_head tx_list;  member
    340  /* desc->tx_list ==> pending list */
    351  list_for_each_entry(child, &desc->tx_list, node) {  in mmp_pdma_tx_submit()
    355  /* softly link to pending list - desc->tx_list ==> pending list */  in mmp_pdma_tx_submit()
    356  list_splice_tail_init(&desc->tx_list, &chan->chain_pending);  in mmp_pdma_tx_submit()
    375  INIT_LIST_HEAD(&desc->tx_list);  in mmp_pdma_alloc_descriptor()
    505  list_add_tail(&new->node, &first->tx_list);  in mmp_pdma_prep_memcpy()
    521  mmp_pdma_free_desc_list(chan, &first->tx_list);  in mmp_pdma_prep_memcpy()
    579  list_add_tail(&new->node, &first->tx_list);  in mmp_pdma_prep_slave_sg()
    601  mmp_pdma_free_desc_list(chan, &first->tx_list);  in mmp_pdma_prep_slave_sg()
    [all...]

/kernel/linux/linux-5.10/drivers/dma/ep93xx_dma.c
    119  * @tx_list: list of linked descriptors
    128  struct list_head tx_list;  member
    247  /* Flatten the @desc->tx_list chain into @edmac->active list */  in ep93xx_dma_set_active()
    248  while (!list_empty(&desc->tx_list)) {  in ep93xx_dma_set_active()
    249  struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list,  in ep93xx_dma_set_active()
    712  list_splice_init(&desc->tx_list, &edmac->free_list);  in ep93xx_dma_desc_put()
    925  INIT_LIST_HEAD(&desc->tx_list);  in ep93xx_dma_alloc_chan_resources()
    1011 list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_dma_memcpy()
    1086 list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_slave_sg()
    1167 list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_dma_cyclic()
    [all...]
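
The dmaengine drivers in this directory share one prep-time idiom: the first descriptor of a transfer collects every follow-on descriptor on its tx_list, so the whole scatter-gather chain travels as a single unit. A sketch of that accumulation loop (hypothetical demo types; the real drivers unwind via helpers such as mmp_pdma_free_desc_list() on allocation failure):

#include <linux/list.h>
#include <linux/slab.h>

struct demo_desc {
        struct list_head node;          /* link on a parent's tx_list */
        struct list_head tx_list;       /* children, when this is "first" */
};

static struct demo_desc *demo_prep_chain(int nents)
{
        struct demo_desc *first = NULL, *new;
        int i;

        for (i = 0; i < nents; i++) {
                new = kzalloc(sizeof(*new), GFP_KERNEL);
                if (!new)
                        goto err;
                INIT_LIST_HEAD(&new->tx_list);
                if (!first)
                        first = new;            /* head of the transfer */
                else
                        list_add_tail(&new->node, &first->tx_list);
        }
        return first;

err:
        if (first) {
                struct demo_desc *child, *tmp;

                /* free the children, then the head */
                list_for_each_entry_safe(child, tmp, &first->tx_list, node)
                        kfree(child);
                kfree(first);
        }
        return NULL;
}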

/kernel/linux/linux-6.6/drivers/dma/txx9dmac.c
    182  if (!list_empty(&desc->tx_list))  in txx9dmac_last_child()
    183  desc = list_entry(desc->tx_list.prev, typeof(*desc), desc_node);  in txx9dmac_last_child()
    198  INIT_LIST_HEAD(&desc->tx_list);  in txx9dmac_desc_alloc()
    247  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_sync_desc_for_cpu()
    269  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_desc_put()
    273  list_splice_init(&desc->tx_list, &dc->free_list);  in txx9dmac_desc_put()
    412  list_splice_init(&desc->tx_list, &dc->free_list);  in txx9dmac_descriptor_complete()
    530  list_for_each_entry(child, &bad_desc->tx_list, desc_node)  in txx9dmac_handle_error()
    572  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_scan_descriptors()
    769  * The descriptors on tx_list ar  in txx9dmac_prep_dma_memcpy()
    [all...]

/kernel/linux/linux-6.6/drivers/dma/mmp_pdma.c
    83   struct list_head tx_list;  member
    339  /* desc->tx_list ==> pending list */
    350  list_for_each_entry(child, &desc->tx_list, node) {  in mmp_pdma_tx_submit()
    354  /* softly link to pending list - desc->tx_list ==> pending list */  in mmp_pdma_tx_submit()
    355  list_splice_tail_init(&desc->tx_list, &chan->chain_pending);  in mmp_pdma_tx_submit()
    374  INIT_LIST_HEAD(&desc->tx_list);  in mmp_pdma_alloc_descriptor()
    504  list_add_tail(&new->node, &first->tx_list);  in mmp_pdma_prep_memcpy()
    520  mmp_pdma_free_desc_list(chan, &first->tx_list);  in mmp_pdma_prep_memcpy()
    578  list_add_tail(&new->node, &first->tx_list);  in mmp_pdma_prep_slave_sg()
    600  mmp_pdma_free_desc_list(chan, &first->tx_list);  in mmp_pdma_prep_slave_sg()
    [all...]

/kernel/linux/linux-6.6/drivers/dma/altera-msgdma.c
    158  * @tx_list: transmit list node
    164  struct list_head tx_list;  member
    220  INIT_LIST_HEAD(&desc->tx_list);  in msgdma_get_descriptor()
    237  list_for_each_entry_safe(child, next, &desc->tx_list, node) {  in msgdma_free_descriptor()
    367  list_add_tail(&new->node, &first->tx_list);  in msgdma_prep_memcpy()
    441  list_add_tail(&new->node, &first->tx_list);  in msgdma_prep_slave_sg()
    541  list_for_each_entry_safe(sdesc, next, &desc->tx_list, node)  in msgdma_copy_desc_to_fifo()

/kernel/linux/linux-6.6/drivers/dma/ep93xx_dma.c
    119  * @tx_list: list of linked descriptors
    128  struct list_head tx_list;  member
    247  /* Flatten the @desc->tx_list chain into @edmac->active list */  in ep93xx_dma_set_active()
    248  while (!list_empty(&desc->tx_list)) {  in ep93xx_dma_set_active()
    249  struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list,  in ep93xx_dma_set_active()
    712  list_splice_init(&desc->tx_list, &edmac->free_list);  in ep93xx_dma_desc_put()
    925  INIT_LIST_HEAD(&desc->tx_list);  in ep93xx_dma_alloc_chan_resources()
    1011 list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_dma_memcpy()
    1086 list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_slave_sg()
    1167 list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_dma_cyclic()
    [all...]
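
The completion side is the mirror image of the prep loop above: ep93xx_dma_desc_put() and txx9dmac_descriptor_complete() recycle a finished transfer by splicing the whole child chain back onto the channel's free list in O(1), then returning the head descriptor itself. A sketch with hypothetical types:

#include <linux/list.h>

struct demo_desc {
        struct list_head node;
        struct list_head tx_list;
};

struct demo_chan {
        struct list_head free_list;
};

static void demo_desc_put(struct demo_chan *dc, struct demo_desc *desc)
{
        /* children first: one splice moves the entire chain */
        list_splice_init(&desc->tx_list, &dc->free_list);
        /* then the head descriptor itself */
        list_add_tail(&desc->node, &dc->free_list);
}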

/kernel/linux/linux-5.10/drivers/dma/dw/core.c
    91   INIT_LIST_HEAD(&desc->tx_list);  in dwc_desc_get()
    107  list_for_each_entry_safe(child, _next, &desc->tx_list, desc_node) {  in dwc_desc_put()
    203  dwc->tx_node_active = &first->tx_list;  in dwc_dostart()
    253  list_for_each_entry(child, &desc->tx_list, desc_node)  in dwc_descriptor_complete()
    325  head = &desc->tx_list;  in dwc_scan_descriptors()
    384  list_for_each_entry(child, &desc->tx_list, desc_node) {  in dwc_scan_descriptors()
    457  list_for_each_entry(child, &bad_desc->tx_list, desc_node)  in dwc_handle_error()
    595  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_dma_memcpy()
    685  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_slave_sg()
    734  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_slave_sg()
    [all...]

/kernel/linux/linux-6.6/drivers/dma/dw/core.c
    88   INIT_LIST_HEAD(&desc->tx_list);  in dwc_desc_get()
    104  list_for_each_entry_safe(child, _next, &desc->tx_list, desc_node) {  in dwc_desc_put()
    200  dwc->tx_node_active = &first->tx_list;  in dwc_dostart()
    250  list_for_each_entry(child, &desc->tx_list, desc_node)  in dwc_descriptor_complete()
    322  head = &desc->tx_list;  in dwc_scan_descriptors()
    381  list_for_each_entry(child, &desc->tx_list, desc_node) {  in dwc_scan_descriptors()
    454  list_for_each_entry(child, &bad_desc->tx_list, desc_node)  in dwc_handle_error()
    592  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_dma_memcpy()
    682  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_slave_sg()
    731  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_slave_sg()
    [all...]
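
dw/core.c and txx9dmac both need the *last* descriptor of a chain (to terminate hardware linked-list entries or to know where a transfer ends); since children live on tx_list, that is simply the list's ->prev tail, falling back to the head descriptor when there are no children. txx9dmac_last_child() at lines 182-183 above is exactly this; a self-contained rendering:

#include <linux/list.h>

struct demo_desc {
        struct list_head desc_node;
        struct list_head tx_list;
};

static struct demo_desc *demo_last_child(struct demo_desc *desc)
{
        if (!list_empty(&desc->tx_list))
                desc = list_entry(desc->tx_list.prev, typeof(*desc),
                                  desc_node);
        return desc;                    /* childless: the head is the tail */
}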

/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/ipoib_tx.c
    436  &txq->tx_list,  in hfi1_ipoib_submit_tx_list()
    451  if (!list_empty(&txq->tx_list)) {  in hfi1_ipoib_flush_tx_list()
    560  list_add_tail(&tx->txreq.list, &txq->tx_list);  in hfi1_ipoib_send_dma_list()
    613  if (netdev_xmit_more() || !list_empty(&txp.txq->tx_list))  in hfi1_ipoib_send_dma()
    646  list_add_tail(&txreq->list, &txq->tx_list);  in hfi1_ipoib_sdma_sleep()
    743  INIT_LIST_HEAD(&txq->tx_list);  in hfi1_ipoib_txreq_init()
    802  list_for_each_entry_safe(txreq, txreq_tmp, &txq->tx_list, list) {  in hfi1_ipoib_drain_tx_list()

/kernel/linux/linux-5.10/drivers/net/ethernet/cavium/octeon/octeon_mgmt.c
    130  /* The tx_list lock also protects the ring related variables */
    131  struct sk_buff_head tx_list;  member
    263  spin_lock_irqsave(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    268  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    279  skb = __skb_dequeue(&p->tx_list);  in octeon_mgmt_clean_tx_buffers()
    288  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    1262 skb_queue_purge(&p->tx_list);  in octeon_mgmt_stop()
    1293 spin_lock_irqsave(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
    1296 spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
    1298 spin_lock_irqsave(&p->tx_list  in octeon_mgmt_xmit()
    [all...]

/kernel/linux/linux-6.6/drivers/net/ethernet/cavium/octeon/octeon_mgmt.c
    130  /* The tx_list lock also protects the ring related variables */
    131  struct sk_buff_head tx_list;  member
    263  spin_lock_irqsave(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    268  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    279  skb = __skb_dequeue(&p->tx_list);  in octeon_mgmt_clean_tx_buffers()
    288  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    1259 skb_queue_purge(&p->tx_list);  in octeon_mgmt_stop()
    1290 spin_lock_irqsave(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
    1293 spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
    1295 spin_lock_irqsave(&p->tx_list  in octeon_mgmt_xmit()
    [all...]
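
octeon_mgmt is the odd one out: its tx_list is a struct sk_buff_head, which bundles the list head and spinlock together, so the driver locks p->tx_list.lock directly, dequeues with __skb_dequeue(), and tears down with a single skb_queue_purge(). A sketch of that trio with a hypothetical demo_port (queue initialized elsewhere with skb_queue_head_init()):

#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/spinlock.h>

struct demo_port {
        struct sk_buff_head tx_list;    /* skb_queue_head_init() at probe */
};

/* xmit: append under the queue's own lock */
static void demo_xmit(struct demo_port *p, struct sk_buff *skb)
{
        unsigned long flags;

        spin_lock_irqsave(&p->tx_list.lock, flags);
        __skb_queue_tail(&p->tx_list, skb);     /* __ variant: lock already held */
        spin_unlock_irqrestore(&p->tx_list.lock, flags);
}

/* cleanup: pop completed skbs */
static void demo_clean_tx(struct demo_port *p)
{
        struct sk_buff *skb;
        unsigned long flags;

        spin_lock_irqsave(&p->tx_list.lock, flags);
        while ((skb = __skb_dequeue(&p->tx_list)) != NULL)
                dev_kfree_skb_any(skb);
        spin_unlock_irqrestore(&p->tx_list.lock, flags);
}

/* stop: drop anything still queued */
static void demo_stop(struct demo_port *p)
{
        skb_queue_purge(&p->tx_list);
}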

/kernel/linux/linux-5.10/drivers/dma/xilinx/zynqmp_dma.c
    172  * @tx_list: List head for the current transfer
    184  struct list_head tx_list;  member
    385  if (!list_empty(&desc->tx_list))  in zynqmp_dma_tx_submit()
    386  desc = list_last_entry(&desc->tx_list,  in zynqmp_dma_tx_submit()
    418  INIT_LIST_HEAD(&desc->tx_list);  in zynqmp_dma_get_descriptor()
    439  list_for_each_entry_safe(child, next, &sdesc->tx_list, node) {  in zynqmp_dma_free_descriptor()
    845  list_add_tail(&new->node, &first->tx_list);  in zynqmp_dma_prep_memcpy()

/kernel/linux/linux-6.6/drivers/dma/xilinx/zynqmp_dma.c
    170  * @tx_list: List head for the current transfer
    182  struct list_head tx_list;  member
    383  if (!list_empty(&desc->tx_list))  in zynqmp_dma_tx_submit()
    384  desc = list_last_entry(&desc->tx_list,  in zynqmp_dma_tx_submit()
    416  INIT_LIST_HEAD(&desc->tx_list);  in zynqmp_dma_get_descriptor()
    436  list_for_each_entry_safe(child, next, &sdesc->tx_list, node) {  in zynqmp_dma_free_descriptor()
    859  list_add_tail(&new->node, &first->tx_list);  in zynqmp_dma_prep_memcpy()
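
zynqmp_dma's submit step performs the same tail lookup as the dw/txx9 entries above, but with the modern list_last_entry() helper instead of open-coded list_entry(...prev...). A short sketch (hypothetical types):

#include <linux/list.h>

struct demo_desc {
        struct list_head node;
        struct list_head tx_list;
};

/* hardware link fields must be terminated on the chain's real tail */
static struct demo_desc *demo_tx_tail(struct demo_desc *desc)
{
        if (!list_empty(&desc->tx_list))
                desc = list_last_entry(&desc->tx_list, struct demo_desc,
                                       node);
        return desc;
}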

/kernel/linux/linux-5.10/include/linux/platform_data/dma-iop32x.h
    76   * @tx_list: list of descriptors that are associated with one operation
    91   struct list_head tx_list;  member
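
The iop32x header documents its tx_list member with kernel-doc, the same convention the altera-msgdma, ep93xx_dma, and zynqmp_dma structs above follow. A minimal kernel-doc sketch for a hypothetical descriptor of the same shape:

/**
 * struct demo_desc - software descriptor tracking one DMA operation
 * @node: linkage on the channel's descriptor lists
 * @tx_list: list of descriptors that are associated with one operation
 */
struct demo_desc {
        struct list_head node;
        struct list_head tx_list;
};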