Lines matching defs:fifo (occurrences of the fifo symbol in the vxge Ethernet driver; the leading number on each match is the source line, grouped below by the function it falls in)
97 static inline void VXGE_COMPLETE_VPATH_TX(struct vxge_fifo *fifo)
109 if (__netif_tx_trylock(fifo->txq)) {
110 vxge_hw_vpath_poll_tx(fifo->handle, &skb_ptr,
112 __netif_tx_unlock(fifo->txq);
127 VXGE_COMPLETE_VPATH_TX(&vdev->vpaths[i].fifo);
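The matches at 97-127 are the deferred TX completion helper. It reaps finished descriptors only if it can take the TX queue lock without spinning, so a concurrent xmit on another CPU is never blocked, and the skbs are freed outside the lock. A sketch of that trylock pattern, reconstructed from the matches (the batch size and the poll helper's out-parameters are assumptions based on the driver's style, not verified against the source):

    #define NR_SKB_COMPLETED 16		/* assumed batch size */

    static inline void VXGE_COMPLETE_VPATH_TX(struct vxge_fifo *fifo)
    {
        struct sk_buff *completed[NR_SKB_COMPLETED];
        struct sk_buff **skb_ptr, **temp;
        int more;

        do {
            more = 0;
            skb_ptr = completed;

            /* If another CPU holds the TX lock, skip the reap entirely;
             * the next interrupt or xmit will pick up the work. */
            if (__netif_tx_trylock(fifo->txq)) {
                vxge_hw_vpath_poll_tx(fifo->handle, &skb_ptr,
                                      NR_SKB_COMPLETED, &more);
                __netif_tx_unlock(fifo->txq);
            }

            /* Free the completed skbs outside the TX lock. */
            for (temp = completed; temp != skb_ptr; temp++)
                dev_consume_skb_irq(*temp);
        } while (more);
    }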
551 struct vxge_fifo *fifo = (struct vxge_fifo *)userdata;
571 "tcode = 0x%x", fifo->ndev->name, __func__,
577 fifo->ndev->name, __func__, __LINE__,
580 fifo->stats.tx_errors++;
583 "error t_code %01x", fifo->ndev->name,
589 dma_unmap_single(&fifo->pdev->dev, txd_priv->dma_buffers[i++],
593 dma_unmap_page(&fifo->pdev->dev,
602 u64_stats_update_begin(&fifo->stats.syncp);
603 fifo->stats.tx_frms++;
604 fifo->stats.tx_bytes += skb->len;
605 u64_stats_update_end(&fifo->stats.syncp);
615 if (pkt_cnt > fifo->indicate_max_pkts)
622 if (netif_tx_queue_stopped(fifo->txq))
623 netif_tx_wake_queue(fifo->txq);
627 fifo->ndev->name, __func__, __LINE__);
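vxge_xmit_compl() (matches 551-627) is the per-descriptor completion callback: it unmaps the head with dma_unmap_single() and each fragment with dma_unmap_page(), counts errors by t_code, and bumps the per-fifo counters inside a u64_stats sequence so 64-bit counters read consistently on 32-bit hosts. The reader side of that same syncp, as it would appear in an ndo_get_stats64 handler (a sketch; field names are taken from the matches above, and the exact fetch variant, plain vs. _irq/_bh, depends on kernel version):

    unsigned int start;
    u64 frms, bytes;

    do {
        start = u64_stats_fetch_begin(&fifo->stats.syncp);
        frms  = fifo->stats.tx_frms;
        bytes = fifo->stats.tx_bytes;
    } while (u64_stats_fetch_retry(&fifo->stats.syncp, start));

Once enough descriptors are reclaimed, the handler re-wakes the queue (622-623), pairing with the netif_tx_stop_queue() calls in the xmit path.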
806 struct vxge_fifo *fifo = NULL;
861 fifo = &vdev->vpaths[vpath_no].fifo;
862 fifo_hw = fifo->handle;
864 if (netif_tx_queue_stopped(fifo->txq))
871 fifo->stats.txd_not_free++;
879 netif_tx_stop_queue(fifo->txq);
885 fifo->stats.txd_out_of_desc++;
901 dma_pointer = dma_map_single(&fifo->pdev->dev, skb->data,
904 if (unlikely(dma_mapping_error(&fifo->pdev->dev, dma_pointer))) {
906 fifo->stats.pci_map_fail++;
930 dma_pointer = (u64)skb_frag_dma_map(&fifo->pdev->dev, frag,
934 if (unlikely(dma_mapping_error(&fifo->pdev->dev, dma_pointer)))
981 dma_unmap_single(&fifo->pdev->dev, txdl_priv->dma_buffers[j++],
985 dma_unmap_page(&fifo->pdev->dev, txdl_priv->dma_buffers[j],
992 netif_tx_stop_queue(fifo->txq);
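vxge_xmit() (matches 806-992) streaming-maps the packet before posting it: the linear area with dma_map_single(), each page fragment with skb_frag_dma_map(), checking dma_mapping_error() after every map. On failure it walks back and unmaps whatever succeeded (981-985), and it stops the queue when descriptors run out (879, 992). The map-and-check shape, as a sketch (the unwind labels are illustrative; the driver's actual error path differs in detail):

    dma_addr_t dma_pointer;
    int i;

    dma_pointer = dma_map_single(&fifo->pdev->dev, skb->data,
                                 skb_headlen(skb), DMA_TO_DEVICE);
    if (unlikely(dma_mapping_error(&fifo->pdev->dev, dma_pointer)))
        goto _exit_drop;		/* count pci_map_fail, drop the skb */

    for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
        const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

        dma_pointer = skb_frag_dma_map(&fifo->pdev->dev, frag, 0,
                                       skb_frag_size(frag), DMA_TO_DEVICE);
        if (unlikely(dma_mapping_error(&fifo->pdev->dev, dma_pointer)))
            goto _exit_unwind;		/* unmap everything mapped so far */
    }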
1035 struct vxge_fifo *fifo = (struct vxge_fifo *)userdata;
1052 dma_unmap_single(&fifo->pdev->dev, txd_priv->dma_buffers[i++],
1056 dma_unmap_page(&fifo->pdev->dev, txd_priv->dma_buffers[i++],
1571 if (netif_tx_queue_stopped(vpath->fifo.txq))
1572 netif_tx_wake_queue(vpath->fifo.txq);
1594 struct __vxge_hw_fifo *hw_fifo = vdev->vpaths[i].fifo.handle;
2053 attr.fifo_attr.userdata = &vpath->fifo;
2066 vpath->fifo.handle =
2070 vpath->fifo.tx_steering_type =
2072 vpath->fifo.ndev = vdev->ndev;
2073 vpath->fifo.pdev = vdev->pdev;
2075 u64_stats_init(&vpath->fifo.stats.syncp);
2079 vpath->fifo.txq =
2082 vpath->fifo.txq =
2084 vpath->fifo.indicate_max_pkts =
2086 vpath->fifo.tx_vector_no = 0;
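The open path (matches 2053-2086) wires each vpath's fifo to its HW handle, the netdev, and a TX queue, and seeds the stats syncp. With multiqueue steering every vpath gets its own netdev TX queue; otherwise all vpaths share queue 0. A sketch of that setup (the TX_MULTIQ_STEERING test is inferred from the two alternative txq assignments at 2079/2082):

    vpath->fifo.ndev = vdev->ndev;
    vpath->fifo.pdev = vdev->pdev;
    u64_stats_init(&vpath->fifo.stats.syncp);

    if (vdev->config.tx_steering_type == TX_MULTIQ_STEERING)
        vpath->fifo.txq =
            netdev_get_tx_queue(vdev->ndev, vpath->device_id);
    else
        vpath->fifo.txq = netdev_get_tx_queue(vdev->ndev, 0);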
2113 * @fifo: pointer to transmit fifo structure
2118 static void adaptive_coalesce_tx_interrupts(struct vxge_fifo *fifo)
2120 fifo->interrupt_count++;
2121 if (time_before(fifo->jiffies + HZ / 100, jiffies)) {
2122 struct __vxge_hw_fifo *hw_fifo = fifo->handle;
2124 fifo->jiffies = jiffies;
2125 if (fifo->interrupt_count > VXGE_T1A_MAX_TX_INTERRUPT_COUNT &&
2133 fifo->interrupt_count = 0;
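adaptive_coalesce_tx_interrupts() (matches 2113-2133) is a simple windowed heuristic: count TX interrupts, and once per ~10 ms window (HZ / 100) compare the count against VXGE_T1A_MAX_TX_INTERRUPT_COUNT. Above the threshold, the restriction timer is raised to batch completions; otherwise it drops back to zero for low latency. Filled out from the matches (the rtimer field and the dynamic-TTI setter are assumptions consistent with the driver's naming):

    static void adaptive_coalesce_tx_interrupts(struct vxge_fifo *fifo)
    {
        fifo->interrupt_count++;
        if (time_before(fifo->jiffies + HZ / 100, jiffies)) {
            struct __vxge_hw_fifo *hw_fifo = fifo->handle;

            fifo->jiffies = jiffies;
            if (fifo->interrupt_count > VXGE_T1A_MAX_TX_INTERRUPT_COUNT &&
                hw_fifo->rtimer != VXGE_TTI_RTIMER_ADAPT_VAL) {
                hw_fifo->rtimer = VXGE_TTI_RTIMER_ADAPT_VAL;
                vxge_hw_vpath_dynamic_tti_rtimer_set(hw_fifo);
            } else if (hw_fifo->rtimer != 0) {
                hw_fifo->rtimer = 0;	/* back to immediate interrupts */
                vxge_hw_vpath_dynamic_tti_rtimer_set(hw_fifo);
            }
            fifo->interrupt_count = 0;
        }
    }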
2223 struct vxge_fifo *fifo = (struct vxge_fifo *)dev_id;
2225 adaptive_coalesce_tx_interrupts(fifo);
2227 vxge_hw_channel_msix_mask((struct __vxge_hw_channel *)fifo->handle,
2228 fifo->tx_vector_no);
2230 vxge_hw_channel_msix_clear((struct __vxge_hw_channel *)fifo->handle,
2231 fifo->tx_vector_no);
2233 VXGE_COMPLETE_VPATH_TX(fifo);
2235 vxge_hw_channel_msix_unmask((struct __vxge_hw_channel *)fifo->handle,
2236 fifo->tx_vector_no);
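The per-fifo MSI-X TX handler (matches 2223-2236) follows the usual quiesce/ack/reap/re-arm sequence around the completion helper. Assembled directly from the matches, with only the return value added:

    static irqreturn_t vxge_tx_msix_handle(int irq, void *dev_id)
    {
        struct vxge_fifo *fifo = (struct vxge_fifo *)dev_id;

        adaptive_coalesce_tx_interrupts(fifo);

        /* Mask and ack the vector so no new TX interrupt fires
         * while completions are being reaped. */
        vxge_hw_channel_msix_mask((struct __vxge_hw_channel *)fifo->handle,
                                  fifo->tx_vector_no);
        vxge_hw_channel_msix_clear((struct __vxge_hw_channel *)fifo->handle,
                                   fifo->tx_vector_no);

        VXGE_COMPLETE_VPATH_TX(fifo);

        vxge_hw_channel_msix_unmask((struct __vxge_hw_channel *)fifo->handle,
                                    fifo->tx_vector_no);

        return IRQ_HANDLED;
    }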
2326 /* Initialize the fifo vector */
2395 /* If fifo or ring are not enabled, the MSIX vector for
2401 vpath->fifo.tx_vector_no = (vpath->device_id *
2482 &vdev->vpaths[vp_idx].fifo);
2484 &vdev->vpaths[vp_idx].fifo;
2571 vxge_hw_vpath_tti_ci_set(vdev->vpaths[0].fifo.handle);
2640 netif_tx_stop_queue(vpath->fifo.txq);
3112 struct vxge_fifo_stats *txstats = &vdev->vpaths[k].fifo.stats;
3603 netif_tx_stop_queue(vpath->fifo.txq);
3617 /* 1. If user enters 0 for some fifo, give equal priority to all */
3745 /* Configure Tx fifo's */
3746 device_config->vp_config[i].fifo.enable =
3748 device_config->vp_config[i].fifo.max_frags =
3750 device_config->vp_config[i].fifo.memblock_size =
3753 txdl_size = device_config->vp_config[i].fifo.max_frags *
3757 device_config->vp_config[i].fifo.fifo_blocks =
3760 device_config->vp_config[i].fifo.intr =
4000 config.vp_config[i].fifo.max_frags);
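The probe-time configuration (matches 3745-3760) sizes each fifo from max_frags: the TXDL footprint is max_frags times the per-descriptor size, and fifo_blocks is however many memblocks it takes to hold the default queue depth. The arithmetic, as a sketch (VXGE_DEF_FIFO_LENGTH and the txd struct name follow the driver's conventions but are assumptions here):

    u32 txdl_size, txdl_per_memblock;

    device_config->vp_config[i].fifo.memblock_size =
            VXGE_HW_MIN_FIFO_MEMBLOCK_SIZE;

    txdl_size = device_config->vp_config[i].fifo.max_frags *
            sizeof(struct vxge_hw_fifo_txd);
    txdl_per_memblock = VXGE_HW_MIN_FIFO_MEMBLOCK_SIZE / txdl_size;

    /* Round up so the requested number of TXDLs always fits. */
    device_config->vp_config[i].fifo.fifo_blocks =
            ((VXGE_DEF_FIFO_LENGTH - 1) / txdl_per_memblock) + 1;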