Lines Matching defs:ring

58  * Each data frame passed to the high-speed DMA ring has this header. If
145 struct tb_ring *ring;
175 * @rx_ring: Software ring holding Rx frames
178 * @tx_ring: Software ring holding Tx frames
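
Taken together, lines 145-178 point at the driver's central data structure: each direction gets a software ring that wraps a hardware struct tb_ring from the Thunderbolt NHI plus an array of frame descriptors tracked by free-running producer/consumer counters. A minimal sketch of that layout, inferred only from the lines in this listing (field order and the TBNET_RING_SIZE value are assumptions, not the verbatim definition):

    #define TBNET_RING_SIZE 256    /* assumed; must be a power of two for the masking below */

    struct tbnet_ring {
            struct tbnet_frame frames[TBNET_RING_SIZE]; /* software frame descriptors */
            unsigned int cons;      /* consumer counter, free-running */
            unsigned int prod;      /* producer counter, free-running */
            /* Backing high-speed DMA ring allocated from the NHI (line 145) */
            struct tb_ring *ring;
    };

Two instances of this, rx_ring and tx_ring, live in the per-device tbnet structure (lines 175 and 178).
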
334 static void tbnet_free_buffers(struct tbnet_ring *ring)
339 struct device *dma_dev = tb_ring_dma_device(ring->ring);
340 struct tbnet_frame *tf = &ring->frames[i];
348 if (ring->ring->is_tx) {
368 ring->cons = 0;
369 ring->prod = 0;
392 tb_ring_stop(net->rx_ring.ring);
393 tb_ring_stop(net->tx_ring.ring);
399 net->rx_ring.ring->hop,
401 net->tx_ring.ring->hop);
494 static unsigned int tbnet_available_buffers(const struct tbnet_ring *ring)
496 return ring->prod - ring->cons;
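
tbnet_available_buffers() (lines 494-496) and the index computations on lines 506 and 558 rely on the counters being free-running unsigned integers: prod - cons gives the number of buffers currently held even after the counters wrap, and masking with TBNET_RING_SIZE - 1 maps a counter onto a slot. A small standalone demonstration of that arithmetic (the concrete values are made up for illustration):

    #include <stdio.h>

    #define TBNET_RING_SIZE 256u    /* assumed power-of-two ring size */

    int main(void)
    {
            /* Free-running counters: prod has already wrapped past
             * UINT_MAX while cons has not, yet the unsigned
             * subtraction still yields the correct occupancy.
             */
            unsigned int prod = 0x00000004u;
            unsigned int cons = 0xfffffffcu;

            printf("buffers held = %u\n", prod - cons);                   /* 8 */

            /* Slot selection uses a mask instead of a modulo, which is
             * why the ring size must be a power of two.
             */
            printf("slot for cons = %u\n", cons & (TBNET_RING_SIZE - 1)); /* 252 */
            return 0;
    }
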
501 struct tbnet_ring *ring = &net->rx_ring;
505 struct device *dma_dev = tb_ring_dma_device(ring->ring);
506 unsigned int index = ring->prod & (TBNET_RING_SIZE - 1);
507 struct tbnet_frame *tf = &ring->frames[index];
536 tb_ring_rx(ring->ring, &tf->frame);
538 ring->prod++;
544 tbnet_free_buffers(ring);
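
Lines 501-544 are the Rx refill path: each new buffer is mapped against the DMA device returned by tb_ring_dma_device(), queued to the hardware with tb_ring_rx(), and accounted for by advancing prod; any failure unwinds through tbnet_free_buffers(). A hedged sketch of that loop, with the page allocation and DMA mapping folded into a hypothetical tbnet_prepare_rx_frame() helper (not a function from the driver):

    struct tbnet_ring *ring = &net->rx_ring;
    struct device *dma_dev = tb_ring_dma_device(ring->ring);

    while (nbuffers--) {
            unsigned int index = ring->prod & (TBNET_RING_SIZE - 1);
            struct tbnet_frame *tf = &ring->frames[index];

            /* Allocate and DMA-map a receive page for this slot;
             * tbnet_prepare_rx_frame() is a placeholder for those
             * details, not a real helper in the driver.
             */
            if (tbnet_prepare_rx_frame(net, dma_dev, tf))
                    goto err_free;

            /* Hand the buffer to the hardware Rx ring and advance
             * the producer counter (lines 536-538).
             */
            tb_ring_rx(ring->ring, &tf->frame);
            ring->prod++;
    }
    return 0;

    err_free:
            tbnet_free_buffers(ring);    /* line 544: unwind on failure */
            return -ENOMEM;
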
550 struct tbnet_ring *ring = &net->tx_ring;
551 struct device *dma_dev = tb_ring_dma_device(ring->ring);
555 if (!tbnet_available_buffers(ring))
558 index = ring->cons++ & (TBNET_RING_SIZE - 1);
560 tf = &ring->frames[index];
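
Lines 550-560 are the Tx-side counterpart: a transmit buffer is only handed out while prod - cons is non-zero, and the slot is selected by post-incrementing cons under the same power-of-two mask. A hedged sketch of that pattern (the DMA sync of the returned buffer is omitted, and the function name is taken from the driver's tbnet_ naming convention):

    static struct tbnet_frame *tbnet_get_tx_buffer(struct tbnet *net)
    {
            struct tbnet_ring *ring = &net->tx_ring;
            unsigned int index;

            /* Nothing is free until the completion callback below
             * returns buffers by advancing ring->prod.
             */
            if (!tbnet_available_buffers(ring))
                    return NULL;

            index = ring->cons++ & (TBNET_RING_SIZE - 1);
            return &ring->frames[index];
    }
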
569 static void tbnet_tx_callback(struct tb_ring *ring, struct ring_frame *frame,
575 /* Return buffer to the ring */
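
The Tx ring's counters therefore mean the opposite of the Rx ring's: after the Tx buffers are allocated with cons = 0 and prod = TBNET_RING_SIZE - 1 (lines 616-617 below), prod - cons is the number of free transmit buffers, and the completion callback declared on line 569 grows it again each time the hardware finishes a frame. A hedged sketch of that callback (the body beyond line 575 is an assumption about how the listed pieces fit together):

    static void tbnet_tx_callback(struct tb_ring *ring, struct ring_frame *frame,
                                  bool canceled)
    {
            struct tbnet_frame *tf = container_of(frame, typeof(*tf), frame);
            struct tbnet *net = netdev_priv(tf->dev);

            /* Return buffer to the ring */
            net->tx_ring.prod++;

            /* With at least one buffer free again, transmission can
             * resume (assumed detail; the real callback may differ).
             */
            if (tbnet_available_buffers(&net->tx_ring))
                    netif_wake_queue(net->dev);
    }
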
584 struct tbnet_ring *ring = &net->tx_ring;
585 struct device *dma_dev = tb_ring_dma_device(ring->ring);
589 struct tbnet_frame *tf = &ring->frames[i];
594 tbnet_free_buffers(ring);
603 tbnet_free_buffers(ring);
616 ring->cons = 0;
617 ring->prod = TBNET_RING_SIZE - 1;
650 * the Rx ring before any incoming packets are allowed to
653 tb_ring_start(net->tx_ring.ring);
654 tb_ring_start(net->rx_ring.ring);
665 net->rx_ring.ring->hop,
667 net->tx_ring.ring->hop);
684 tb_ring_stop(net->rx_ring.ring);
685 tb_ring_stop(net->tx_ring.ring);
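
Lines 650-667 are the connection bring-up and lines 684-685 the teardown. The comment fragment on line 650 refers to priming the Rx ring before any incoming packets are allowed to arrive: both rings are started and Rx buffers posted first, and only then are the high-speed DMA paths enabled using the hop IDs the NHI assigned to the rings (lines 665 and 667). A hedged sketch of that ordering, using only calls that appear in this listing:

    /* Bring-up: start both DMA rings and post Rx buffers first... */
    tb_ring_start(net->tx_ring.ring);
    tb_ring_start(net->rx_ring.ring);

    /* ...then enable the high-speed paths with the assigned hop IDs
     * (net->rx_ring.ring->hop / net->tx_ring.ring->hop), so no packet
     * can arrive before an Rx buffer exists.  The path-enable call
     * itself is outside this listing.
     */

    /* Teardown (lines 684-685): stop the rings before buffers are freed. */
    tb_ring_stop(net->rx_ring.ring);
    tb_ring_stop(net->tx_ring.ring);
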
806 struct device *dma_dev = tb_ring_dma_device(net->rx_ring.ring);
828 frame = tb_ring_poll(net->rx_ring.ring);
903 /* Re-enable the ring interrupt */
904 tb_ring_poll_complete(net->rx_ring.ring);
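
Lines 806-904 outline the receive path: the NAPI poll handler pulls completed frames off the Rx DMA ring with tb_ring_poll() and, once the ring is empty and the budget not exhausted, re-enables the ring interrupt with tb_ring_poll_complete(). A hedged sketch of that loop (frame validation, skb construction and Rx refill are omitted, and names not shown in this listing are assumptions):

    static int tbnet_poll(struct napi_struct *napi, int budget)
    {
            struct tbnet *net = container_of(napi, struct tbnet, napi);
            unsigned int rx_packets = 0;

            while (rx_packets < budget) {
                    struct ring_frame *frame;

                    /* Next completed frame from the Rx DMA ring;
                     * NULL means the hardware has nothing more.
                     */
                    frame = tb_ring_poll(net->rx_ring.ring);
                    if (!frame)
                            break;

                    /* ... validate the frame, build an skb, pass it to
                     * the stack, refill the Rx ring (omitted) ...
                     */
                    rx_packets++;
            }

            if (rx_packets < budget) {
                    napi_complete_done(napi, rx_packets);
                    /* Re-enable the ring interrupt */
                    tb_ring_poll_complete(net->rx_ring.ring);
            }

            return rx_packets;
    }
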
921 struct tb_ring *ring;
927 ring = tb_ring_alloc_tx(xd->tb->nhi, -1, TBNET_RING_SIZE,
929 if (!ring) {
930 netdev_err(dev, "failed to allocate Tx ring\n");
933 net->tx_ring.ring = ring;
938 tb_ring_free(net->tx_ring.ring);
939 net->tx_ring.ring = NULL;
952 ring = tb_ring_alloc_rx(xd->tb->nhi, -1, TBNET_RING_SIZE, flags,
953 net->tx_ring.ring->hop, sof_mask,
955 if (!ring) {
956 netdev_err(dev, "failed to allocate Rx ring\n");
958 tb_ring_free(net->tx_ring.ring);
959 net->tx_ring.ring = NULL;
962 net->rx_ring.ring = ring;
979 tb_ring_free(net->rx_ring.ring);
980 net->rx_ring.ring = NULL;
983 tb_ring_free(net->tx_ring.ring);
984 net->tx_ring.ring = NULL;
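
Lines 921-962 show why the Tx ring must be allocated first in the open path: its hop ID is passed when the Rx ring is allocated (line 953, for end-to-end flow control), and every failure frees whatever was already set up. Lines 979-984 are the matching teardown. A hedged sketch of the open-path structure; RING_FLAG_FRAME, eof_mask, the tbnet_start_poll callback and the error codes are assumptions not visible in this listing:

    struct tb_ring *ring;

    ring = tb_ring_alloc_tx(xd->tb->nhi, -1, TBNET_RING_SIZE, RING_FLAG_FRAME);
    if (!ring) {
            netdev_err(dev, "failed to allocate Tx ring\n");
            return -ENOMEM;
    }
    net->tx_ring.ring = ring;

    /* The Rx ring references the Tx ring's hop ID, so it can only be
     * set up afterwards; on failure the Tx ring is released again
     * (lines 956-959).
     */
    ring = tb_ring_alloc_rx(xd->tb->nhi, -1, TBNET_RING_SIZE, flags,
                            net->tx_ring.ring->hop, sof_mask, eof_mask,
                            tbnet_start_poll, net);
    if (!ring) {
            netdev_err(dev, "failed to allocate Rx ring\n");
            tb_ring_free(net->tx_ring.ring);
            net->tx_ring.ring = NULL;
            return -ENOMEM;
    }
    net->rx_ring.ring = ring;
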
993 struct device *dma_dev = tb_ring_dma_device(net->tx_ring.ring);
1219 tb_ring_tx(net->tx_ring.ring, &frames[i]->frame);
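
Lines 993 and 1219 bracket the transmit path: the Tx ring's DMA device is used to map the outgoing frames, and each prepared frame is finally queued to the hardware with tb_ring_tx(); the callback on lines 569-575 later returns the slots to the pool. A minimal sketch of that final hand-off (frames and frame_count are assumed local names):

    /* Queue every DMA-mapped frame to the Tx ring; completion is
     * reported through tbnet_tx_callback(), which frees the slots by
     * advancing tx_ring.prod.
     */
    for (i = 0; i < frame_count; i++)
            tb_ring_tx(net->tx_ring.ring, &frames[i]->frame);

    return NETDEV_TX_OK;
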