Lines matching defs:rxq (mwl8k driver RX queue handling)
247 struct mwl8k_rx_queue rxq[MWL8K_RX_QUEUES];
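The match at 247 is the per-device array of RX queues inside struct mwl8k_priv. From the rxq->... accesses in the matches below, the per-queue state looks roughly like the sketch that follows; the field names are taken from the matched lines, while the integer types and the mwl8k_rx_buf wrapper name are assumptions, not copied from the driver.

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Host-side bookkeeping for one posted receive buffer
 * (wrapper name is an assumption). */
struct mwl8k_rx_buf {
	struct sk_buff *skb;            /* rxq->buf[i].skb */
	DEFINE_DMA_UNMAP_ADDR(dma);     /* dma_unmap_addr(&rxq->buf[i], dma) */
};

/* Per-queue state implied by the accesses in the matches below. */
struct mwl8k_rx_queue {
	int rxd_count;                  /* descriptors currently posted to the hw */
	int head;                       /* next slot to reap in the process loop */
	int tail;                       /* next slot to fill in the refill loop */
	void *rxd;                      /* coherent descriptor ring, CPU address */
	dma_addr_t rxd_dma;             /* same ring, bus address handed to firmware */
	struct mwl8k_rx_buf *buf;       /* one skb + unmap address per descriptor */
};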
1163 struct mwl8k_rx_queue *rxq = priv->rxq + index;
1167 rxq->rxd_count = 0;
1168 rxq->head = 0;
1169 rxq->tail = 0;
1173 rxq->rxd = dma_alloc_coherent(&priv->pdev->dev, size, &rxq->rxd_dma,
1175 if (rxq->rxd == NULL) {
1180 rxq->buf = kcalloc(MWL8K_RX_DESCS, sizeof(*rxq->buf), GFP_KERNEL);
1181 if (rxq->buf == NULL) {
1182 dma_free_coherent(&priv->pdev->dev, size, rxq->rxd,
1183 rxq->rxd_dma);
1194 rxd = rxq->rxd + (i * priv->rxd_ops->rxd_size);
1199 next_dma_addr = rxq->rxd_dma + (nexti * desc_size);
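The matches at 1163-1199 come from the RX queue setup path: allocate one coherent descriptor ring, allocate the host-side bookkeeping array, and chain the descriptors into a circle. A condensed sketch of that sequence, assuming the driver's own types and constants (struct mwl8k_priv, MWL8K_RX_DESCS) and an rxd_init hook name not shown in the matches:

static int rxq_init_sketch(struct mwl8k_priv *priv, int index)
{
	struct mwl8k_rx_queue *rxq = priv->rxq + index;
	int desc_size = priv->rxd_ops->rxd_size;
	int size = MWL8K_RX_DESCS * desc_size;
	int i;

	rxq->rxd_count = 0;
	rxq->head = 0;
	rxq->tail = 0;

	/* One coherent allocation holds the whole descriptor ring; rxd is
	 * the CPU view, rxd_dma the bus address later given to the firmware. */
	rxq->rxd = dma_alloc_coherent(&priv->pdev->dev, size, &rxq->rxd_dma,
				      GFP_KERNEL);
	if (rxq->rxd == NULL)
		return -ENOMEM;

	/* Host-only bookkeeping: one skb pointer + unmap address per slot. */
	rxq->buf = kcalloc(MWL8K_RX_DESCS, sizeof(*rxq->buf), GFP_KERNEL);
	if (rxq->buf == NULL) {
		dma_free_coherent(&priv->pdev->dev, size, rxq->rxd,
				  rxq->rxd_dma);
		return -ENOMEM;
	}

	/* Chain slot i to slot (i + 1) % MWL8K_RX_DESCS so the hardware sees
	 * a circular descriptor list; rxd_init is the chip-specific hook
	 * (name assumed here). */
	for (i = 0; i < MWL8K_RX_DESCS; i++) {
		void *rxd = rxq->rxd + (i * desc_size);
		int nexti = (i + 1 == MWL8K_RX_DESCS) ? 0 : i + 1;
		dma_addr_t next_dma_addr = rxq->rxd_dma + (nexti * desc_size);

		priv->rxd_ops->rxd_init(rxd, next_dma_addr);
	}

	return 0;
}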
1210 struct mwl8k_rx_queue *rxq = priv->rxq + index;
1213 while (rxq->rxd_count < MWL8K_RX_DESCS && limit--) {
1226 rxq->rxd_count++;
1227 rx = rxq->tail++;
1228 if (rxq->tail == MWL8K_RX_DESCS)
1229 rxq->tail = 0;
1230 rxq->buf[rx].skb = skb;
1231 dma_unmap_addr_set(&rxq->buf[rx], dma, addr);
1233 rxd = rxq->rxd + (rx * priv->rxd_ops->rxd_size);
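The matches at 1210-1233 are the refill loop: allocate an skb, DMA-map it, record it in the slot at tail, and advance tail modulo MWL8K_RX_DESCS while rxd_count tracks how many slots the hardware owns. A sketch under the same assumptions as above; the buffer size constant, the mapping-error check, and the rxd_refill hook are assumptions beyond the matched lines:

static int rxq_refill_sketch(struct mwl8k_priv *priv, int index, int limit)
{
	struct mwl8k_rx_queue *rxq = priv->rxq + index;
	int refilled = 0;

	while (rxq->rxd_count < MWL8K_RX_DESCS && limit--) {
		struct sk_buff *skb;
		dma_addr_t addr;
		void *rxd;
		int rx;

		skb = dev_alloc_skb(MWL8K_RX_MAXSZ);	/* size: assumption */
		if (skb == NULL)
			break;

		addr = dma_map_single(&priv->pdev->dev, skb->data,
				      MWL8K_RX_MAXSZ, DMA_FROM_DEVICE);
		if (dma_mapping_error(&priv->pdev->dev, addr)) {
			kfree_skb(skb);
			break;
		}

		/* Claim the slot at tail and remember the skb/mapping so the
		 * process path can unmap and hand the frame up later. */
		rxq->rxd_count++;
		rx = rxq->tail++;
		if (rxq->tail == MWL8K_RX_DESCS)
			rxq->tail = 0;
		rxq->buf[rx].skb = skb;
		dma_unmap_addr_set(&rxq->buf[rx], dma, addr);

		/* Point the hardware-visible descriptor at the new buffer;
		 * hook name and arguments are assumed. */
		rxd = rxq->rxd + (rx * priv->rxd_ops->rxd_size);
		priv->rxd_ops->rxd_refill(rxd, addr, MWL8K_RX_MAXSZ);

		refilled++;
	}

	return refilled;
}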
1246 struct mwl8k_rx_queue *rxq = priv->rxq + index;
1249 if (rxq->rxd == NULL)
1253 if (rxq->buf[i].skb != NULL) {
1255 dma_unmap_addr(&rxq->buf[i], dma),
1257 dma_unmap_addr_set(&rxq->buf[i], dma, 0);
1259 kfree_skb(rxq->buf[i].skb);
1260 rxq->buf[i].skb = NULL;
1264 kfree(rxq->buf);
1265 rxq->buf = NULL;
1268 MWL8K_RX_DESCS * priv->rxd_ops->rxd_size, rxq->rxd,
1269 rxq->rxd_dma);
1270 rxq->rxd = NULL;
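The matches at 1246-1270 are the teardown path, and they show nearly the whole sequence: unmap and free any skbs still posted, release the bookkeeping array, then free the coherent ring. A sketch; only the unmap size constant is assumed:

static void rxq_deinit_sketch(struct mwl8k_priv *priv, int index)
{
	struct mwl8k_rx_queue *rxq = priv->rxq + index;
	int i;

	if (rxq->rxd == NULL)
		return;

	/* Drop any buffers the hardware never consumed. */
	for (i = 0; i < MWL8K_RX_DESCS; i++) {
		if (rxq->buf[i].skb != NULL) {
			dma_unmap_single(&priv->pdev->dev,
					 dma_unmap_addr(&rxq->buf[i], dma),
					 MWL8K_RX_MAXSZ, DMA_FROM_DEVICE);
			dma_unmap_addr_set(&rxq->buf[i], dma, 0);

			kfree_skb(rxq->buf[i].skb);
			rxq->buf[i].skb = NULL;
		}
	}

	kfree(rxq->buf);
	rxq->buf = NULL;

	dma_free_coherent(&priv->pdev->dev,
			  MWL8K_RX_DESCS * priv->rxd_ops->rxd_size, rxq->rxd,
			  rxq->rxd_dma);
	rxq->rxd = NULL;
}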
1323 struct mwl8k_rx_queue *rxq = priv->rxq + index;
1327 while (rxq->rxd_count && limit--) {
1335 skb = rxq->buf[rxq->head].skb;
1339 rxd = rxq->rxd + (rxq->head * priv->rxd_ops->rxd_size);
1346 rxq->buf[rxq->head].skb = NULL;
1349 dma_unmap_addr(&rxq->buf[rxq->head], dma),
1351 dma_unmap_addr_set(&rxq->buf[rxq->head], dma, 0);
1353 rxq->head++;
1354 if (rxq->head == MWL8K_RX_DESCS)
1355 rxq->head = 0;
1357 rxq->rxd_count--;
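The matches at 1323-1357 are the receive (reap) loop: take the skb at head, let the chip-specific descriptor ops report whether the slot is complete and how long the frame is, unmap the buffer, and advance head modulo MWL8K_RX_DESCS while rxd_count is decremented. A sketch; the rxd_process hook signature and the hand-off to mac80211 are assumptions beyond the matched lines:

static int rxq_process_sketch(struct ieee80211_hw *hw, int index, int limit)
{
	struct mwl8k_priv *priv = hw->priv;
	struct mwl8k_rx_queue *rxq = priv->rxq + index;
	int processed = 0;

	while (rxq->rxd_count && limit--) {
		struct ieee80211_rx_status status;
		struct sk_buff *skb;
		void *rxd;
		int pkt_len;

		skb = rxq->buf[rxq->head].skb;
		if (skb == NULL)
			break;

		/* Ask the descriptor ops whether this slot is done and how
		 * long the received frame is (hook name/signature assumed). */
		rxd = rxq->rxd + (rxq->head * priv->rxd_ops->rxd_size);
		pkt_len = priv->rxd_ops->rxd_process(rxd, &status);
		if (pkt_len < 0)
			break;

		/* Detach the buffer from the slot and unmap it before the
		 * CPU touches the frame data. */
		rxq->buf[rxq->head].skb = NULL;
		dma_unmap_single(&priv->pdev->dev,
				 dma_unmap_addr(&rxq->buf[rxq->head], dma),
				 MWL8K_RX_MAXSZ, DMA_FROM_DEVICE);
		dma_unmap_addr_set(&rxq->buf[rxq->head], dma, 0);

		rxq->head++;
		if (rxq->head == MWL8K_RX_DESCS)
			rxq->head = 0;

		rxq->rxd_count--;

		/* Hand the frame to mac80211 (assumed delivery path). */
		skb_put(skb, pkt_len);
		memcpy(IEEE80211_SKB_RXCB(skb), &status, sizeof(status));
		ieee80211_rx_irqsafe(hw, skb);

		processed++;
	}

	return processed;
}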
2475 cmd->rx_queue_ptr = cpu_to_le32(priv->rxq[0].rxd_dma);
2572 iowrite32(priv->rxq[0].rxd_dma, priv->sram + off);
2575 iowrite32(priv->rxq[0].rxd_dma, priv->sram + off);
2641 cmd->rx_queue_ptr = cpu_to_le32(priv->rxq[0].rxd_dma);
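The matches at 2475, 2572, 2575 and 2641 show how the ring built above reaches the hardware: the bus address of queue 0's descriptor ring is either embedded in a firmware command as a little-endian 32-bit pointer, or written directly into BAR-mapped SRAM at an offset the firmware provides. A minimal sketch; the helper name, the command-field pointer parameter, and the 'off' computation are assumptions beyond the matched lines:

static void point_hw_at_rxq0_sketch(struct mwl8k_priv *priv,
				    __le32 *cmd_rx_queue_ptr,
				    unsigned int off)
{
	/* Command-based handoff: the ring address travels inside a
	 * GET/SET_HW_SPEC-style command payload. */
	*cmd_rx_queue_ptr = cpu_to_le32(priv->rxq[0].rxd_dma);

	/* Direct SRAM handoff used on some paths: write the 32-bit bus
	 * address straight into device SRAM at a firmware-given offset. */
	iowrite32(priv->rxq[0].rxd_dma, priv->sram + off);
}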