Lines Matching refs:dma
59 struct ltq_dma_channel dma;
103 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
110 ch->dma.desc++;
111 ch->dma.desc %= LTQ_DESC_NUM;
120 ltq_dma_open(&priv->chan_tx.dma);
121 ltq_dma_enable_irq(&priv->chan_tx.dma);
124 ltq_dma_open(&priv->chan_rx.dma);
133 ltq_dma_enable_irq(&priv->chan_rx.dma);
147 ltq_dma_close(&priv->chan_rx.dma);
150 ltq_dma_close(&priv->chan_tx.dma);
157 struct sk_buff *skb = ch->skb[ch->dma.desc];
161 ch->skb[ch->dma.desc] = netdev_alloc_skb_ip_align(ch->priv->net_dev,
163 if (!ch->skb[ch->dma.desc]) {
168 mapping = dma_map_single(ch->priv->dev, ch->skb[ch->dma.desc]->data,
171 dev_kfree_skb_any(ch->skb[ch->dma.desc]);
172 ch->skb[ch->dma.desc] = skb;
177 ch->dma.desc_base[ch->dma.desc].addr = mapping;
181 ch->dma.desc_base[ch->dma.desc].ctl =
191 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
192 struct sk_buff *skb = ch->skb[ch->dma.desc];
199 ch->dma.desc++;
200 ch->dma.desc %= LTQ_DESC_NUM;
225 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
239 ltq_dma_enable_irq(&ch->dma);
255 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->tx_free];
264 memset(&ch->dma.desc_base[ch->tx_free], 0,
283 ltq_dma_enable_irq(&ch->dma);
294 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
307 if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) || ch->skb[ch->dma.desc]) {
313 ch->skb[ch->dma.desc] = skb;
319 /* dma needs to start on a 16 byte aligned address */
327 ch->dma.desc++;
328 ch->dma.desc %= LTQ_DESC_NUM;
329 if (ch->dma.desc == ch->tx_free)
356 ltq_dma_disable_irq(&ch->dma);
360 ltq_dma_ack_irq(&ch->dma);
374 ch_rx->dma.nr = XRX200_DMA_RX;
375 ch_rx->dma.dev = priv->dev;
378 ltq_dma_alloc_rx(&ch_rx->dma);
379 for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM;
380 ch_rx->dma.desc++) {
385 ch_rx->dma.desc = 0;
386 ret = devm_request_irq(priv->dev, ch_rx->dma.irq, xrx200_dma_irq, 0,
390 ch_rx->dma.irq);
394 ch_tx->dma.nr = XRX200_DMA_TX;
395 ch_tx->dma.dev = priv->dev;
398 ltq_dma_alloc_tx(&ch_tx->dma);
399 ret = devm_request_irq(priv->dev, ch_tx->dma.irq, xrx200_dma_irq, 0,
403 ch_tx->dma.irq);
410 ltq_dma_free(&ch_tx->dma);
420 ltq_dma_free(&ch_rx->dma);
428 ltq_dma_free(&priv->chan_tx.dma);
429 ltq_dma_free(&priv->chan_rx.dma);
473 priv->chan_rx.dma.irq = platform_get_irq_byname(pdev, "rx");
474 if (priv->chan_rx.dma.irq < 0)
476 priv->chan_tx.dma.irq = platform_get_irq_byname(pdev, "tx");
477 if (priv->chan_tx.dma.irq < 0)
493 /* bring up the dma engine and IP core */
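Taken together, these matches appear to come from the Lantiq xrx200 Ethernet driver (drivers/net/ethernet/lantiq_xrx200.c in the Linux tree) and trace the life cycle of its two struct ltq_dma_channel instances: a channel is identified by .nr, .dev and .irq, its descriptor ring is allocated with ltq_dma_alloc_rx()/ltq_dma_alloc_tx(), the software ring index dma.desc is advanced with a modulo-LTQ_DESC_NUM wrap, and the channel is started and stopped with ltq_dma_open()/ltq_dma_enable_irq() and ltq_dma_close()/ltq_dma_free(). The sketch below reassembles the RX bring-up sequence suggested by the fragments around lines 374-390; it is illustrative only. The ltq_dma_*() calls, constants and ltq_dma_channel fields are those visible above, while the labels, the error-message text and the xrx200_alloc_skb() refill helper are assumed glue, not verbatim driver code.

/* Illustrative sketch only: RX channel bring-up reconstructed from the
 * matches above. ltq_dma_alloc_rx(), ltq_dma_free(), devm_request_irq(),
 * XRX200_DMA_RX, LTQ_DESC_NUM and the ltq_dma_channel fields appear in
 * the listing; the labels, the error string and the xrx200_alloc_skb()
 * refill helper are assumptions filled in around the visible lines.
 */
static int xrx200_dma_init_rx_sketch(struct xrx200_priv *priv)
{
	struct xrx200_chan *ch_rx = &priv->chan_rx;
	int ret;

	ch_rx->dma.nr = XRX200_DMA_RX;	/* hardware DMA channel number */
	ch_rx->dma.dev = priv->dev;	/* device used for DMA mappings */

	/* allocate the hardware descriptor ring for this channel */
	ltq_dma_alloc_rx(&ch_rx->dma);

	/* pre-fill every slot with an skb; dma.desc is the ring index */
	for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM;
	     ch_rx->dma.desc++) {
		ret = xrx200_alloc_skb(ch_rx);	/* assumed refill helper */
		if (ret)
			goto err_free;
	}
	ch_rx->dma.desc = 0;	/* start receiving at slot 0 */

	/* dma.irq was read earlier via platform_get_irq_byname(pdev, "rx") */
	ret = devm_request_irq(priv->dev, ch_rx->dma.irq, xrx200_dma_irq, 0,
			       "xrx200_net_rx", ch_rx);
	if (ret) {
		dev_err(priv->dev, "failed to request RX irq %d\n",
			ch_rx->dma.irq);
		goto err_free;
	}

	return 0;

err_free:
	ltq_dma_free(&ch_rx->dma);
	return ret;
}

The same desc++ / desc %= LTQ_DESC_NUM wrap seen at lines 110-111, 199-200 and 327-328 is what keeps this software index in step with the hardware descriptor ring once the channel has been opened.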