Lines matching refs: vd (drivers/dma/virt-dma.h)
53 * @vd: virtual descriptor to prepare
57 struct virt_dma_desc *vd, unsigned long tx_flags)
61 dma_async_tx_descriptor_init(&vd->tx, &vc->chan);
62 vd->tx.flags = tx_flags;
63 vd->tx.tx_submit = vchan_tx_submit;
64 vd->tx.desc_free = vchan_tx_desc_free;
66 vd->tx_result.result = DMA_TRANS_NOERROR;
67 vd->tx_result.residue = 0;
70 list_add_tail(&vd->node, &vc->desc_allocated);
73 return &vd->tx;
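
These matches are from vchan_tx_prep(), which initialises the embedded dma_async_tx_descriptor, records a default DMA_TRANS_NOERROR result, and queues the descriptor on desc_allocated. Below is a minimal sketch of how a driver's prep callback might use it; the foo_* names are made up for illustration and are not part of virt-dma.

	#include <linux/dmaengine.h>
	#include <linux/slab.h>
	#include "virt-dma.h"

	/* Hypothetical driver descriptor wrapping the virt-dma descriptor. */
	struct foo_desc {
		struct virt_dma_desc vd;
		dma_addr_t src;
		dma_addr_t dst;
		size_t len;
	};

	static struct dma_async_tx_descriptor *
	foo_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
			    size_t len, unsigned long flags)
	{
		struct virt_dma_chan *vc = to_virt_chan(chan);
		struct foo_desc *d;

		d = kzalloc(sizeof(*d), GFP_NOWAIT);
		if (!d)
			return NULL;

		d->src = src;
		d->dst = dst;
		d->len = len;

		/* Initialises d->vd.tx, queues d->vd on desc_allocated, returns &d->vd.tx. */
		return vchan_tx_prep(vc, &d->vd, flags);
	}
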
90 * @vd: virtual descriptor to update
94 static inline void vchan_cookie_complete(struct virt_dma_desc *vd)
96 struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
99 cookie = vd->tx.cookie;
100 dma_cookie_complete(&vd->tx);
102 vd, cookie);
103 list_add_tail(&vd->node, &vc->desc_completed);
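
vchan_cookie_complete() is documented to require vc.lock to be held; it completes the cookie and moves the descriptor to desc_completed so the virt-dma tasklet can invoke the client callback later. A rough interrupt-path sketch, assuming a hypothetical foo_chan that remembers the descriptor currently programmed into the hardware:

	#include <linux/interrupt.h>
	#include "virt-dma.h"

	/* Hypothetical channel state: virt-dma channel plus the in-flight descriptor. */
	struct foo_chan {
		struct virt_dma_chan vc;
		struct virt_dma_desc *vd_active;
	};

	static irqreturn_t foo_dma_irq(int irq, void *data)
	{
		struct foo_chan *fc = data;
		unsigned long flags;

		spin_lock_irqsave(&fc->vc.lock, flags);
		if (fc->vd_active) {
			/* Marks the cookie complete and moves vd to desc_completed. */
			vchan_cookie_complete(fc->vd_active);
			fc->vd_active = NULL;
		}
		spin_unlock_irqrestore(&fc->vc.lock, flags);

		return IRQ_HANDLED;
	}
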
110 * @vd: virtual descriptor to free/reuse
112 static inline void vchan_vdesc_fini(struct virt_dma_desc *vd)
114 struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
116 if (dmaengine_desc_test_reuse(&vd->tx)) {
120 list_add(&vd->node, &vc->desc_allocated);
123 vc->desc_free(vd);
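
vchan_vdesc_fini() puts a descriptor marked for reuse back on desc_allocated and otherwise passes it to the driver-supplied vc->desc_free hook. A sketch of that hook and of installing it next to vchan_init(), reusing the hypothetical foo_* types from the earlier sketches:

	#include <linux/kernel.h>
	#include <linux/slab.h>
	#include "virt-dma.h"

	static void foo_desc_free(struct virt_dma_desc *vd)
	{
		/* foo_desc is the hypothetical wrapper from the prep sketch above. */
		struct foo_desc *d = container_of(vd, struct foo_desc, vd);

		kfree(d);
	}

	static void foo_chan_setup(struct foo_chan *fc, struct dma_device *ddev)
	{
		/* The hook vchan_vdesc_fini() falls back to when reuse is not set. */
		fc->vc.desc_free = foo_desc_free;
		vchan_init(&fc->vc, ddev);
	}
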
129 * @vd: virtual descriptor
131 static inline void vchan_cyclic_callback(struct virt_dma_desc *vd)
133 struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
135 vc->cyclic = vd;
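
vchan_cyclic_callback() only records the descriptor in vc->cyclic and schedules the tasklet, which then calls the client callback once for the completed period without completing the cookie. A possible period-interrupt helper for a cyclic transfer, again using the assumed foo_chan layout:

	#include "virt-dma.h"

	/* Called from the hardware interrupt when one cyclic period has drained
	 * (foo_chan is the hypothetical channel state from the earlier sketch). */
	static void foo_dma_period_done(struct foo_chan *fc)
	{
		unsigned long flags;

		spin_lock_irqsave(&fc->vc.lock, flags);
		if (fc->vd_active)
			vchan_cyclic_callback(fc->vd_active);	/* report the period, keep the cookie pending */
		spin_unlock_irqrestore(&fc->vc.lock, flags);
	}
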
141 * @vd: virtual descriptor to be terminated
145 static inline void vchan_terminate_vdesc(struct virt_dma_desc *vd)
147 struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
149 list_add_tail(&vd->node, &vc->desc_terminated);
151 if (vc->cyclic == vd)
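
vchan_terminate_vdesc() parks a still-active descriptor on desc_terminated (clearing vc->cyclic if it was the cyclic one) so it can be freed outside the lock rather than immediately. A sketch of a device_terminate_all implementation built on the usual pattern; the hardware-stop step and the foo_* names are placeholders:

	#include <linux/kernel.h>
	#include "virt-dma.h"

	static int foo_dma_terminate_all(struct dma_chan *chan)
	{
		struct foo_chan *fc = container_of(to_virt_chan(chan), struct foo_chan, vc);
		unsigned long flags;
		LIST_HEAD(head);

		spin_lock_irqsave(&fc->vc.lock, flags);
		/* ... stop the hardware channel here ... */
		if (fc->vd_active) {
			vchan_terminate_vdesc(fc->vd_active);	/* moved to desc_terminated */
			fc->vd_active = NULL;
		}
		vchan_get_all_descriptors(&fc->vc, &head);
		spin_unlock_irqrestore(&fc->vc.lock, flags);

		/* Free everything, including the terminated descriptor, outside the lock. */
		vchan_dma_desc_free_list(&fc->vc, &head);

		return 0;
	}
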
189 struct virt_dma_desc *vd;
195 list_for_each_entry(vd, &head, node)
196 dmaengine_desc_clear_reuse(&vd->tx);
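
These last matches are from vchan_free_chan_resources(), which clears the reuse flag on every outstanding descriptor before the whole list is freed through vc->desc_free, so even descriptors marked for reuse are really released. A driver's device_free_chan_resources hook can usually just forward to it, roughly:

	#include "virt-dma.h"

	static void foo_dma_free_chan_resources(struct dma_chan *chan)
	{
		struct virt_dma_chan *vc = to_virt_chan(chan);

		/* Clears DMA_CTRL_REUSE on all pending descriptors and frees them. */
		vchan_free_chan_resources(vc);
	}
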