Lines Matching refs:tx
62 * @tx: the new operation
66 struct dma_async_tx_descriptor *tx)
74 if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) {
75 txd_chain(depend_tx, tx);
100 txd_chain(intr_tx, tx);
121 tx->tx_submit(tx);
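The matches above (lines 62-121) fall in async_tx_channel_switch(), which runs when a new descriptor depends on one issued on a different DMA channel. A condensed paraphrase of that branch, assuming the txd_parent()/txd_chain() helpers from <linux/dmaengine.h> that appear in the matches; locking, allocation and error handling are omitted, so this is a sketch, not the verbatim source:

#include <linux/dmaengine.h>

/* Condensed sketch of the channel-switch decision. */
static void channel_switch_sketch(struct dma_async_tx_descriptor *depend_tx,
				  struct dma_async_tx_descriptor *intr_tx,
				  struct dma_async_tx_descriptor *tx)
{
	if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) {
		/* lines 74-75: the dependency is still pending on the same
		 * channel, so the new descriptor is simply chained behind it. */
		txd_chain(depend_tx, tx);
	} else if (intr_tx) {
		/* line 100: bridge the two channels by hanging tx off an
		 * interrupt descriptor on the dependency's channel; the
		 * chain is walked when intr_tx completes. */
		txd_chain(intr_tx, tx);
	} else {
		/* line 121: no interrupt descriptor could be allocated, so
		 * the dependency is waited on and tx is submitted directly
		 * on its own channel. */
		tx->tx_submit(tx);
	}
}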
143 async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
148 tx->callback = submit->cb_fn;
149 tx->callback_param = submit->cb_param;
161 txd_parent(tx));
173 txd_chain(depend_tx, tx);
192 async_tx_channel_switch(depend_tx, tx);
195 txd_clear_parent(tx);
196 tx->tx_submit(tx);
200 txd_clear_parent(tx);
201 tx->tx_submit(tx);
205 async_tx_ack(tx);
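The matches from 143 through 205 are inside async_tx_submit(): it copies the callback and parameter out of the async_submit_ctl into the descriptor (lines 148-149), chains it behind any dependency (switching channels when needed), calls tx->tx_submit(tx), and acks the descriptor when ASYNC_TX_ACK was requested (line 205). A minimal caller-side sketch, assuming the offload API from <linux/async_tx.h> (init_async_submit()/async_memcpy()); the helper name copy_page_async and the buffer setup are hypothetical:

#include <linux/async_tx.h>

/* Hypothetical helper: offload a page copy. The callback passed in the
 * submit descriptor is what async_tx_submit() installs as tx->callback /
 * tx->callback_param before calling tx->tx_submit(tx). */
static struct dma_async_tx_descriptor *
copy_page_async(struct page *dest, struct page *src,
		dma_async_tx_callback done, void *done_arg)
{
	struct async_submit_ctl submit;

	/* ASYNC_TX_ACK asks the core to ack the descriptor right after
	 * submission, the path seen at line 205 above. */
	init_async_submit(&submit, ASYNC_TX_ACK, NULL, done, done_arg, NULL);

	return async_memcpy(dest, src, 0, 0, PAGE_SIZE, &submit);
}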
225 struct dma_async_tx_descriptor *tx;
238 tx = device ? device->device_prep_dma_interrupt(chan, 0) : NULL;
240 tx = NULL;
242 if (tx) {
245 async_tx_submit(chan, tx, submit);
255 return tx;
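Lines 225-255 are async_trigger_callback(): it asks the channel for a bare interrupt descriptor (device_prep_dma_interrupt(), line 238) so a completion callback can fire once prior operations finish, and falls back to running the callback synchronously when no descriptor is available. A usage sketch under the same assumptions; prev_tx, notify_when_done() and all_done() are hypothetical names:

#include <linux/async_tx.h>
#include <linux/completion.h>

/* Hypothetical completion handler: wake whoever is waiting. */
static void all_done(void *arg)
{
	complete(arg);
}

static void notify_when_done(struct dma_async_tx_descriptor *prev_tx,
			     struct completion *done)
{
	struct async_submit_ctl submit;

	/* all_done() runs only after prev_tx has completed; ASYNC_TX_ACK
	 * lets the core reclaim the interrupt descriptor returned at
	 * line 255. */
	init_async_submit(&submit, ASYNC_TX_ACK, prev_tx, all_done, done, NULL);
	async_trigger_callback(&submit);
}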
260 * async_tx_quiesce - ensure tx is complete and freeable upon return
261 * @tx: transaction to quiesce
263 void async_tx_quiesce(struct dma_async_tx_descriptor **tx)
265 if (*tx) {
269 BUG_ON(async_tx_test_ack(*tx));
270 if (dma_wait_for_async_tx(*tx) != DMA_COMPLETE)
273 async_tx_ack(*tx);
274 *tx = NULL;
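The final group (260-274) is async_tx_quiesce(): given the address of a descriptor pointer, it does nothing if *tx is NULL (the operation ran synchronously), otherwise waits for completion, acks the descriptor, and NULLs the pointer; the BUG_ON at line 269 means the descriptor must not have been acked already. A usage sketch under the same assumptions as above; copy_and_wait() is a hypothetical name:

#include <linux/async_tx.h>

static void copy_and_wait(struct page *dest, struct page *src)
{
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;

	/* No ASYNC_TX_ACK here: async_tx_quiesce() expects an un-acked
	 * descriptor (the BUG_ON at line 269) and acks it itself. */
	init_async_submit(&submit, 0, NULL, NULL, NULL, NULL);
	tx = async_memcpy(dest, src, 0, 0, PAGE_SIZE, &submit);

	/* Returns once the copy is complete (immediately if it ran
	 * synchronously and tx is NULL), leaving tx set to NULL. */
	async_tx_quiesce(&tx);
}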