Lines Matching defs:flags (definitions of the flags symbol in include/linux/dmaengine.h)
164 * enum dma_ctrl_flags - DMA flags to augment operation preparation,
582 * @flags: flags to augment operation preparation, control completion, and
602 enum dma_ctrl_flags flags; /* not a 'long' to pack with cookie */
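The dma_ctrl_flags value handed to the various prep calls is stored in the descriptor's flags field shown above. A minimal sketch of the most common combination, assuming a kernel context with a channel already in hand (prep_acked_memcpy is a hypothetical helper name):

#include <linux/dmaengine.h>

/* Sketch: request a completion interrupt (DMA_PREP_INTERRUPT) and let
 * the provider recycle the descriptor as soon as it completes
 * (DMA_CTRL_ACK). */
static struct dma_async_tx_descriptor *
prep_acked_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
                  size_t len)
{
        return dmaengine_prep_dma_memcpy(chan, dst, src, len,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}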
627 struct dmaengine_unmap_data *dmaengine_get_unmap_data(struct device *dev, int nr, gfp_t flags);
633 static inline struct dmaengine_unmap_data *dmaengine_get_unmap_data(struct device *dev, int nr, gfp_t flags)
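dmaengine_get_unmap_data() allocates reference-counted unmap bookkeeping for the addresses a descriptor touches. A sketch of the usual pattern, modeled on in-tree users such as ntb_transport (queue_copy is hypothetical; dma_mapping_error() checks and submission are elided):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static int queue_copy(struct dma_chan *chan, struct page *dst_pg,
                      struct page *src_pg, size_t len)
{
        struct device *dev = chan->device->dev;
        struct dmaengine_unmap_data *unmap;
        struct dma_async_tx_descriptor *tx;

        unmap = dmaengine_get_unmap_data(dev, 2, GFP_NOWAIT);
        if (!unmap)
                return -ENOMEM;

        unmap->len = len;
        unmap->addr[0] = dma_map_page(dev, src_pg, 0, len, DMA_TO_DEVICE);
        unmap->to_cnt = 1;
        unmap->addr[1] = dma_map_page(dev, dst_pg, 0, len, DMA_FROM_DEVICE);
        unmap->from_cnt = 1;

        tx = dmaengine_prep_dma_memcpy(chan, unmap->addr[1], unmap->addr[0],
                                       len, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (tx)
                dma_set_unmap(tx, unmap);       /* descriptor takes a ref */
        dmaengine_unmap_put(unmap);             /* drop ours either way */
        return tx ? 0 : -EIO;
}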
774 * @cap_mask: one or more dma_capability flags
879 size_t len, unsigned long flags);
881 unsigned int src_cnt, size_t len, unsigned long flags);
884 enum sum_check_flags *result, unsigned long flags);
887 unsigned long flags);
891 unsigned long flags);
893 size_t len, unsigned long flags);
895 unsigned int nents, int value, unsigned long flags);
896 struct dma_async_tx_descriptor *(*device_prep_dma_interrupt)(struct dma_chan *chan, unsigned long flags);
900 unsigned long flags, void *context);
904 unsigned long flags);
907 unsigned long flags);
909 unsigned long flags);
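The device_prep_* members above are the hooks a DMA provider fills in on its struct dma_device; the capabilities advertised in cap_mask must match the hooks actually provided. A provider-side sketch (struct my_dev and the my_* functions are hypothetical stand-ins for a real driver's state and implementations; channel setup and registration are elided):

#include <linux/dmaengine.h>

struct my_dev {
        struct dma_device dma;
};

struct dma_async_tx_descriptor *my_prep_memcpy(struct dma_chan *chan,
                dma_addr_t dst, dma_addr_t src, size_t len,
                unsigned long flags);
enum dma_status my_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
                struct dma_tx_state *state);
void my_issue_pending(struct dma_chan *chan);

static int my_register(struct my_dev *md, struct device *parent)
{
        struct dma_device *dma = &md->dma;

        dma->dev = parent;
        dma_cap_set(DMA_MEMCPY, dma->cap_mask);
        dma->device_prep_dma_memcpy = my_prep_memcpy;
        dma->device_tx_status = my_tx_status;
        dma->device_issue_pending = my_issue_pending;

        return dma_async_device_register(dma);
}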
944 unsigned long flags)
955 return chan->device->device_prep_slave_sg(chan, &sg, 1, dir, flags, NULL);
961 unsigned long flags)
967 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dir, flags, NULL);
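On the consumer side, the slave wrappers above are normally preceded by dmaengine_slave_config(). A sketch of one mem-to-device transfer (start_tx and MY_FIFO_ADDR are hypothetical; the buffer is assumed to be DMA-mapped already):

#include <linux/dmaengine.h>

#define MY_FIFO_ADDR    0x40001000      /* hypothetical device FIFO */

static int start_tx(struct dma_chan *chan, dma_addr_t buf, size_t len)
{
        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = MY_FIFO_ADDR,
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst   = 8,
        };
        struct dma_async_tx_descriptor *tx;

        if (dmaengine_slave_config(chan, &cfg))
                return -EINVAL;

        tx = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx)
                return -EBUSY;

        dmaengine_submit(tx);
        dma_async_issue_pending(chan);
        return 0;
}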
975 unsigned long flags, struct rio_dma_ext *rio_ext)
981 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dir, flags, rio_ext);
988 unsigned long flags)
994 return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len, period_len, dir, flags);
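dmaengine_prep_dma_cyclic() builds a ring that wraps from buf_len back to offset 0, firing the callback once per period_len bytes; this is the usual audio/ADC streaming shape. A sketch (start_ring and my_period_done are hypothetical):

#include <linux/dmaengine.h>

static void my_period_done(void *param)
{
        /* e.g. advance the ring's read pointer */
}

static int start_ring(struct dma_chan *chan, dma_addr_t buf,
                      size_t buf_len, size_t period_len)
{
        struct dma_async_tx_descriptor *tx;

        tx = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
                                       DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
        if (!tx)
                return -EBUSY;

        tx->callback = my_period_done;
        tx->callback_param = chan;
        dmaengine_submit(tx);
        dma_async_issue_pending(chan);
        return 0;
}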
998 dmaengine_prep_interleaved_dma(struct dma_chan *chan, struct dma_interleaved_template *xt, unsigned long flags)
1003 if (flags & DMA_PREP_REPEAT && !test_bit(DMA_REPEAT, chan->device->cap_mask.bits)) {
1007 return chan->device->device_prep_interleaved_dma(chan, xt, flags);
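As the cap_mask check above shows, DMA_PREP_REPEAT is refused unless the device advertises DMA_REPEAT. A sketch of a strided interleaved copy that repeats until a later descriptor flagged DMA_PREP_LOAD_EOT replaces it (prep_repeating_frame is hypothetical; providers are assumed to copy the template during prep, so it is freed immediately afterwards):

#include <linux/dmaengine.h>
#include <linux/slab.h>

static struct dma_async_tx_descriptor *
prep_repeating_frame(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
                     size_t line, size_t stride, size_t lines)
{
        struct dma_interleaved_template *xt;
        struct dma_async_tx_descriptor *tx;

        xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
        if (!xt)
                return NULL;

        xt->src_start = src;
        xt->dst_start = dst;
        xt->dir = DMA_MEM_TO_MEM;
        xt->src_inc = true;
        xt->dst_inc = true;
        xt->dst_sgl = true;             /* icg applies to the destination */
        xt->numf = lines;               /* one frame per line */
        xt->frame_size = 1;             /* one chunk per frame */
        xt->sgl[0].size = line;
        xt->sgl[0].icg = stride - line; /* destination gap between lines */

        tx = dmaengine_prep_interleaved_dma(chan, xt,
                        DMA_PREP_REPEAT | DMA_PREP_LOAD_EOT);
        kfree(xt);
        return tx;
}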
1011 int value, size_t len, unsigned long flags)
1017 return chan->device->device_prep_dma_memset(chan, dest, value, len, flags);
1021 dma_addr_t src, size_t len, unsigned long flags)
1027 return chan->device->device_prep_dma_memcpy(chan, dest, src, len, flags);
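Putting the memcpy wrapper to use end to end: grab any DMA_MEMCPY-capable channel, submit, and poll the cookie. A sketch (blocking_copy is hypothetical, the addresses are assumed mapped, and real users normally prefer a completion callback over dma_sync_wait()):

#include <linux/dmaengine.h>

static int blocking_copy(dma_addr_t dst, dma_addr_t src, size_t len)
{
        struct dma_async_tx_descriptor *tx;
        struct dma_chan *chan;
        dma_cap_mask_t mask;
        dma_cookie_t cookie;
        int ret = 0;

        dma_cap_zero(mask);
        dma_cap_set(DMA_MEMCPY, mask);
        chan = dma_request_channel(mask, NULL, NULL);
        if (!chan)
                return -ENODEV;

        tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_CTRL_ACK);
        if (!tx) {
                ret = -EBUSY;
                goto out;
        }

        cookie = dmaengine_submit(tx);
        dma_async_issue_pending(chan);
        if (dma_sync_wait(chan, cookie) != DMA_COMPLETE)
                ret = -EIO;
out:
        dma_release_channel(chan);
        return ret;
}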
1222 static inline bool dmaf_continue(enum dma_ctrl_flags flags)
1224 return (flags & DMA_PREP_CONTINUE) == DMA_PREP_CONTINUE;
1227 static inline bool dmaf_p_disabled_continue(enum dma_ctrl_flags flags)
1231 return (flags & mask) == mask;
1246 * @flags - to check if DMA_PREP_CONTINUE and DMA_PREP_PQ_DISABLE_P are set
1257 static inline int dma_maxpq(struct dma_device *dma, enum dma_ctrl_flags flags)
1259 if (dma_dev_has_pq_continue(dma) || !dmaf_continue(flags)) {
1262 if (dmaf_p_disabled_continue(flags)) {
1265 if (dmaf_continue(flags)) {
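Worked example for the three branches above: an engine without native continuation needs 3 extra source slots on a continued P+Q operation (to re-inject the old P and Q), and only 1 extra slot when P is disabled. Assuming dma_dev_to_maxpq(dma) == 8 and no native DMA_PREP_CONTINUE support:

dma_maxpq(dma, 0);                          /* 8: fresh operation   */
dma_maxpq(dma, DMA_PREP_CONTINUE |
               DMA_PREP_PQ_DISABLE_P);      /* 7 == 8 - 1           */
dma_maxpq(dma, DMA_PREP_CONTINUE);          /* 5 == 8 - 3           */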
1333 tx->flags |= DMA_CTRL_ACK;
1338 tx->flags &= ~DMA_CTRL_ACK;
1343 return (tx->flags & DMA_CTRL_ACK) == DMA_CTRL_ACK;
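These lines are the bodies of async_tx_ack(), async_tx_clear_ack(), and async_tx_test_ack(), which gate descriptor recycling: a provider's cleanup path only frees descriptors the client has acked. A sketch of the client side (done_with_desc is hypothetical):

#include <linux/dmaengine.h>

/* A client that submitted without DMA_CTRL_ACK (e.g. because it chains
 * dependent operations) acks the descriptor once it is done with it. */
static void done_with_desc(struct dma_async_tx_descriptor *tx)
{
        if (!async_tx_test_ack(tx))
                async_tx_ack(tx);       /* sets DMA_CTRL_ACK in tx->flags */
}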
1511 tx->flags |= DMA_CTRL_REUSE;
1517 tx->flags &= ~DMA_CTRL_REUSE;
1522 return (tx->flags & DMA_CTRL_REUSE) == DMA_CTRL_REUSE;
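Similarly, these are the bodies of the dmaengine_desc_set_reuse()/clear/test helpers, which let a client resubmit one prepared descriptor repeatedly instead of re-preparing it. A sketch (submit_reusable is hypothetical; a reusable descriptor must eventually be released with dmaengine_desc_free()):

#include <linux/dmaengine.h>

static int submit_reusable(struct dma_chan *chan,
                           struct dma_async_tx_descriptor *tx)
{
        int ret = dmaengine_desc_set_reuse(tx);

        if (ret)        /* -EPERM if the channel lacks reuse support */
                return ret;

        dmaengine_submit(tx);
        dma_async_issue_pending(chan);
        return 0;
}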