Lines matching refs: chan (DMA engine API declarations from include/linux/dmaengine.h)

322  * @device_node: used to add this to the device chan list
358 * @chan: driver channel device
363 struct dma_chan *chan;
517 static inline const char *dma_chan_name(struct dma_chan *chan)
519 return dev_name(&chan->dev->device);
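A trivial sketch of how a client might use dma_chan_name() in its diagnostics (chan is assumed to be a valid, already-requested channel):

	dev_info(chan->device->dev, "using DMA channel %s\n", dma_chan_name(chan));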
526 * @chan: channel to be reviewed
535 typedef bool (*dma_filter_fn)(struct dma_chan *chan, void *filter_param);
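A dma_filter_fn is the callback a consumer passes to dma_request_channel() to pick one channel out of those the core offers. A minimal sketch, assuming a hypothetical my_dma_match structure and matching on chan_id:

	#include <linux/dmaengine.h>

	/* Hypothetical match data; real platforms often compare chan->private. */
	struct my_dma_match {
		int requested_id;
	};

	/* Return true to claim the channel the core is offering. */
	static bool my_dma_filter(struct dma_chan *chan, void *filter_param)
	{
		struct my_dma_match *match = filter_param;

		return chan->chan_id == match->requested_id;
	}

	/* Usage: claim a DMA_SLAVE-capable channel whose id matches. */
	static struct dma_chan *my_get_channel(int id)
	{
		dma_cap_mask_t mask;
		struct my_dma_match match = { .requested_id = id };

		dma_cap_zero(mask);
		dma_cap_set(DMA_SLAVE, mask);

		/* The filter runs synchronously, so a stack parameter is fine. */
		return dma_request_channel(mask, my_dma_filter, &match);
	}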
585 * @chan: target channel for this operation
604 struct dma_chan *chan;
875 int (*device_alloc_chan_resources)(struct dma_chan *chan);
876 void (*device_free_chan_resources)(struct dma_chan *chan);
878 struct dma_async_tx_descriptor *(*device_prep_dma_memcpy)(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
880 struct dma_async_tx_descriptor *(*device_prep_dma_xor)(struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src,
882 struct dma_async_tx_descriptor *(*device_prep_dma_xor_val)(struct dma_chan *chan, dma_addr_t *src,
885 struct dma_async_tx_descriptor *(*device_prep_dma_pq)(struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src,
888 struct dma_async_tx_descriptor *(*device_prep_dma_pq_val)(struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src,
892 struct dma_async_tx_descriptor *(*device_prep_dma_memset)(struct dma_chan *chan, dma_addr_t dest, int value,
894 struct dma_async_tx_descriptor *(*device_prep_dma_memset_sg)(struct dma_chan *chan, struct scatterlist *sg,
896 struct dma_async_tx_descriptor *(*device_prep_dma_interrupt)(struct dma_chan *chan, unsigned long flags);
898 struct dma_async_tx_descriptor *(*device_prep_slave_sg)(struct dma_chan *chan, struct scatterlist *sgl,
901 struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)(struct dma_chan *chan, dma_addr_t buf_addr,
905 struct dma_async_tx_descriptor *(*device_prep_interleaved_dma)(struct dma_chan *chan,
908 struct dma_async_tx_descriptor *(*device_prep_dma_imm_data)(struct dma_chan *chan, dma_addr_t dst, u64 data,
911 void (*device_caps)(struct dma_chan *chan, struct dma_slave_caps *caps);
912 int (*device_config)(struct dma_chan *chan, struct dma_slave_config *config);
913 int (*device_pause)(struct dma_chan *chan);
914 int (*device_resume)(struct dma_chan *chan);
915 int (*device_terminate_all)(struct dma_chan *chan);
916 void (*device_synchronize)(struct dma_chan *chan);
918 enum dma_status (*device_tx_status)(struct dma_chan *chan, dma_cookie_t cookie, struct dma_tx_state *txstate);
919 void (*device_issue_pending)(struct dma_chan *chan);
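The function pointers above belong to struct dma_device and are filled in by a DMA controller (provider) driver before registration. A provider-side sketch, assuming hypothetical my_* callbacks implemented elsewhere in the driver:

	#include <linux/dmaengine.h>

	static int my_dmac_register(struct dma_device *dd, struct device *dev)
	{
		dd->dev = dev;
		dma_cap_set(DMA_SLAVE, dd->cap_mask);

		/* Typical callbacks for a slave DMA controller. */
		dd->device_alloc_chan_resources = my_alloc_chan_resources;
		dd->device_free_chan_resources  = my_free_chan_resources;
		dd->device_prep_slave_sg        = my_prep_slave_sg;
		dd->device_config               = my_config;
		dd->device_terminate_all        = my_terminate_all;
		dd->device_tx_status            = my_tx_status;
		dd->device_issue_pending        = my_issue_pending;

		dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
		dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
		dd->directions = BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM);
		dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;

		/* Channels must already be on dd->channels before this call. */
		return dma_async_device_register(dd);
	}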
928 static inline int dmaengine_slave_config(struct dma_chan *chan, struct dma_slave_config *config)
930 if (chan->device->device_config) {
931 return chan->device->device_config(chan, config);
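On the consumer side, dmaengine_slave_config() is usually called once after requesting the channel. A minimal sketch for an imaginary peripheral whose 32-bit TX FIFO sits at fifo_phys (address, width and burst size are assumptions):

	#include <linux/dmaengine.h>

	static int my_setup_tx_channel(struct dma_chan *chan, dma_addr_t fifo_phys)
	{
		struct dma_slave_config cfg = {
			.direction      = DMA_MEM_TO_DEV,
			.dst_addr       = fifo_phys,
			.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
			.dst_maxburst   = 8,
		};

		return dmaengine_slave_config(chan, &cfg);
	}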
942 static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_single(struct dma_chan *chan, dma_addr_t buf,
951 if (!chan || !chan->device || !chan->device->device_prep_slave_sg) {
955 return chan->device->device_prep_slave_sg(chan, &sg, 1, dir, flags, NULL);
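A typical single-buffer transmit flow built on dmaengine_prep_slave_single(): map, prepare, set the completion callback, submit, then kick the engine. The my_* names and the completion-based signalling are illustrative only:

	#include <linux/dmaengine.h>
	#include <linux/dma-mapping.h>
	#include <linux/completion.h>

	/* Hypothetical completion callback; param is a struct completion *. */
	static void my_tx_done(void *param)
	{
		complete(param);
	}

	static int my_start_tx(struct dma_chan *chan, void *buf, size_t len,
			       struct completion *done)
	{
		struct device *dev = chan->device->dev;
		struct dma_async_tx_descriptor *desc;
		dma_addr_t dma_buf;
		dma_cookie_t cookie;

		dma_buf = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma_buf))
			return -ENOMEM;

		desc = dmaengine_prep_slave_single(chan, dma_buf, len, DMA_MEM_TO_DEV,
						   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc) {
			dma_unmap_single(dev, dma_buf, len, DMA_TO_DEVICE);
			return -EIO;
		}

		desc->callback = my_tx_done;
		desc->callback_param = done;

		cookie = dmaengine_submit(desc);
		if (dma_submit_error(cookie)) {
			dma_unmap_single(dev, dma_buf, len, DMA_TO_DEVICE);
			return -EIO;
		}

		dma_async_issue_pending(chan);
		return 0;
	}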
958 static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
963 if (!chan || !chan->device || !chan->device->device_prep_slave_sg) {
967 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dir, flags, NULL);
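The scatterlist variant follows the same pattern; the sg table is assumed to have been built by the caller. A minimal sketch for the memory-to-device direction:

	#include <linux/dmaengine.h>
	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int my_queue_sg(struct dma_chan *chan, struct scatterlist *sgl,
			       unsigned int nents)
	{
		struct device *dev = chan->device->dev;
		struct dma_async_tx_descriptor *desc;
		int mapped;

		mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
		if (!mapped)
			return -ENOMEM;

		/* Pass the mapped count, not the original nents, to the prep call. */
		desc = dmaengine_prep_slave_sg(chan, sgl, mapped, DMA_MEM_TO_DEV,
					       DMA_PREP_INTERRUPT);
		if (!desc) {
			dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
			return -EIO;
		}

		dmaengine_submit(desc);
		dma_async_issue_pending(chan);
		return 0;
	}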
972 static inline struct dma_async_tx_descriptor *dmaengine_prep_rio_sg(struct dma_chan *chan, struct scatterlist *sgl,
977 if (!chan || !chan->device || !chan->device->device_prep_slave_sg) {
981 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dir, flags, rio_ext);
985 static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr,
990 if (!chan || !chan->device || !chan->device->device_prep_dma_cyclic) {
994 return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len, period_len, dir, flags);
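dmaengine_prep_dma_cyclic() sets up a ring buffer split into periods, as audio and serial drivers do; the callback fires once per completed period and the transfer loops until terminated. A sketch assuming the ring was already DMA-mapped by the caller:

	#include <linux/dmaengine.h>

	static int my_start_cyclic_rx(struct dma_chan *chan, dma_addr_t ring,
				      size_t ring_len, size_t period_len,
				      dma_async_tx_callback period_cb, void *cb_arg)
	{
		struct dma_async_tx_descriptor *desc;

		desc = dmaengine_prep_dma_cyclic(chan, ring, ring_len, period_len,
						 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
		if (!desc)
			return -EIO;

		desc->callback = period_cb;
		desc->callback_param = cb_arg;

		dmaengine_submit(desc);
		dma_async_issue_pending(chan);
		return 0;
	}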
998 dmaengine_prep_interleaved_dma(struct dma_chan *chan, struct dma_interleaved_template *xt, unsigned long flags)
1000 if (!chan || !chan->device || !chan->device->device_prep_interleaved_dma) {
1003 if (flags & DMA_PREP_REPEAT && !test_bit(DMA_REPEAT, chan->device->cap_mask.bits)) {
1007 return chan->device->device_prep_interleaved_dma(chan, xt, flags);
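dmaengine_prep_interleaved_dma() takes a struct dma_interleaved_template describing frames and inter-chunk gaps. A hedged sketch of a simple 2D copy (packed source rows scattered into a strided destination); it assumes the provider consumes the template during the prep call, which is the common pattern:

	#include <linux/dmaengine.h>
	#include <linux/slab.h>

	static int my_2d_copy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
			      size_t width, size_t stride, size_t lines)
	{
		struct dma_interleaved_template *xt;
		struct dma_async_tx_descriptor *desc;

		xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
		if (!xt)
			return -ENOMEM;

		xt->src_start  = src;
		xt->dst_start  = dst;
		xt->dir        = DMA_MEM_TO_MEM;
		xt->src_inc    = true;
		xt->dst_inc    = true;
		xt->dst_sgl    = true;
		xt->numf       = lines;		/* number of frames (rows) */
		xt->frame_size = 1;		/* one chunk per frame */
		xt->sgl[0].size    = width;
		xt->sgl[0].dst_icg = stride - width;

		desc = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);
		kfree(xt);	/* providers copy what they need during prep */
		if (!desc)
			return -EIO;

		dmaengine_submit(desc);
		dma_async_issue_pending(chan);
		return 0;
	}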
1010 static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_memset(struct dma_chan *chan, dma_addr_t dest,
1013 if (!chan || !chan->device || !chan->device->device_prep_dma_memset) {
1017 return chan->device->device_prep_dma_memset(chan, dest, value, len, flags);
1020 static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest,
1023 if (!chan || !chan->device || !chan->device->device_prep_dma_memcpy) {
1027 return chan->device->device_prep_dma_memcpy(chan, dest, src, len, flags);
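The memory-to-memory helpers work on channels requested by capability rather than by device binding; dmaengine_prep_dma_memset() follows the same pattern with a fill value instead of a source address. A sketch of a synchronous copy offload between two already DMA-mapped regions (requesting the channel per transfer is only for illustration; real users cache it):

	#include <linux/dmaengine.h>

	static int my_offload_copy(dma_addr_t dst, dma_addr_t src, size_t len)
	{
		dma_cap_mask_t mask;
		struct dma_chan *chan;
		struct dma_async_tx_descriptor *desc;
		dma_cookie_t cookie;
		int ret = 0;

		dma_cap_zero(mask);
		dma_cap_set(DMA_MEMCPY, mask);

		chan = dma_request_channel(mask, NULL, NULL);
		if (!chan)
			return -ENODEV;

		desc = dmaengine_prep_dma_memcpy(chan, dst, src, len,
						 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc) {
			ret = -EIO;
			goto out;
		}

		cookie = dmaengine_submit(desc);
		if (dma_sync_wait(chan, cookie) != DMA_COMPLETE)
			ret = -EIO;
	out:
		dma_release_channel(chan);
		return ret;
	}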
1030 static inline bool dmaengine_is_metadata_mode_supported(struct dma_chan *chan, enum dma_desc_metadata_mode mode)
1032 if (!chan) {
1036 return !!(chan->device->desc_metadata_modes & mode);
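dmaengine_is_metadata_mode_supported() lets a client check the provider's advertised metadata modes before attaching a buffer. A short sketch for client-mode metadata (my_attach_metadata is a hypothetical helper; desc is a descriptor returned by one of the prep calls above):

	static int my_attach_metadata(struct dma_chan *chan,
				      struct dma_async_tx_descriptor *desc,
				      void *data, size_t len)
	{
		if (!dmaengine_is_metadata_mode_supported(chan, DESC_METADATA_CLIENT))
			return -EOPNOTSUPP;

		return dmaengine_desc_attach_metadata(desc, data, len);
	}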
1061 * @chan: The channel for which to terminate the transfers
1066 static inline int dmaengine_terminate_all(struct dma_chan *chan)
1068 if (chan->device->device_terminate_all) {
1069 return chan->device->device_terminate_all(chan);
1077 * @chan: The channel for which to terminate the transfers
1096 static inline int dmaengine_terminate_async(struct dma_chan *chan)
1098 if (chan->device->device_terminate_all) {
1099 return chan->device->device_terminate_all(chan);
1107 * @chan: The channel to synchronize
1123 static inline void dmaengine_synchronize(struct dma_chan *chan)
1127 if (chan->device->device_synchronize) {
1128 chan->device->device_synchronize(chan);
1134 * @chan: The channel for which to terminate the transfers
1146 static inline int dmaengine_terminate_sync(struct dma_chan *chan)
1150 ret = dmaengine_terminate_async(chan);
1155 dmaengine_synchronize(chan);
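The split between dmaengine_terminate_async() and dmaengine_synchronize() mirrors the usage described in the kernel-doc for these helpers: abort from atomic context, then wait for callbacks to finish where sleeping is allowed; dmaengine_terminate_sync() does both when the caller may sleep, and dmaengine_terminate_all() is the older, deprecated form. A minimal sketch:

	/* Callable under a spinlock or from an interrupt handler. */
	static void my_abort_atomic(struct dma_chan *chan)
	{
		dmaengine_terminate_async(chan);
	}

	/* Callable from process context during teardown. */
	static void my_abort_finish(struct dma_chan *chan)
	{
		/* Waits until no callback from an earlier transfer can still run. */
		dmaengine_synchronize(chan);
		/* Now it is safe to free memory those callbacks were using. */
	}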
1160 static inline int dmaengine_pause(struct dma_chan *chan)
1162 if (chan->device->device_pause) {
1163 return chan->device->device_pause(chan);
1169 static inline int dmaengine_resume(struct dma_chan *chan)
1171 if (chan->device->device_resume) {
1172 return chan->device->device_resume(chan);
1178 static inline enum dma_status dmaengine_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
1181 return chan->device->device_tx_status(chan, cookie, state);
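Pause, residue readout via dmaengine_tx_status(), and resume are commonly combined when a driver needs to know how much of an in-flight (often cyclic) transfer is still outstanding. A sketch, assuming cookie is the value returned by dmaengine_submit() for that transfer:

	static u32 my_read_residue(struct dma_chan *chan, dma_cookie_t cookie)
	{
		struct dma_tx_state state;

		dmaengine_pause(chan);
		dmaengine_tx_status(chan, cookie, &state);
		dmaengine_resume(chan);

		return state.residue;	/* bytes left to transfer */
	}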
1329 void dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx, struct dma_chan *chan);
1374 * @chan: target DMA channel
1379 static inline void dma_async_issue_pending(struct dma_chan *chan)
1381 chan->device->device_issue_pending(chan);
1386 * @chan: DMA channel
1395 static inline enum dma_status dma_async_is_tx_complete(struct dma_chan *chan, dma_cookie_t cookie, dma_cookie_t *last,
1401 status = chan->device->device_tx_status(chan, cookie, &state);
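dma_async_is_tx_complete() can be polled instead of relying on a DMA_PREP_INTERRUPT callback, which is essentially what dma_sync_wait() does internally; dma_async_is_complete() then compares a cookie against the last/used pair returned here. A busy-wait sketch (a real caller would add a timeout):

	static enum dma_status my_poll(struct dma_chan *chan, dma_cookie_t cookie)
	{
		enum dma_status status;
		dma_cookie_t last, used;

		dma_async_issue_pending(chan);
		do {
			status = dma_async_is_tx_complete(chan, cookie, &last, &used);
			cpu_relax();
		} while (status == DMA_IN_PROGRESS);

		return status;
	}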
1412 * dma_async_is_complete - test a cookie against chan state
1448 enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie);
1457 void dma_release_channel(struct dma_chan *chan);
1458 int dma_get_slave_caps(struct dma_chan *chan, struct dma_slave_caps *caps);
1464 static inline enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie)
1488 static inline void dma_release_channel(struct dma_chan *chan)
1491 static inline int dma_get_slave_caps(struct dma_chan *chan, struct dma_slave_caps *caps)
1502 ret = dma_get_slave_caps(tx->chan, &caps);
1540 int dma_async_device_channel_register(struct dma_device *device, struct dma_chan *chan);
1541 void dma_async_device_channel_unregister(struct dma_device *device, struct dma_chan *chan);
1556 struct dma_chan *chan;
1558 chan = dma_request_slave_channel(dev, name);
1559 if (chan) {
1560 return chan;
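For new consumers, dma_request_chan() is the usual entry point: it returns an ERR_PTR() on failure and can propagate -EPROBE_DEFER. A probe/remove sketch, assuming the device's binding names the channel "tx":

	#include <linux/dmaengine.h>

	static int my_probe_dma(struct device *dev, struct dma_chan **out)
	{
		struct dma_chan *chan;

		chan = dma_request_chan(dev, "tx");
		if (IS_ERR(chan))
			return PTR_ERR(chan);	/* may be -EPROBE_DEFER */

		*out = chan;
		return 0;
	}

	static void my_remove_dma(struct dma_chan *chan)
	{
		dmaengine_terminate_sync(chan);
		dma_release_channel(chan);
	}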