Lines Matching defs:ivc (NVIDIA Tegra IVC inter-VM communication shared-memory queue driver)
6 #include <soc/tegra/ivc.h>
71 static inline void tegra_ivc_invalidate(struct tegra_ivc *ivc, dma_addr_t phys)
73 if (!ivc->peer)
76 dma_sync_single_for_cpu(ivc->peer, phys, TEGRA_IVC_ALIGN,
80 static inline void tegra_ivc_flush(struct tegra_ivc *ivc, dma_addr_t phys)
82 if (!ivc->peer)
85 dma_sync_single_for_device(ivc->peer, phys, TEGRA_IVC_ALIGN,
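
These two helpers keep the CPU's view of the shared control data coherent when the queue memory is DMA-mapped toward a peer device; when ivc->peer is NULL the memory is assumed coherent and they return early. Both sync exactly TEGRA_IVC_ALIGN bytes, which only works if each end's header fields sit in their own cache-line-sized block. A hedged reconstruction of that layout, using kernel types; the union/padding details are assumptions, since the listing shows only the field accesses:

#define TEGRA_IVC_ALIGN 64	/* assumed: one cache line */

/*
 * Assumed shared-header layout: the fields written by the
 * transmitting end and the fields written by the receiving end each
 * occupy their own TEGRA_IVC_ALIGN-sized block, so either block can
 * be flushed or invalidated independently without false sharing.
 */
struct tegra_ivc_header {
	union {
		struct {
			u32 count;	/* frames produced, free-running */
			u32 state;	/* TEGRA_IVC_STATE_* handshake */
		} tx;
		u8 pad_tx[TEGRA_IVC_ALIGN];
	};
	union {
		struct {
			u32 count;	/* frames consumed, free-running */
		} rx;
		u8 pad_rx[TEGRA_IVC_ALIGN];
	};
};
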
89 static inline bool tegra_ivc_empty(struct tegra_ivc *ivc,
110 if (tx - rx > ivc->num_frames)
116 static inline bool tegra_ivc_full(struct tegra_ivc *ivc,
126 return tx - rx >= ivc->num_frames;
129 static inline u32 tegra_ivc_available(struct tegra_ivc *ivc,
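
The fill-level predicates never reset anything: tx.count and rx.count are free-running u32 counters, and unsigned subtraction keeps their distance correct across wraparound. Equal counts mean empty, a distance of num_frames means full, and a distance greater than num_frames is only possible if the (untrusted) peer corrupted a counter, which the emptiness check above appears to treat as empty to fail safe. A minimal userspace illustration of the arithmetic:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t num_frames = 16;
	uint32_t tx = UINT32_MAX;	/* writer about to wrap */
	uint32_t rx = UINT32_MAX - 3;	/* reader is 3 frames behind */

	printf("available: %u\n", tx - rx);		  /* 3 */
	printf("empty:     %d\n", tx - rx == 0);	  /* 0 */
	printf("full:      %d\n", tx - rx >= num_frames); /* 0 */

	tx++;	/* writer wraps to 0; unsigned math still says 4 */
	printf("after wrap, available: %u\n", tx - rx);	  /* 4 */
	return 0;
}
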
144 static inline void tegra_ivc_advance_tx(struct tegra_ivc *ivc)
146 WRITE_ONCE(ivc->tx.channel->tx.count,
147 READ_ONCE(ivc->tx.channel->tx.count) + 1);
149 if (ivc->tx.position == ivc->num_frames - 1)
150 ivc->tx.position = 0;
152 ivc->tx.position++;
155 static inline void tegra_ivc_advance_rx(struct tegra_ivc *ivc)
157 WRITE_ONCE(ivc->rx.channel->rx.count,
158 READ_ONCE(ivc->rx.channel->rx.count) + 1);
160 if (ivc->rx.position == ivc->num_frames - 1)
161 ivc->rx.position = 0;
163 ivc->rx.position++;
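
Each direction pairs the shared free-running counter with a private position, the frame index to use next; bumping both together and wrapping position at num_frames - 1 maintains position == count % num_frames without a division per frame (and, assuming num_frames is a power of two, as IVC configurations conventionally require, the invariant even survives the u32 counter wrapping). The READ_ONCE/WRITE_ONCE pair keeps the compiler from tearing or caching the counter update the peer polls. A condensed sketch of the invariant:

/* Sketch: the wrap-at-the-end update is a modulo in disguise. The
 * real code accesses the shared counter through READ_ONCE/WRITE_ONCE,
 * as the lines above show. */
static void advance(u32 *count, u32 *position, u32 num_frames)
{
	(*count)++;

	if (*position == num_frames - 1)
		*position = 0;
	else
		(*position)++;

	/* holds as long as position started as count % num_frames */
}
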
166 static inline int tegra_ivc_check_read(struct tegra_ivc *ivc)
178 if (ivc->tx.channel->tx.state != TEGRA_IVC_STATE_ESTABLISHED)
188 if (!tegra_ivc_empty(ivc, ivc->rx.channel))
191 tegra_ivc_invalidate(ivc, ivc->rx.phys + offset);
193 if (tegra_ivc_empty(ivc, ivc->rx.channel))
199 static inline int tegra_ivc_check_write(struct tegra_ivc *ivc)
203 if (ivc->tx.channel->tx.state != TEGRA_IVC_STATE_ESTABLISHED)
206 if (!tegra_ivc_full(ivc, ivc->tx.channel))
209 tegra_ivc_invalidate(ivc, ivc->tx.phys + offset);
211 if (tegra_ivc_full(ivc, ivc->tx.channel))
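
check_read and check_write share a two-step pattern: bail out unless the handshake has reached TEGRA_IVC_STATE_ESTABLISHED, test the locally cached copy of the peer's counter, and only if that cached view reports no work pay for an invalidate of the peer-owned counter line and test again. A condensed sketch of the read side; the error codes are assumptions, since the listing shows only the control flow:

static int check_read_sketch(struct tegra_ivc *ivc)
{
	unsigned int offset = offsetof(struct tegra_ivc_header, tx.count);

	/* no data transfer before both ends finish the handshake */
	if (ivc->tx.channel->tx.state != TEGRA_IVC_STATE_ESTABLISHED)
		return -ECONNRESET;	/* assumed error code */

	/* fast path: the cached counter already shows pending frames */
	if (!tegra_ivc_empty(ivc, ivc->rx.channel))
		return 0;

	/* slow path: refresh the peer-owned counter, then retest */
	tegra_ivc_invalidate(ivc, ivc->rx.phys + offset);

	if (tegra_ivc_empty(ivc, ivc->rx.channel))
		return -ENOSPC;		/* assumed error code */

	return 0;
}
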
217 static void *tegra_ivc_frame_virt(struct tegra_ivc *ivc,
221 if (WARN_ON(frame >= ivc->num_frames))
224 return (void *)(header + 1) + ivc->frame_size * frame;
227 static inline dma_addr_t tegra_ivc_frame_phys(struct tegra_ivc *ivc,
233 offset = sizeof(struct tegra_ivc_header) + ivc->frame_size * frame;
238 static inline void tegra_ivc_invalidate_frame(struct tegra_ivc *ivc,
244 if (!ivc->peer || WARN_ON(frame >= ivc->num_frames))
247 phys = tegra_ivc_frame_phys(ivc, phys, frame) + offset;
249 dma_sync_single_for_cpu(ivc->peer, phys, size, DMA_FROM_DEVICE);
252 static inline void tegra_ivc_flush_frame(struct tegra_ivc *ivc,
258 if (!ivc->peer || WARN_ON(frame >= ivc->num_frames))
261 phys = tegra_ivc_frame_phys(ivc, phys, frame) + offset;
263 dma_sync_single_for_device(ivc->peer, phys, size, DMA_TO_DEVICE);
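
Frame addressing is plain offset arithmetic from the shared header: frame n starts sizeof(struct tegra_ivc_header) + n * frame_size bytes into the queue, which in virtual terms is (void *)(header + 1) plus the frame offset. The per-frame sync helpers take an extra offset and size so a caller can invalidate or flush only the bytes it touched. A sketch of the address math under the header layout assumed earlier:

/* Sketch: virtual and bus addresses of frame n for one queue. */
static void *frame_virt(struct tegra_ivc_header *header,
			size_t frame_size, unsigned int n)
{
	return (void *)(header + 1) + frame_size * n;
}

static dma_addr_t frame_phys(dma_addr_t queue_phys,
			     size_t frame_size, unsigned int n)
{
	return queue_phys + sizeof(struct tegra_ivc_header) +
	       frame_size * n;
}
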
267 void *tegra_ivc_read_get_next_frame(struct tegra_ivc *ivc)
271 if (WARN_ON(ivc == NULL))
274 err = tegra_ivc_check_read(ivc);
279 * Order observation of ivc->rx.position potentially indicating new
284 tegra_ivc_invalidate_frame(ivc, ivc->rx.phys, ivc->rx.position, 0,
285 ivc->frame_size);
287 return tegra_ivc_frame_virt(ivc, ivc->rx.channel, ivc->rx.position);
291 int tegra_ivc_read_advance(struct tegra_ivc *ivc)
302 err = tegra_ivc_check_read(ivc);
306 tegra_ivc_advance_rx(ivc);
308 tegra_ivc_flush(ivc, ivc->rx.phys + rx);
311 * Ensure our write to ivc->rx.position occurs before our read from
312 * ivc->tx.position.
321 tegra_ivc_invalidate(ivc, ivc->rx.phys + tx);
323 if (tegra_ivc_available(ivc, ivc->rx.channel) == ivc->num_frames - 1)
324 ivc->notify(ivc, ivc->notify_data);
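
For a client, receiving is a three-step loop: fetch the next pending frame, consume the payload, then tegra_ivc_read_advance() to return the slot, which also fires ivc->notify exactly when the queue transitions away from full (available == num_frames - 1 after the advance), so a blocked writer is woken once rather than per frame. A hedged usage sketch, assuming the usual ERR_PTR convention for the void * return; process_frame() is a hypothetical client handler:

/* Sketch: drain every pending frame from an established channel. */
static void drain_rx(struct tegra_ivc *ivc, size_t frame_size)
{
	void *frame;

	while (!IS_ERR(frame = tegra_ivc_read_get_next_frame(ivc))) {
		process_frame(frame, frame_size);	/* hypothetical */

		/* hand the slot back to the writer */
		tegra_ivc_read_advance(ivc);
	}
}
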
331 void *tegra_ivc_write_get_next_frame(struct tegra_ivc *ivc)
335 err = tegra_ivc_check_write(ivc);
339 return tegra_ivc_frame_virt(ivc, ivc->tx.channel, ivc->tx.position);
344 int tegra_ivc_write_advance(struct tegra_ivc *ivc)
350 err = tegra_ivc_check_write(ivc);
354 tegra_ivc_flush_frame(ivc, ivc->tx.phys, ivc->tx.position, 0,
355 ivc->frame_size);
359 * ivc->tx.position.
363 tegra_ivc_advance_tx(ivc);
364 tegra_ivc_flush(ivc, ivc->tx.phys + tx);
367 * Ensure our write to ivc->tx.position occurs before our read from
368 * ivc->rx.position.
377 tegra_ivc_invalidate(ivc, ivc->tx.phys + rx);
379 if (tegra_ivc_available(ivc, ivc->tx.channel) == 1)
380 ivc->notify(ivc, ivc->notify_data);
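
Transmit mirrors it: reserve the next free frame, fill it, then tegra_ivc_write_advance(), which flushes the payload before publishing the new tx.count and notifies the peer exactly when the queue transitions away from empty (available == 1 after the advance). A usage sketch under the same ERR_PTR assumption:

/* Sketch: send one message on an established channel; len must not
 * exceed the channel's frame_size. */
static int send_msg(struct tegra_ivc *ivc, const void *msg, size_t len)
{
	void *frame = tegra_ivc_write_get_next_frame(ivc);

	if (IS_ERR(frame))
		return PTR_ERR(frame);	/* full queue or reset channel */

	memcpy(frame, msg, len);

	/* flush the payload, bump tx.count, maybe ring the doorbell */
	return tegra_ivc_write_advance(ivc);
}
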
386 void tegra_ivc_reset(struct tegra_ivc *ivc)
390 ivc->tx.channel->tx.state = TEGRA_IVC_STATE_SYNC;
391 tegra_ivc_flush(ivc, ivc->tx.phys + offset);
392 ivc->notify(ivc, ivc->notify_data);
416 int tegra_ivc_notified(struct tegra_ivc *ivc)
422 tegra_ivc_invalidate(ivc, ivc->rx.phys + offset);
423 state = READ_ONCE(ivc->rx.channel->tx.state);
439 ivc->tx.channel->tx.count = 0;
440 ivc->rx.channel->rx.count = 0;
442 ivc->tx.position = 0;
443 ivc->rx.position = 0;
455 ivc->tx.channel->tx.state = TEGRA_IVC_STATE_ACK;
456 tegra_ivc_flush(ivc, ivc->tx.phys + offset);
461 ivc->notify(ivc, ivc->notify_data);
463 } else if (ivc->tx.channel->tx.state == TEGRA_IVC_STATE_SYNC &&
478 ivc->tx.channel->tx.count = 0;
479 ivc->rx.channel->rx.count = 0;
481 ivc->tx.position = 0;
482 ivc->rx.position = 0;
495 ivc->tx.channel->tx.state = TEGRA_IVC_STATE_ESTABLISHED;
496 tegra_ivc_flush(ivc, ivc->tx.phys + offset);
501 ivc->notify(ivc, ivc->notify_data);
503 } else if (ivc->tx.channel->tx.state == TEGRA_IVC_STATE_ACK) {
519 ivc->tx.channel->tx.state = TEGRA_IVC_STATE_ESTABLISHED;
520 tegra_ivc_flush(ivc, ivc->tx.phys + offset);
525 ivc->notify(ivc, ivc->notify_data);
536 if (ivc->tx.channel->tx.state != TEGRA_IVC_STATE_ESTABLISHED)
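
tegra_ivc_reset() and tegra_ivc_notified() together implement a three-way reset handshake: reset writes TEGRA_IVC_STATE_SYNC into the local tx.state and notifies the peer, and each subsequent notification drives tegra_ivc_notified() through SYNC -> ACK -> ESTABLISHED, zeroing both counters along the way and returning an error until the local end is established. A hedged sketch of how a client drives this from its doorbell interrupt; the in-flight error code is assumed to be -EAGAIN, and wake_all() is a hypothetical helper:

static void channel_start(struct tegra_ivc *ivc)
{
	tegra_ivc_reset(ivc);	/* tx.state = SYNC, kick the peer */
}

static irqreturn_t channel_doorbell(int irq, void *data)
{
	struct tegra_ivc *ivc = data;

	if (tegra_ivc_notified(ivc) == 0) {
		/* handshake complete: frames may now flow */
		wake_all();	/* hypothetical */
	}

	return IRQ_HANDLED;
}
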
612 int tegra_ivc_init(struct tegra_ivc *ivc, struct device *peer, void *rx,
615 void (*notify)(struct tegra_ivc *ivc, void *data),
621 if (WARN_ON(!ivc || !notify))
639 ivc->rx.phys = dma_map_single(peer, rx, queue_size,
641 if (dma_mapping_error(peer, ivc->rx.phys))
644 ivc->tx.phys = dma_map_single(peer, tx, queue_size,
646 if (dma_mapping_error(peer, ivc->tx.phys)) {
647 dma_unmap_single(peer, ivc->rx.phys, queue_size,
652 ivc->rx.phys = rx_phys;
653 ivc->tx.phys = tx_phys;
656 ivc->rx.channel = rx;
657 ivc->tx.channel = tx;
658 ivc->peer = peer;
659 ivc->notify = notify;
660 ivc->notify_data = data;
661 ivc->frame_size = frame_size;
662 ivc->num_frames = num_frames;
668 ivc->tx.position = 0;
669 ivc->rx.position = 0;
675 void tegra_ivc_cleanup(struct tegra_ivc *ivc)
677 if (ivc->peer) {
678 size_t size = tegra_ivc_total_queue_size(ivc->num_frames *
679 ivc->frame_size);
681 dma_unmap_single(ivc->peer, ivc->rx.phys, size,
683 dma_unmap_single(ivc->peer, ivc->tx.phys, size,
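
Finally, tegra_ivc_init() wires a channel together in one of two modes: given a peer device it DMA-maps both queue buffers itself (unmapping rx again if mapping tx fails), otherwise it takes the caller-supplied rx_phys/tx_phys bus addresses on trust; tegra_ivc_cleanup() undoes the mapping, recomputing the mapped length with tegra_ivc_total_queue_size(). An end-to-end usage sketch; the frame geometry and the remaining parameter order of tegra_ivc_init() are assumptions based on the declaration fragments above:

static void my_notify(struct tegra_ivc *ivc, void *data)
{
	/* placeholder: ring the peer's doorbell, e.g. via a mailbox */
}

/* Sketch: bring up a 16-frame, 64-byte-frame channel over two
 * caller-allocated queue buffers, letting init do the DMA mapping. */
static int channel_probe(struct tegra_ivc *ivc, struct device *peer,
			 void *rx, void *tx)
{
	int err;

	err = tegra_ivc_init(ivc, peer, rx, 0, tx, 0, 16, 64,
			     my_notify, NULL);
	if (err < 0)
		return err;

	tegra_ivc_reset(ivc);	/* start the handshake */
	return 0;
}

static void channel_remove(struct tegra_ivc *ivc)
{
	tegra_ivc_cleanup(ivc);	/* unmaps both queues if peer was set */
}
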