Lines Matching refs:tx_chn

105 bool tx_chn)
135 if (tx_chn && !(thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) {
140 if (!tx_chn && (thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) {
158 if (tx_chn)
170 static void k3_udma_glue_dump_tx_chn(struct k3_udma_glue_tx_channel *tx_chn)
172 struct device *dev = tx_chn->common.dev;
178 tx_chn->udma_tchan_id,
179 tx_chn->common.src_thread,
180 tx_chn->common.dst_thread);
202 static int k3_udma_glue_cfg_tx_chn(struct k3_udma_glue_tx_channel *tx_chn)
204 const struct udma_tisci_rm *tisci_rm = tx_chn->common.tisci_rm;
218 req.index = tx_chn->udma_tchan_id;
219 if (tx_chn->tx_pause_on_err)
221 if (tx_chn->tx_filt_einfo)
223 if (tx_chn->tx_filt_pswords)
226 if (tx_chn->tx_supr_tdpkt)
228 req.tx_fetch_size = tx_chn->common.hdesc_size >> 2;
229 req.txcq_qnum = k3_ringacc_get_ring_id(tx_chn->ringtxcq);
230 req.tx_atype = tx_chn->common.atype;
238 struct k3_udma_glue_tx_channel *tx_chn;
241 tx_chn = devm_kzalloc(dev, sizeof(*tx_chn), GFP_KERNEL);
242 if (!tx_chn)
245 tx_chn->common.dev = dev;
246 tx_chn->common.swdata_size = cfg->swdata_size;
247 tx_chn->tx_pause_on_err = cfg->tx_pause_on_err;
248 tx_chn->tx_filt_einfo = cfg->tx_filt_einfo;
249 tx_chn->tx_filt_pswords = cfg->tx_filt_pswords;
250 tx_chn->tx_supr_tdpkt = cfg->tx_supr_tdpkt;
254 &tx_chn->common, true);
258 tx_chn->common.hdesc_size = cppi5_hdesc_calc_size(tx_chn->common.epib,
259 tx_chn->common.psdata_size,
260 tx_chn->common.swdata_size);
263 tx_chn->udma_tchanx = xudma_tchan_get(tx_chn->common.udmax, -1);
264 if (IS_ERR(tx_chn->udma_tchanx)) {
265 ret = PTR_ERR(tx_chn->udma_tchanx);
269 tx_chn->udma_tchan_id = xudma_tchan_get_id(tx_chn->udma_tchanx);
271 atomic_set(&tx_chn->free_pkts, cfg->txcq_cfg.size);
274 ret = k3_ringacc_request_rings_pair(tx_chn->common.ringacc,
275 tx_chn->udma_tchan_id, -1,
276 &tx_chn->ringtx,
277 &tx_chn->ringtxcq);
283 ret = k3_ringacc_ring_cfg(tx_chn->ringtx, &cfg->tx_cfg);
289 ret = k3_ringacc_ring_cfg(tx_chn->ringtxcq, &cfg->txcq_cfg);
296 tx_chn->common.src_thread =
297 xudma_dev_get_psil_base(tx_chn->common.udmax) +
298 tx_chn->udma_tchan_id;
300 ret = k3_udma_glue_cfg_tx_chn(tx_chn);
306 ret = xudma_navss_psil_pair(tx_chn->common.udmax,
307 tx_chn->common.src_thread,
308 tx_chn->common.dst_thread);
314 tx_chn->psil_paired = true;
317 k3_udma_glue_disable_tx_chn(tx_chn);
319 k3_udma_glue_dump_tx_chn(tx_chn);
321 return tx_chn;
324 k3_udma_glue_release_tx_chn(tx_chn);
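The lines above are from the TX channel request path and its error unwinding. For context, below is a minimal, hedged consumer-side sketch of requesting such a channel, modelled loosely on how existing K3 networking drivers use this API; the channel name "tx0", TX_DESC_NUM and the swdata size are illustrative assumptions, not values taken from this listing.

/* Hedged sketch: requesting a TX glue channel. "tx0", TX_DESC_NUM and
 * the swdata size are illustrative assumptions. */
#include <linux/dma/k3-udma-glue.h>
#include <linux/soc/ti/k3-ringacc.h>

#define TX_DESC_NUM	128	/* assumed descriptor/ring depth */

static struct k3_udma_glue_tx_channel *example_request_tx(struct device *dev)
{
	struct k3_udma_glue_tx_channel_cfg tx_cfg = { };
	struct k3_ring_cfg ring_cfg = {
		.elm_size = K3_RINGACC_RING_ELSIZE_8,
		.mode = K3_RINGACC_RING_MODE_RING,
		.flags = 0,
	};

	tx_cfg.swdata_size = 16;		/* room for a driver cookie in swdata */
	tx_cfg.tx_cfg = ring_cfg;
	tx_cfg.txcq_cfg = ring_cfg;
	tx_cfg.tx_cfg.size = TX_DESC_NUM;
	tx_cfg.txcq_cfg.size = TX_DESC_NUM;	/* also seeds free_pkts (see line 271 above) */

	/* "tx0" must match a dma-names entry in the consumer's DT node */
	return k3_udma_glue_request_tx_chn(dev, "tx0", &tx_cfg);
}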
329 void k3_udma_glue_release_tx_chn(struct k3_udma_glue_tx_channel *tx_chn)
331 if (tx_chn->psil_paired) {
332 xudma_navss_psil_unpair(tx_chn->common.udmax,
333 tx_chn->common.src_thread,
334 tx_chn->common.dst_thread);
335 tx_chn->psil_paired = false;
338 if (!IS_ERR_OR_NULL(tx_chn->udma_tchanx))
339 xudma_tchan_put(tx_chn->common.udmax,
340 tx_chn->udma_tchanx);
342 if (tx_chn->ringtxcq)
343 k3_ringacc_ring_free(tx_chn->ringtxcq);
345 if (tx_chn->ringtx)
346 k3_ringacc_ring_free(tx_chn->ringtx);
350 int k3_udma_glue_push_tx_chn(struct k3_udma_glue_tx_channel *tx_chn,
356 if (!atomic_add_unless(&tx_chn->free_pkts, -1, 0))
359 ringtxcq_id = k3_ringacc_get_ring_id(tx_chn->ringtxcq);
362 return k3_ringacc_ring_push(tx_chn->ringtx, &desc_dma);
366 int k3_udma_glue_pop_tx_chn(struct k3_udma_glue_tx_channel *tx_chn,
371 ret = k3_ringacc_ring_pop(tx_chn->ringtxcq, desc_dma);
373 atomic_inc(&tx_chn->free_pkts);
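push_tx_chn() takes one slot from free_pkts, looks up the completion-ring id (so completions land on ringtxcq) and pushes the descriptor to ringtx; pop_tx_chn() pops a completed descriptor address from ringtxcq and gives the slot back. A hedged usage sketch follows; prepare_hdesc(), free_hdesc() and free_hdesc_by_dma() are hypothetical pool helpers, not part of the glue API.

/* Hedged sketch of a TX submit/completion pair.  prepare_hdesc(),
 * free_hdesc() and free_hdesc_by_dma() are hypothetical pool helpers. */
#include <linux/dma/ti-cppi5.h>
#include <linux/dma/k3-udma-glue.h>

struct cppi5_host_desc_t *prepare_hdesc(dma_addr_t buf_dma, u32 pkt_len,
					dma_addr_t *desc_dma);		/* hypothetical */
void free_hdesc(struct cppi5_host_desc_t *desc, dma_addr_t desc_dma);	/* hypothetical */
void free_hdesc_by_dma(dma_addr_t desc_dma);				/* hypothetical */

static int example_xmit(struct k3_udma_glue_tx_channel *tx_chn,
			dma_addr_t buf_dma, u32 pkt_len)
{
	struct cppi5_host_desc_t *desc;
	dma_addr_t desc_dma;
	int ret;

	desc = prepare_hdesc(buf_dma, pkt_len, &desc_dma);
	if (!desc)
		return -ENOMEM;

	/* fails with -ENOMEM once free_pkts (the txcq size) is exhausted */
	ret = k3_udma_glue_push_tx_chn(tx_chn, desc, desc_dma);
	if (ret)
		free_hdesc(desc, desc_dma);
	return ret;
}

static void example_tx_complete(struct k3_udma_glue_tx_channel *tx_chn)
{
	dma_addr_t desc_dma;

	/* each successful pop returns one slot to free_pkts */
	while (!k3_udma_glue_pop_tx_chn(tx_chn, &desc_dma))
		free_hdesc_by_dma(desc_dma);
}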
379 int k3_udma_glue_enable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn)
381 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_PEER_RT_EN_REG,
384 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG,
387 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn en");
392 void k3_udma_glue_disable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn)
394 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis1");
396 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, 0);
398 xudma_tchanrt_write(tx_chn->udma_tchanx,
400 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis2");
404 void k3_udma_glue_tdown_tx_chn(struct k3_udma_glue_tx_channel *tx_chn,
410 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown1");
412 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG,
415 val = xudma_tchanrt_read(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG);
418 val = xudma_tchanrt_read(tx_chn->udma_tchanx,
422 dev_err(tx_chn->common.dev, "TX tdown timeout\n");
428 val = xudma_tchanrt_read(tx_chn->udma_tchanx,
431 dev_err(tx_chn->common.dev, "TX tdown peer not stopped\n");
432 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown2");
436 void k3_udma_glue_reset_tx_chn(struct k3_udma_glue_tx_channel *tx_chn,
444 if (tx_chn->ringtxcq)
445 k3_ringacc_ring_reset(tx_chn->ringtxcq);
454 occ_tx = k3_ringacc_ring_get_occ(tx_chn->ringtx);
455 dev_dbg(tx_chn->common.dev, "TX reset occ_tx %u\n", occ_tx);
458 ret = k3_ringacc_ring_pop(tx_chn->ringtx, &desc_dma);
460 dev_err(tx_chn->common.dev, "TX reset pop %d\n", ret);
466 k3_ringacc_ring_reset_dma(tx_chn->ringtx, occ_tx);
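tdown_tx_chn() requests channel teardown (optionally polling until it completes), and reset_tx_chn() drains whatever is still queued on ringtx, calling a caller-supplied cleanup callback for each descriptor before resetting the rings. A hedged shutdown-order sketch follows; example_free_hdesc() is a hypothetical per-descriptor free routine.

/* Hedged sketch of an orderly TX shutdown.  example_free_hdesc() is a
 * hypothetical routine that unmaps and frees the descriptor at desc_dma. */
#include <linux/dma/k3-udma-glue.h>

void example_free_hdesc(void *pool, dma_addr_t desc_dma);	/* hypothetical */

static void example_tx_cleanup(void *data, dma_addr_t desc_dma)
{
	/* 'data' is whatever was handed to k3_udma_glue_reset_tx_chn() */
	example_free_hdesc(data, desc_dma);
}

static void example_tx_shutdown(struct k3_udma_glue_tx_channel *tx_chn,
				void *desc_pool)
{
	k3_udma_glue_tdown_tx_chn(tx_chn, true);	/* sync: poll for teardown */
	k3_udma_glue_reset_tx_chn(tx_chn, desc_pool, example_tx_cleanup);
	k3_udma_glue_disable_tx_chn(tx_chn);
	k3_udma_glue_release_tx_chn(tx_chn);
}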
470 u32 k3_udma_glue_tx_get_hdesc_size(struct k3_udma_glue_tx_channel *tx_chn)
472 return tx_chn->common.hdesc_size;
476 u32 k3_udma_glue_tx_get_txcq_id(struct k3_udma_glue_tx_channel *tx_chn)
478 return k3_ringacc_get_ring_id(tx_chn->ringtxcq);
482 int k3_udma_glue_tx_get_irq(struct k3_udma_glue_tx_channel *tx_chn)
484 tx_chn->virq = k3_ringacc_get_ring_irq_num(tx_chn->ringtxcq);
486 return tx_chn->virq;
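The final getters expose what a consumer needs in order to size its descriptor pool (hdesc_size), identify the TX completion ring, and wire up the completion interrupt. A hedged sketch; example_tx_irq_handler() and the priv cookie are hypothetical.

/* Hedged sketch: descriptor sizing and completion IRQ hookup.
 * example_tx_irq_handler() and 'priv' are hypothetical. */
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/dma/k3-udma-glue.h>

irqreturn_t example_tx_irq_handler(int irq, void *dev_id);	/* hypothetical */

static int example_tx_init_irq(struct device *dev,
			       struct k3_udma_glue_tx_channel *tx_chn,
			       void *priv)
{
	u32 hdesc_size = k3_udma_glue_tx_get_hdesc_size(tx_chn);
	int irq;

	/* hdesc_size reflects the epib/psdata/swdata layout chosen at request
	 * time; use it when carving the hwdesc area of the descriptor pool */
	dev_dbg(dev, "TX hdesc size %u, txcq ring %u\n", hdesc_size,
		k3_udma_glue_tx_get_txcq_id(tx_chn));

	irq = k3_udma_glue_tx_get_irq(tx_chn);
	if (irq <= 0)
		return irq ? irq : -ENXIO;

	return devm_request_irq(dev, irq, example_tx_irq_handler,
				IRQF_TRIGGER_HIGH, dev_name(dev), priv);
}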