Lines Matching defs:mhi_chan
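
These hits appear to come from the Linux kernel's MHI core transfer path (drivers/bus/mhi/core/main.c); the leading number on each entry is the line number within that file. Together they cover channel doorbells, channel-to-device binding, transfer-completion parsing, buffer queueing, and channel prepare/reset.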

108 		      struct mhi_chan *mhi_chan)
110 struct mhi_ring *ring = &mhi_chan->tre_ring;
115 mhi_chan->db_cfg.process_db(mhi_cntrl, &mhi_chan->db_cfg,
230 struct mhi_chan *ul_chan, *dl_chan;
304 struct mhi_chan *mhi_chan;
309 mhi_chan = mhi_cntrl->mhi_chan;
310 for (i = 0; i < mhi_cntrl->max_chan; i++, mhi_chan++) {
311 if (!mhi_chan->configured || mhi_chan->mhi_dev ||
312 !(mhi_chan->ee_mask & BIT(mhi_cntrl->ee)))
319 switch (mhi_chan->dir) {
321 mhi_dev->ul_chan = mhi_chan;
322 mhi_dev->ul_chan_id = mhi_chan->chan;
326 mhi_dev->dl_chan = mhi_chan;
327 mhi_dev->dl_chan_id = mhi_chan->chan;
336 mhi_chan->mhi_dev = mhi_dev;
339 if ((i + 1) < mhi_cntrl->max_chan && mhi_chan[1].configured) {
340 if (!strcmp(mhi_chan[1].name, mhi_chan->name)) {
342 mhi_chan++;
343 if (mhi_chan->dir == DMA_TO_DEVICE) {
344 mhi_dev->ul_chan = mhi_chan;
345 mhi_dev->ul_chan_id = mhi_chan->chan;
347 mhi_dev->dl_chan = mhi_chan;
348 mhi_dev->dl_chan_id = mhi_chan->chan;
351 mhi_chan->mhi_dev = mhi_dev;
356 mhi_dev->name = mhi_chan->name;
395 struct mhi_chan *mhi_chan = mhi_event->mhi_chan;
396 struct mhi_device *mhi_dev = mhi_chan->mhi_dev;
496 struct mhi_chan *mhi_chan)
505 buf_ring = &mhi_chan->buf_ring;
506 tre_ring = &mhi_chan->tre_ring;
518 write_lock_irqsave(&mhi_chan->lock, flags);
520 read_lock_bh(&mhi_chan->lock);
522 if (mhi_chan->ch_state != MHI_CH_STATE_ENABLED)
548 result.dir = mhi_chan->dir;
572 read_unlock_bh(&mhi_chan->lock);
575 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
577 if (mhi_chan->dir == DMA_TO_DEVICE)
585 if (mhi_chan->pre_alloc) {
586 if (mhi_queue_buf(mhi_chan->mhi_dev,
587 mhi_chan->dir,
592 mhi_chan->chan);
597 read_lock_bh(&mhi_chan->lock);
606 mhi_chan->db_cfg.db_mode = 1;
610 mhi_ring_chan_db(mhi_cntrl, mhi_chan);
623 write_unlock_irqrestore(&mhi_chan->lock, flags);
625 read_unlock_bh(&mhi_chan->lock);
632 struct mhi_chan *mhi_chan)
641 buf_ring = &mhi_chan->buf_ring;
642 tre_ring = &mhi_chan->tre_ring;
659 result.dir = mhi_chan->dir;
661 read_lock_bh(&mhi_chan->lock);
663 if (mhi_chan->ch_state != MHI_CH_STATE_ENABLED)
669 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
689 read_unlock_bh(&mhi_chan->lock);
701 struct mhi_chan *mhi_chan;
715 mhi_cntrl->mhi_chan[chan].configured) {
716 mhi_chan = &mhi_cntrl->mhi_chan[chan];
717 write_lock_bh(&mhi_chan->lock);
718 mhi_chan->ccs = MHI_TRE_GET_EV_CODE(tre);
719 complete(&mhi_chan->completion);
720 write_unlock_bh(&mhi_chan->lock);
737 struct mhi_chan *mhi_chan;
867 mhi_chan = &mhi_cntrl->mhi_chan[chan];
868 if (!mhi_chan->configured)
870 parse_xfer_event(mhi_cntrl, local_rp, mhi_chan);
911 struct mhi_chan *mhi_chan;
938 mhi_cntrl->mhi_chan[chan].configured) {
939 mhi_chan = &mhi_cntrl->mhi_chan[chan];
942 parse_xfer_event(mhi_cntrl, local_rp, mhi_chan);
945 parse_rsc_event(mhi_cntrl, local_rp, mhi_chan);
1043 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1045 struct mhi_ring *tre_ring = &mhi_chan->tre_ring;
1050 if (mhi_chan->pre_alloc)
1073 ret = mhi_gen_tre(mhi_cntrl, mhi_chan, &buf_info, mflags);
1079 if (mhi_chan->dir == DMA_TO_DEVICE)
1083 read_lock_bh(&mhi_chan->lock);
1084 mhi_ring_chan_db(mhi_cntrl, mhi_chan);
1085 read_unlock_bh(&mhi_chan->lock);
1098 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1101 struct mhi_ring *tre_ring = &mhi_chan->tre_ring;
1106 if (mhi_chan->pre_alloc)
1133 ret = mhi_gen_tre(mhi_cntrl, mhi_chan, &buf_info, mflags);
1139 if (mhi_chan->dir == DMA_TO_DEVICE)
1143 read_lock_bh(&mhi_chan->lock);
1144 mhi_ring_chan_db(mhi_cntrl, mhi_chan);
1145 read_unlock_bh(&mhi_chan->lock);
1154 int mhi_gen_tre(struct mhi_controller *mhi_cntrl, struct mhi_chan *mhi_chan,
1163 buf_ring = &mhi_chan->buf_ring;
1164 tre_ring = &mhi_chan->tre_ring;
1175 buf_info->dir = mhi_chan->dir;
1187 bei = !!(mhi_chan->intmod);
1205 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1220 tre_ring = &mhi_chan->tre_ring;
1228 ret = mhi_gen_tre(mhi_cntrl, mhi_chan, &buf_info, mflags);
1241 if (mhi_chan->dir == DMA_TO_DEVICE)
1247 read_lock_irqsave(&mhi_chan->lock, flags);
1248 mhi_ring_chan_db(mhi_cntrl, mhi_chan);
1249 read_unlock_irqrestore(&mhi_chan->lock, flags);
1259 struct mhi_chan *mhi_chan,
1268 if (mhi_chan)
1269 chan = mhi_chan->chan;
1307 struct mhi_chan *mhi_chan)
1312 dev_dbg(dev, "Entered: unprepare channel:%d\n", mhi_chan->chan);
1315 mutex_lock(&mhi_chan->mutex);
1316 write_lock_irq(&mhi_chan->lock);
1317 if (mhi_chan->ch_state != MHI_CH_STATE_ENABLED) {
1318 write_unlock_irq(&mhi_chan->lock);
1319 mutex_unlock(&mhi_chan->mutex);
1323 mhi_chan->ch_state = MHI_CH_STATE_DISABLED;
1324 write_unlock_irq(&mhi_chan->lock);
1326 reinit_completion(&mhi_chan->completion);
1338 ret = mhi_send_cmd(mhi_cntrl, mhi_chan, MHI_CMD_RESET_CHAN);
1343 ret = wait_for_completion_timeout(&mhi_chan->completion,
1345 if (!ret || mhi_chan->ccs != MHI_EV_CC_SUCCESS)
1350 if (!mhi_chan->offload_ch) {
1351 mhi_reset_chan(mhi_cntrl, mhi_chan);
1352 mhi_deinit_chan_ctxt(mhi_cntrl, mhi_chan);
1354 dev_dbg(dev, "chan:%d successfully reset\n", mhi_chan->chan);
1355 mutex_unlock(&mhi_chan->mutex);
1359 struct mhi_chan *mhi_chan)
1364 dev_dbg(dev, "Preparing channel: %d\n", mhi_chan->chan);
1366 if (!(BIT(mhi_cntrl->ee) & mhi_chan->ee_mask)) {
1369 TO_MHI_EXEC_STR(mhi_cntrl->ee), mhi_chan->ee_mask,
1370 mhi_chan->name);
1374 mutex_lock(&mhi_chan->mutex);
1377 if (mhi_chan->ch_state != MHI_CH_STATE_DISABLED) {
1380 mhi_chan->chan);
1385 if (!mhi_chan->offload_ch) {
1386 ret = mhi_init_chan_ctxt(mhi_cntrl, mhi_chan);
1391 reinit_completion(&mhi_chan->completion);
1404 ret = mhi_send_cmd(mhi_cntrl, mhi_chan, MHI_CMD_START_CHAN);
1408 ret = wait_for_completion_timeout(&mhi_chan->completion,
1410 if (!ret || mhi_chan->ccs != MHI_EV_CC_SUCCESS) {
1415 write_lock_irq(&mhi_chan->lock);
1416 mhi_chan->ch_state = MHI_CH_STATE_ENABLED;
1417 write_unlock_irq(&mhi_chan->lock);
1420 if (mhi_chan->pre_alloc) {
1422 &mhi_chan->tre_ring);
1438 ret = mhi_gen_tre(mhi_cntrl, mhi_chan, &info, MHI_EOT);
1447 read_lock_irq(&mhi_chan->lock);
1448 mhi_ring_chan_db(mhi_cntrl, mhi_chan);
1449 read_unlock_irq(&mhi_chan->lock);
1454 mutex_unlock(&mhi_chan->mutex);
1457 mhi_chan->chan);
1462 if (!mhi_chan->offload_ch)
1463 mhi_deinit_chan_ctxt(mhi_cntrl, mhi_chan);
1466 mutex_unlock(&mhi_chan->mutex);
1471 mutex_unlock(&mhi_chan->mutex);
1472 __mhi_unprepare_channel(mhi_cntrl, mhi_chan);
1521 struct mhi_chan *mhi_chan)
1527 buf_ring = &mhi_chan->buf_ring;
1528 tre_ring = &mhi_chan->tre_ring;
1534 if (mhi_chan->dir == DMA_TO_DEVICE)
1543 if (mhi_chan->pre_alloc) {
1547 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
1552 void mhi_reset_chan(struct mhi_controller *mhi_cntrl, struct mhi_chan *mhi_chan)
1556 int chan = mhi_chan->chan;
1559 if (mhi_chan->offload_ch)
1563 mhi_event = &mhi_cntrl->mhi_event[mhi_chan->er_index];
1564 er_ctxt = &mhi_cntrl->mhi_ctxt->er_ctxt[mhi_chan->er_index];
1568 mhi_reset_data_chan(mhi_cntrl, mhi_chan);
1578 struct mhi_chan *mhi_chan;
1581 mhi_chan = dir ? mhi_dev->dl_chan : mhi_dev->ul_chan;
1582 if (!mhi_chan)
1585 ret = mhi_prepare_channel(mhi_cntrl, mhi_chan);
1594 mhi_chan = dir ? mhi_dev->dl_chan : mhi_dev->ul_chan;
1595 if (!mhi_chan)
1598 __mhi_unprepare_channel(mhi_cntrl, mhi_chan);
1608 struct mhi_chan *mhi_chan;
1612 mhi_chan = dir ? mhi_dev->ul_chan : mhi_dev->dl_chan;
1613 if (!mhi_chan)
1616 __mhi_unprepare_channel(mhi_cntrl, mhi_chan);
1624 struct mhi_chan *mhi_chan = mhi_dev->dl_chan;
1625 struct mhi_event *mhi_event = &mhi_cntrl->mhi_event[mhi_chan->er_index];
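
Taken together, the hits above trace an MHI channel's life cycle: configured mhi_chan entries are bound to an mhi_device, queued buffers become TREs in mhi_gen_tre() and the channel doorbell is rung by mhi_ring_chan_db() under mhi_chan->lock, parse_xfer_event() delivers completions through mhi_chan->xfer_cb, and mhi_prepare_channel()/__mhi_unprepare_channel() move ch_state between MHI_CH_STATE_DISABLED and MHI_CH_STATE_ENABLED. The sketch below shows how a client reaches this machinery through the exported mhi.h entry points. It is illustrative only: all sample_* identifiers and the "LOOPBACK" channel name are invented for the example.

#include <linux/module.h>
#include <linux/slab.h>
#include <linux/mhi.h>

#define SAMPLE_BUF_LEN 4096	/* hypothetical transfer size */

/* TX completion: parse_xfer_event() ran on the DMA_TO_DEVICE channel and
 * invoked mhi_chan->xfer_cb with the finished buffer. */
static void sample_ul_xfer_cb(struct mhi_device *mhi_dev,
			      struct mhi_result *result)
{
	kfree(result->buf_addr);
}

/* RX data: same path on the DMA_FROM_DEVICE channel. */
static void sample_dl_xfer_cb(struct mhi_device *mhi_dev,
			      struct mhi_result *result)
{
	dev_info(&mhi_dev->dev, "rx %zu bytes, status %d\n",
		 result->bytes_xferd, result->transaction_status);
}

static int sample_probe(struct mhi_device *mhi_dev,
			const struct mhi_device_id *id)
{
	void *buf;
	int ret;

	/* Runs mhi_prepare_channel() for both directions, moving each
	 * mhi_chan to MHI_CH_STATE_ENABLED. */
	ret = mhi_prepare_for_transfer(mhi_dev);
	if (ret)
		return ret;

	buf = kmalloc(SAMPLE_BUF_LEN, GFP_KERNEL);
	if (!buf) {
		ret = -ENOMEM;
		goto err_unprepare;
	}

	/* Ends up in mhi_gen_tre() and rings the channel doorbell via
	 * mhi_ring_chan_db() under mhi_chan->lock. */
	ret = mhi_queue_buf(mhi_dev, DMA_TO_DEVICE, buf, SAMPLE_BUF_LEN,
			    MHI_EOT);
	if (ret) {
		kfree(buf);
		goto err_unprepare;
	}

	return 0;

err_unprepare:
	mhi_unprepare_from_transfer(mhi_dev);
	return ret;
}

static void sample_remove(struct mhi_device *mhi_dev)
{
	/* Resets both channels through __mhi_unprepare_channel(). */
	mhi_unprepare_from_transfer(mhi_dev);
}

/* Matched against mhi_chan->name, the same field used when channel
 * devices are created in the listing above. */
static const struct mhi_device_id sample_id_table[] = {
	{ .chan = "LOOPBACK" },
	{}
};
MODULE_DEVICE_TABLE(mhi, sample_id_table);

static struct mhi_driver sample_driver = {
	.id_table = sample_id_table,
	.probe = sample_probe,
	.remove = sample_remove,
	.ul_xfer_cb = sample_ul_xfer_cb,
	.dl_xfer_cb = sample_dl_xfer_cb,
	.driver = {
		.name = "sample_mhi_client",
	},
};
module_mhi_driver(sample_driver);

MODULE_DESCRIPTION("Hypothetical MHI client sketch");
MODULE_LICENSE("GPL");

One design point worth noting from the listing: channels flagged pre_alloc refill their own rings (the mhi_queue_buf() call inside the completion parser and the buffer loop in the prepare path), and the queue entry points bail out early for them, so a client bound to such a channel consumes data through dl_xfer_cb alone rather than queueing its own buffers.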