Lines matching refs: mhi_dev (Linux MHI host core, likely drivers/bus/mhi/host/main.c; the leading number on each line is its line number in that source file)

278 struct mhi_device *mhi_dev;
285 mhi_dev = to_mhi_device(dev);
286 mhi_cntrl = mhi_dev->mhi_cntrl;
289 if (mhi_dev->dev_type == MHI_DEVICE_CONTROLLER)
292 ul_chan = mhi_dev->ul_chan;
293 dl_chan = mhi_dev->dl_chan;
306 * references to mhi_dev created for ul and dl channels. We can
307 * be sure that there will be no instances of mhi_dev left after
314 put_device(&ul_chan->mhi_dev->dev);
321 put_device(&dl_chan->mhi_dev->dev);
324 dev_dbg(&mhi_cntrl->mhi_dev->dev, "destroy device for chan:%s\n",
325 mhi_dev->name);
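
The fragment above is the teardown path, mhi_destroy_device(): for a bidirectional channel both mhi_chan structs point at the same mhi_device, and each direction holds one get_device() reference, so teardown drops one reference per direction. A minimal restatement of that invariant (only meaningful inside the MHI core, where struct mhi_chan is visible; the helper name is invented):

static void example_drop_chan_refs(struct mhi_chan *ul_chan,
                                   struct mhi_chan *dl_chan)
{
        /* ul_chan->mhi_dev == dl_chan->mhi_dev for a bidirectional pair;
         * one put_device() per direction balances the get_device() calls
         * made when the device was created. */
        if (ul_chan)
                put_device(&ul_chan->mhi_dev->dev);
        if (dl_chan)
                put_device(&dl_chan->mhi_dev->dev);
}
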
334 int mhi_get_free_desc_count(struct mhi_device *mhi_dev,
337 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
339 mhi_dev->ul_chan : mhi_dev->dl_chan;
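
mhi_get_free_desc_count() reports how many transfer ring elements are still free in the given direction, which clients use for flow control before queueing. A minimal sketch of a caller (hypothetical helper, assuming <linux/mhi.h>):

#include <linux/errno.h>
#include <linux/mhi.h>

/* Queue one buffer only if the UL transfer ring still has room. */
static int example_queue_if_room(struct mhi_device *mhi_dev, void *buf,
                                 size_t len)
{
        if (mhi_get_free_desc_count(mhi_dev, DMA_TO_DEVICE) <= 0)
                return -EAGAIN;

        return mhi_queue_buf(mhi_dev, DMA_TO_DEVICE, buf, len, MHI_EOT);
}
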
346 void mhi_notify(struct mhi_device *mhi_dev, enum mhi_callback cb_reason)
350 if (!mhi_dev->dev.driver)
353 mhi_drv = to_mhi_driver(mhi_dev->dev.driver);
356 mhi_drv->status_cb(mhi_dev, cb_reason);
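
mhi_notify() forwards a controller event to the status_cb of whichever client driver is bound to the device, skipping devices with no driver. On the client side that arrives roughly like this (hypothetical callback; only MHI_CB_PENDING_DATA is handled here):

static void example_status_cb(struct mhi_device *mhi_dev,
                              enum mhi_callback cb_reason)
{
        /* Reached via mhi_notify(); MHI_CB_PENDING_DATA means the device
         * has inbound data waiting on one of our channels. */
        if (cb_reason == MHI_CB_PENDING_DATA)
                dev_dbg(&mhi_dev->dev, "pending data on %s\n",
                        mhi_dev->name);
}
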
364 struct mhi_device *mhi_dev;
365 struct device *dev = &mhi_cntrl->mhi_dev->dev;
370 if (!mhi_chan->configured || mhi_chan->mhi_dev ||
373 mhi_dev = mhi_alloc_device(mhi_cntrl);
374 if (IS_ERR(mhi_dev))
377 mhi_dev->dev_type = MHI_DEVICE_XFER;
380 mhi_dev->ul_chan = mhi_chan;
381 mhi_dev->ul_chan_id = mhi_chan->chan;
385 mhi_dev->dl_chan = mhi_chan;
386 mhi_dev->dl_chan_id = mhi_chan->chan;
390 put_device(&mhi_dev->dev);
394 get_device(&mhi_dev->dev);
395 mhi_chan->mhi_dev = mhi_dev;
403 mhi_dev->ul_chan = mhi_chan;
404 mhi_dev->ul_chan_id = mhi_chan->chan;
406 mhi_dev->dl_chan = mhi_chan;
407 mhi_dev->dl_chan_id = mhi_chan->chan;
409 get_device(&mhi_dev->dev);
410 mhi_chan->mhi_dev = mhi_dev;
415 mhi_dev->name = mhi_chan->name;
416 dev_set_name(&mhi_dev->dev, "%s_%s",
417 dev_name(&mhi_cntrl->mhi_dev->dev),
418 mhi_dev->name);
421 if (mhi_dev->dl_chan && mhi_dev->dl_chan->wake_capable)
422 device_init_wakeup(&mhi_dev->dev, true);
424 ret = device_add(&mhi_dev->dev);
426 put_device(&mhi_dev->dev);
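
This fragment is the device-creation path, mhi_create_devices(): it instantiates one struct mhi_device per configured channel (or channel pair) and registers it on the MHI bus, where it binds to a client driver by channel name. A skeleton of such a client (hypothetical names throughout; "LOOPBACK" is just one well-known test channel):

#include <linux/mhi.h>
#include <linux/module.h>

static void example_xfer_cb(struct mhi_device *mhi_dev,
                            struct mhi_result *result)
{
        dev_dbg(&mhi_dev->dev, "xfer done: %zu bytes, status %d\n",
                result->bytes_xferd, result->transaction_status);
}

static int example_probe(struct mhi_device *mhi_dev,
                         const struct mhi_device_id *id)
{
        return mhi_prepare_for_transfer(mhi_dev);
}

static void example_remove(struct mhi_device *mhi_dev)
{
        mhi_unprepare_from_transfer(mhi_dev);
}

static const struct mhi_device_id example_id_table[] = {
        { .chan = "LOOPBACK" },
        {}
};
MODULE_DEVICE_TABLE(mhi, example_id_table);

static struct mhi_driver example_driver = {
        .id_table = example_id_table,
        .probe = example_probe,
        .remove = example_remove,
        .ul_xfer_cb = example_xfer_cb,
        .dl_xfer_cb = example_xfer_cb,
        .driver = {
                .name = "example_mhi_client",
        },
};
module_mhi_driver(example_driver);

MODULE_LICENSE("GPL");
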
445 dev_dbg(&mhi_cntrl->mhi_dev->dev,
454 dev_err(&mhi_cntrl->mhi_dev->dev,
468 struct mhi_device *mhi_dev = mhi_chan->mhi_dev;
470 if (mhi_dev)
471 mhi_notify(mhi_dev, MHI_CB_PENDING_DATA);
482 struct device *dev = &mhi_cntrl->mhi_dev->dev;
573 struct device *dev = &mhi_cntrl->mhi_dev->dev;
611 dev_err(&mhi_cntrl->mhi_dev->dev,
649 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
663 if (mhi_queue_buf(mhi_chan->mhi_dev,
746 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
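
These are the spots where completed transfers are handed back to the client: xfer_cb is invoked with a struct mhi_result, and on auto-queue (pre_alloc) DL channels the core immediately re-posts the buffer itself via mhi_queue_buf(). For a channel without auto-queueing, the client re-posts in its own callback, roughly like this (hypothetical callback; EXAMPLE_BUF_LEN stands in for the driver's fixed RX buffer size):

#include <linux/mhi.h>
#include <linux/sizes.h>

#define EXAMPLE_BUF_LEN SZ_4K

static void example_dl_xfer_cb(struct mhi_device *mhi_dev,
                               struct mhi_result *result)
{
        if (result->transaction_status) {
                dev_err(&mhi_dev->dev, "DL completion error %d\n",
                        result->transaction_status);
                return;
        }

        /* ... consume result->buf_addr, result->bytes_xferd ... */

        /* Hand the same buffer back to keep the DL ring populated. */
        if (mhi_queue_buf(mhi_dev, DMA_FROM_DEVICE, result->buf_addr,
                          EXAMPLE_BUF_LEN, MHI_EOT))
                dev_err(&mhi_dev->dev, "failed to re-queue RX buffer\n");
}
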
782 dev_err(&mhi_cntrl->mhi_dev->dev,
799 dev_err(&mhi_cntrl->mhi_dev->dev,
815 struct device *dev = &mhi_cntrl->mhi_dev->dev;
829 dev_err(&mhi_cntrl->mhi_dev->dev,
958 dev_err(&mhi_cntrl->mhi_dev->dev,
994 dev_err(&mhi_cntrl->mhi_dev->dev,
1031 dev_err(&mhi_cntrl->mhi_dev->dev,
1064 struct device *dev = &mhi_cntrl->mhi_dev->dev;
1117 static int mhi_queue(struct mhi_device *mhi_dev, struct mhi_buf_info *buf_info,
1120 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1121 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1122 mhi_dev->dl_chan;
1163 int mhi_queue_skb(struct mhi_device *mhi_dev, enum dma_data_direction dir,
1166 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1167 mhi_dev->dl_chan;
1177 return mhi_queue(mhi_dev, &buf_info, dir, mflags);
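
mhi_queue_skb() is the skb-aware wrapper around the same mhi_queue() path; skb->data becomes the transfer buffer and the skb stays owned by the core until its completion callback runs. In the style of the mhi_net driver (hypothetical transmit helper):

#include <linux/mhi.h>
#include <linux/skbuff.h>

static int example_xmit(struct mhi_device *mhi_dev, struct sk_buff *skb)
{
        int ret = mhi_queue_skb(mhi_dev, DMA_TO_DEVICE, skb, skb->len,
                                MHI_EOT);

        /* On failure the skb was not consumed; drop it here. */
        if (ret)
                dev_kfree_skb_any(skb);

        return ret;
}
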
1181 int mhi_queue_dma(struct mhi_device *mhi_dev, enum dma_data_direction dir,
1184 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1185 mhi_dev->dl_chan;
1196 return mhi_queue(mhi_dev, &buf_info, dir, mflags);
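
mhi_queue_dma() is for clients that manage their own DMA mapping: the core consumes mhi_buf->dma_addr directly instead of mapping a kernel virtual address. A minimal sketch (hypothetical helper; the mhi_buf is assumed to be already mapped by the caller):

static int example_queue_premapped(struct mhi_device *mhi_dev,
                                   struct mhi_buf *mhi_buf)
{
        return mhi_queue_dma(mhi_dev, DMA_FROM_DEVICE, mhi_buf,
                             mhi_buf->len, MHI_EOT);
}
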
1254 int mhi_queue_buf(struct mhi_device *mhi_dev, enum dma_data_direction dir,
1263 return mhi_queue(mhi_dev, &buf_info, dir, mflags);
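
mhi_queue_buf() takes a plain kernel virtual address and length, and the core does the DMA mapping. Pre-posting receive buffers is the typical use (hypothetical helper):

static int example_post_rx(struct mhi_device *mhi_dev, void *buf,
                           size_t len)
{
        /* buf must stay allocated until dl_xfer_cb reports it complete. */
        return mhi_queue_buf(mhi_dev, DMA_FROM_DEVICE, buf, len, MHI_EOT);
}
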
1267 bool mhi_queue_is_full(struct mhi_device *mhi_dev, enum dma_data_direction dir)
1269 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1271 mhi_dev->ul_chan : mhi_dev->dl_chan;
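
mhi_queue_is_full() is the boolean companion to mhi_get_free_desc_count(), used for back-pressure. In the style of a network client (hypothetical helper; assumes <linux/netdevice.h>):

static void example_tx_backpressure(struct mhi_device *mhi_dev,
                                    struct net_device *ndev)
{
        /* Pause the software queue while the UL ring has no free TREs;
         * re-enable it from the UL completion callback. */
        if (mhi_queue_is_full(mhi_dev, DMA_TO_DEVICE))
                netif_stop_queue(ndev);
}
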
1285 struct device *dev = &mhi_cntrl->mhi_dev->dev;
1335 struct device *dev = &mhi_chan->mhi_dev->dev;
1376 ret = mhi_device_get_sync(mhi_cntrl->mhi_dev);
1413 mhi_device_put(mhi_cntrl->mhi_dev);
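
Here the core votes the device awake around a channel state update: mhi_device_get_sync() takes a wake vote and synchronously brings the device out of low power, and mhi_device_put() releases the vote. Clients can use the same pair (hypothetical helper):

static int example_do_awake_work(struct mhi_device *mhi_dev)
{
        int ret = mhi_device_get_sync(mhi_dev);

        if (ret)
                return ret;

        /* ... device is held awake; issue transfers ... */

        mhi_device_put(mhi_dev);

        return 0;
}
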
1422 struct device *dev = &mhi_chan->mhi_dev->dev;
1457 struct device *dev = &mhi_chan->mhi_dev->dev;
1546 struct device *dev = &mhi_cntrl->mhi_dev->dev;
1559 dev_err(&mhi_cntrl->mhi_dev->dev,
1611 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
1637 static int __mhi_prepare_for_transfer(struct mhi_device *mhi_dev, unsigned int flags)
1640 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1644 mhi_chan = dir ? mhi_dev->dl_chan : mhi_dev->ul_chan;
1657 mhi_chan = dir ? mhi_dev->dl_chan : mhi_dev->ul_chan;
1667 int mhi_prepare_for_transfer(struct mhi_device *mhi_dev)
1669 return __mhi_prepare_for_transfer(mhi_dev, 0);
1673 int mhi_prepare_for_transfer_autoqueue(struct mhi_device *mhi_dev)
1675 return __mhi_prepare_for_transfer(mhi_dev, MHI_CH_INBOUND_ALLOC_BUFS);
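
Both public variants funnel into __mhi_prepare_for_transfer(): the plain form starts the channels and leaves DL buffer management to the client, while the autoqueue form passes MHI_CH_INBOUND_ALLOC_BUFS so the core pre-allocates and re-posts DL buffers itself. A probe making that choice (hypothetical; the driver_data flag is invented):

static int example_autoqueue_probe(struct mhi_device *mhi_dev,
                                   const struct mhi_device_id *id)
{
        if (id->driver_data)    /* hypothetical "wants autoqueue" flag */
                return mhi_prepare_for_transfer_autoqueue(mhi_dev);

        return mhi_prepare_for_transfer(mhi_dev);
}
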
1679 void mhi_unprepare_from_transfer(struct mhi_device *mhi_dev)
1681 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1686 mhi_chan = dir ? mhi_dev->ul_chan : mhi_dev->dl_chan;
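
mhi_unprepare_from_transfer() is the teardown mirror: it resets both directions, and any buffer still sitting on a ring is completed back through xfer_cb with a -ENOTCONN transaction status (see the xfer_cb call in the reset path above) so the client can reclaim it. A callback written with that in mind (hypothetical; assumes buffers were kmalloc'd by this driver):

#include <linux/slab.h>

static void example_ul_xfer_cb(struct mhi_device *mhi_dev,
                               struct mhi_result *result)
{
        /* -ENOTCONN marks a buffer flushed by channel reset rather than
         * completed by the device; either way ownership is back with us. */
        kfree(result->buf_addr);
}
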