Lines matching refs:hw_ep
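
(Each hit below is prefixed with its line number in the source file. The fragments are consistent with the host-side MUSB driver, drivers/usb/musb/musb_host.c in the Linux kernel, where struct musb_hw_ep describes one hardware endpoint and struct musb_qh is the host queue head currently bound to it; exact line numbers vary by kernel version.)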

203 struct musb_hw_ep *hw_ep = qh->hw_ep;
204 int epnum = hw_ep->epnum;
234 musb_ep_set_qh(hw_ep, is_in, qh);
267 hw_ep->tx_channel ? "dma" : "pio");
269 if (!hw_ep->tx_channel)
270 musb_h_tx_start(hw_ep);
272 musb_h_tx_dma_start(hw_ep);
297 struct musb_hw_ep *hw_ep, int is_in)
299 struct musb_qh *qh = musb_ep_get_qh(hw_ep, is_in);
300 struct musb_hw_ep *ep = qh->hw_ep;
328 qh = musb_ep_get_qh(hw_ep, is_in);
385 hw_ep->epnum, is_in ? 'R' : 'T', next_urb(qh));
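
The musb_ep_get_qh()/musb_ep_set_qh() calls at lines 234, 299 and 328 (and again around lines 2031 and 2396-2439) are small accessor helpers. A sketch consistent with mainline, where an endpoint with a shared FIFO keeps a single qh for both directions:

static struct musb_qh *musb_ep_get_qh(struct musb_hw_ep *ep, int is_in)
{
	return is_in ? ep->in_qh : ep->out_qh;
}

static void musb_ep_set_qh(struct musb_hw_ep *ep, int is_in,
			   struct musb_qh *qh)
{
	/* a shared FIFO serves both directions, so mirror the pointer */
	if (is_in != 0 || ep->is_shared_fifo)
		ep->in_qh = qh;
	if (is_in == 0 || ep->is_shared_fifo)
		ep->out_qh = qh;
}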
390 static u16 musb_h_flush_rxfifo(struct musb_hw_ep *hw_ep, u16 csr)
402 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
403 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
406 return musb_readw(hw_ep->regs, MUSB_RXCSR);
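
Lines 402 and 403 are identical on purpose, not a duplicated hit: MUSB RX FIFOs can be double buffered, so the flush is written twice to empty both buffers, and the final read flushes the posted writes and returns the live CSR. Reconstructed from the references above (flag choices follow mainline):

static u16 musb_h_flush_rxfifo(struct musb_hw_ep *hw_ep, u16 csr)
{
	/* keep the FIFO from refilling; leave the data toggle alone */
	csr |= MUSB_RXCSR_FLUSHFIFO | MUSB_RXCSR_RXPKTRDY;
	csr &= ~(MUSB_RXCSR_H_REQPKT | MUSB_RXCSR_H_AUTOREQ
			| MUSB_RXCSR_AUTOCLEAR);

	/* write twice so both halves of a double-buffered FIFO flush */
	musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
	musb_writew(hw_ep->regs, MUSB_RXCSR, csr);

	/* flush the write buffer and return current state */
	return musb_readw(hw_ep->regs, MUSB_RXCSR);
}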
421 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
422 void __iomem *epio = hw_ep->regs;
423 struct musb_qh *qh = hw_ep->in_qh;
488 musb_read_fifo(hw_ep, length, buf);
493 musb_h_flush_rxfifo(hw_ep, csr);
572 static void musb_tx_dma_set_mode_mentor(struct musb_hw_ep *hw_ep,
576 struct dma_channel *channel = hw_ep->tx_channel;
577 void __iomem *epio = hw_ep->regs;
599 can_bulk_split(hw_ep->musb, qh->type)))
610 static void musb_tx_dma_set_mode_cppi_tusb(struct musb_hw_ep *hw_ep,
614 struct dma_channel *channel = hw_ep->tx_channel;
626 struct musb_hw_ep *hw_ep, struct musb_qh *qh,
629 struct dma_channel *channel = hw_ep->tx_channel;
633 if (musb_dma_inventra(hw_ep->musb) || musb_dma_ux500(hw_ep->musb))
634 musb_tx_dma_set_mode_mentor(hw_ep, qh,
636 else if (is_cppi_enabled(hw_ep->musb) || tusb_dma_omap(hw_ep->musb))
637 musb_tx_dma_set_mode_cppi_tusb(hw_ep, urb, &mode);
651 void __iomem *epio = hw_ep->regs;
655 hw_ep->tx_channel = NULL;
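
Lines 572-655 cover musb_tx_dma_program() and its two mode helpers: Inventra/Mentor and ux500 controllers get their DMA mode from musb_tx_dma_set_mode_mentor(), CPPI and TUSB OMAP from musb_tx_dma_set_mode_cppi_tusb(), and anything else falls back to PIO. A condensed sketch of the dispatch; the helpers also set channel->desired_mode, and a memory barrier before channel_program is elided here:

static bool musb_tx_dma_program(struct dma_controller *dma,
		struct musb_hw_ep *hw_ep, struct musb_qh *qh,
		struct urb *urb, u32 offset, u32 length)
{
	struct dma_channel *channel = hw_ep->tx_channel;
	u8 mode;

	if (musb_dma_inventra(hw_ep->musb) || musb_dma_ux500(hw_ep->musb))
		musb_tx_dma_set_mode_mentor(hw_ep, qh, &length, &mode);
	else if (is_cppi_enabled(hw_ep->musb) || tusb_dma_omap(hw_ep->musb))
		musb_tx_dma_set_mode_cppi_tusb(hw_ep, urb, &mode);
	else
		return false;	/* no DMA flavor: caller falls back to PIO */

	qh->segsize = length;
	if (!dma->channel_program(channel, qh->maxpacket, mode,
			urb->transfer_dma + offset, length)) {
		void __iomem *epio = hw_ep->regs;	/* line 651 */
		u16 csr;

		/* DMA setup failed: drop the channel (line 655) and
		 * clear the TX DMA bits so PIO can take over
		 */
		dma->channel_release(channel);
		hw_ep->tx_channel = NULL;

		csr = musb_readw(epio, MUSB_TXCSR);
		csr &= ~(MUSB_TXCSR_AUTOSET | MUSB_TXCSR_DMAENAB);
		musb_writew(epio, MUSB_TXCSR, csr | MUSB_TXCSR_H_WZC_BITS);
		return false;
	}
	return true;
}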
677 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
678 void __iomem *epio = hw_ep->regs;
679 struct musb_qh *qh = musb_ep_get_qh(hw_ep, !is_out);
699 hw_ep->tx_channel = NULL;
705 dma_channel = is_out ? hw_ep->tx_channel : hw_ep->rx_channel;
708 dma_controller, hw_ep, is_out);
710 hw_ep->tx_channel = dma_channel;
712 hw_ep->rx_channel = dma_channel;
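
Lines 705-712 are the lazy DMA-channel setup in musb_ep_program(): reuse the channel already cached on the hw_ep for this direction, otherwise allocate one from the controller and cache it per direction. Roughly (the dma_controller NULL check follows mainline):

	dma_channel = is_out ? hw_ep->tx_channel : hw_ep->rx_channel;
	if (dma_controller && !dma_channel) {
		dma_channel = dma_controller->channel_alloc(
				dma_controller, hw_ep, is_out);
		if (is_out)
			hw_ep->tx_channel = dma_channel;
		else
			hw_ep->rx_channel = dma_channel;
	}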
739 if (!hw_ep->tx_double_buffered)
740 musb_h_tx_flush_fifo(hw_ep);
757 if (!hw_ep->tx_double_buffered)
767 musb_h_ep0_flush_fifo(hw_ep);
783 qh->hb_mult = hw_ep->max_packet_sz_tx
801 load_count = min((u32) hw_ep->max_packet_sz_tx,
807 hw_ep, qh, urb, offset, len))
828 musb_write_fifo(hw_ep, load_count, buf);
832 musb_write_fifo(hw_ep, load_count, buf);
842 if (hw_ep->rx_reinit) {
850 csr = musb_readw(hw_ep->regs, MUSB_RXCSR);
856 hw_ep->epnum, csr);
870 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
871 csr = musb_readw(hw_ep->regs, MUSB_RXCSR);
884 hw_ep->rx_channel = dma_channel = NULL;
891 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
892 csr = musb_readw(hw_ep->regs, MUSB_RXCSR);
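
The write-then-read pairs at lines 870-871 and 891-892 are a recurring MUSB idiom: after updating RXCSR the driver immediately reads it back, apparently both to flush the posted write and to continue from the register's live value, since several RXCSR bits are set or cleared by the controller itself.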
983 struct musb_hw_ep *hw_ep = musb->control_ep;
984 struct musb_qh *qh = hw_ep->in_qh;
995 musb_read_fifo(hw_ep, fifo_count, fifo_dest);
1034 musb_write_fifo(hw_ep, fifo_count, fifo_dest);
1060 struct musb_hw_ep *hw_ep = musb->control_ep;
1061 void __iomem *epio = hw_ep->regs;
1062 struct musb_qh *qh = hw_ep->in_qh;
1122 musb_h_ep0_flush_fifo(hw_ep);
1136 musb_h_ep0_flush_fifo(hw_ep);
1172 musb_advance_schedule(musb, urb, hw_ep, 1);
1202 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
1203 void __iomem *epio = hw_ep->regs;
1204 struct musb_qh *qh = hw_ep->out_qh;
1221 dma = is_dma_capable() ? hw_ep->tx_channel : NULL;
1244 musb_bulk_nak_timeout(musb, hw_ep, 0);
1273 musb_h_tx_flush_fifo(hw_ep);
1409 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_OUT);
1412 if (musb_tx_dma_program(musb->dma_controller, hw_ep, qh, urb,
1415 musb_h_tx_dma_start(hw_ep);
1426 * REVISIT: some docs say that when hw_ep->tx_double_buffered,
1448 musb_write_fifo(hw_ep, length, qh->sg_miter.addr);
1452 musb_write_fifo(hw_ep, length, urb->transfer_buffer + offset);
1465 struct musb_hw_ep *hw_ep,
1470 struct dma_channel *channel = hw_ep->rx_channel;
1471 void __iomem *epio = hw_ep->regs;
1483 musb_writew(hw_ep->regs, MUSB_RXCSR, val);
1490 struct musb_hw_ep *hw_ep,
1536 struct musb_hw_ep *hw_ep,
1541 struct dma_channel *channel = hw_ep->rx_channel;
1542 void __iomem *epio = hw_ep->regs;
1565 if (musb_dma_cppi41(hw_ep->musb))
1566 done = musb_rx_dma_iso_cppi41(dma, hw_ep, qh,
1606 struct musb_hw_ep *hw_ep,
1612 struct musb *musb = hw_ep->musb;
1613 void __iomem *epio = hw_ep->regs;
1614 struct dma_channel *channel = hw_ep->rx_channel;
1659 if (rx_count < hw_ep->max_packet_sz_rx) {
1693 hw_ep->rx_channel = NULL;
1706 struct musb_hw_ep *hw_ep,
1715 struct musb_hw_ep *hw_ep,
1732 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
1734 void __iomem *epio = hw_ep->regs;
1735 struct musb_qh *qh = hw_ep->in_qh;
1748 dma = is_dma_capable() ? hw_ep->rx_channel : NULL;
1762 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG);
1807 musb_bulk_nak_timeout(musb, hw_ep, 1);
1835 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG);
1883 musb_writew(hw_ep->regs, MUSB_RXCSR, val);
1887 done = musb_rx_dma_inventra_cppi41(c, hw_ep, qh, urb, xfer_len);
1888 musb_dbg(hw_ep->musb,
1916 musb_dbg(hw_ep->musb,
1924 if (musb_rx_dma_in_inventra_cppi41(c, hw_ep, qh, urb,
1984 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_IN);
2001 struct musb_hw_ep *hw_ep = NULL;
2010 hw_ep = musb->control_ep;
2026 for (epnum = 1, hw_ep = musb->endpoints + 1;
2028 epnum++, hw_ep++) {
2031 if (musb_ep_get_qh(hw_ep, is_in) != NULL)
2034 if (hw_ep == musb->bulk_ep)
2038 diff = hw_ep->max_packet_sz_rx;
2040 diff = hw_ep->max_packet_sz_tx;
2057 hw_ep = musb->endpoints + epnum;
2059 txtype = (musb_readb(hw_ep->regs, MUSB_TXTYPE)
2071 hw_ep = musb->bulk_ep;
2098 hw_ep = musb->endpoints + best_end;
2106 qh->hw_ep = hw_ep;
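
Lines 2001-2106 are musb_schedule() choosing a hardware endpoint for a new qh: skip endpoints that already have a qh claimed, skip the endpoint reserved for the shared bulk ring, and track the candidate whose FIFO exceeds the transfer's maxpacket by the smallest margin. A condensed sketch of the loop; the multipoint TXTYPE check around lines 2057-2059 and the bulk-ring fallback at line 2071 are elided:

	for (epnum = 1, hw_ep = musb->endpoints + 1;
			epnum < musb->nr_endpoints;
			epnum++, hw_ep++) {
		int diff;

		if (musb_ep_get_qh(hw_ep, is_in) != NULL)
			continue;	/* endpoint already claimed */
		if (hw_ep == musb->bulk_ep)
			continue;	/* reserved for the bulk ring */

		if (is_in)
			diff = hw_ep->max_packet_sz_rx;
		else
			diff = hw_ep->max_packet_sz_tx;
		diff -= (qh->maxpacket * qh->hb_mult);

		/* smallest non-negative leftover FIFO space wins */
		if (diff >= 0 && best_diff > diff) {
			best_diff = diff;
			best_end = epnum;
		}
	}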
2152 * hw_ep gets reprogrammed, or with irqs blocked. Then schedule it.
2310 struct musb_hw_ep *ep = qh->hw_ep;
2396 || musb_ep_get_qh(qh->hw_ep, is_in) != qh) {
2407 musb_ep_set_qh(qh->hw_ep, is_in, NULL);
2439 if (musb_ep_get_qh(qh->hw_ep, is_in) == qh) {
2450 * queue on hw_ep (e.g. bulk ring) when we're done.
2455 musb_advance_schedule(musb, urb, qh->hw_ep, is_in);
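
The dequeue/disable references at lines 2396-2455 reflect the invariant the driver maintains on teardown: if the qh being removed is the one currently programmed on its hw_ep (musb_ep_get_qh(qh->hw_ep, is_in) == qh), the URB has to be aborted through the hardware path and musb_advance_schedule() so the next qh waiting on that endpoint (e.g. on the shared bulk ring, line 2450) gets serviced; otherwise the qh can simply be unlinked and, once empty, freed.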