Lines Matching defs:xudc
397 struct tegra_xudc *xudc;
561 static inline u32 fpci_readl(struct tegra_xudc *xudc, unsigned int offset)
563 return readl(xudc->fpci + offset);
566 static inline void fpci_writel(struct tegra_xudc *xudc, u32 val,
569 writel(val, xudc->fpci + offset);
572 static inline u32 ipfs_readl(struct tegra_xudc *xudc, unsigned int offset)
574 return readl(xudc->ipfs + offset);
577 static inline void ipfs_writel(struct tegra_xudc *xudc, u32 val,
580 writel(val, xudc->ipfs + offset);
583 static inline u32 xudc_readl(struct tegra_xudc *xudc, unsigned int offset)
585 return readl(xudc->base + offset);
588 static inline void xudc_writel(struct tegra_xudc *xudc, u32 val,
591 writel(val, xudc->base + offset);
594 static inline int xudc_readl_poll(struct tegra_xudc *xudc,
599 return readl_poll_timeout_atomic(xudc->base + offset, regval,
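The accessors at 561-599 wrap readl()/writel() over the FPCI, IPFS and XUSB device register apertures, and xudc_readl_poll() adds a polled wait on a masked register field via readl_poll_timeout_atomic(). A minimal user-space sketch of that read/mask/compare/timeout loop, run against a fake in-memory register instead of real MMIO (all names below are illustrative, not the driver's):

#include <stdint.h>
#include <stdio.h>
#include <time.h>

/* Stand-in for an MMIO register; the driver uses readl()/writel() on an
 * ioremap()ed base plus a register offset. */
static volatile uint32_t fake_reg;

static uint32_t reg_read(volatile uint32_t *reg) { return *reg; }
static void reg_write(volatile uint32_t *reg, uint32_t val) { *reg = val; }

/* Poll until (reg & mask) == expect or the timeout expires, mirroring what
 * readl_poll_timeout_atomic() does for xudc_readl_poll(). */
static int reg_poll(volatile uint32_t *reg, uint32_t mask, uint32_t expect,
                    long timeout_us)
{
    struct timespec start, now;
    long elapsed_us;

    clock_gettime(CLOCK_MONOTONIC, &start);
    for (;;) {
        if ((reg_read(reg) & mask) == expect)
            return 0;
        clock_gettime(CLOCK_MONOTONIC, &now);
        elapsed_us = (now.tv_sec - start.tv_sec) * 1000000L +
                     (now.tv_nsec - start.tv_nsec) / 1000;
        if (elapsed_us > timeout_us)
            return -1;  /* -ETIMEDOUT in the kernel helper */
    }
}

int main(void)
{
    reg_write(&fake_reg, 0x4);              /* pretend hardware set bit 2 */
    printf("poll: %d\n", reg_poll(&fake_reg, 0x4, 0x4, 10000));
    return 0;
}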
618 static inline void dump_trb(struct tegra_xudc *xudc, const char *type,
621 dev_dbg(xudc->dev,
627 static void tegra_xudc_limit_port_speed(struct tegra_xudc *xudc)
632 val = xudc_readl(xudc, SSPX_CORE_CNT56);
635 xudc_writel(xudc, val, SSPX_CORE_CNT56);
637 val = xudc_readl(xudc, SSPX_CORE_CNT57);
640 xudc_writel(xudc, val, SSPX_CORE_CNT57);
642 val = xudc_readl(xudc, SSPX_CORE_CNT65);
645 xudc_writel(xudc, val, SSPX_CORE_CNT66);
647 val = xudc_readl(xudc, SSPX_CORE_CNT66);
650 xudc_writel(xudc, val, SSPX_CORE_CNT66);
652 val = xudc_readl(xudc, SSPX_CORE_CNT67);
655 xudc_writel(xudc, val, SSPX_CORE_CNT67);
657 val = xudc_readl(xudc, SSPX_CORE_CNT72);
660 xudc_writel(xudc, val, SSPX_CORE_CNT72);
663 static void tegra_xudc_restore_port_speed(struct tegra_xudc *xudc)
668 val = xudc_readl(xudc, SSPX_CORE_CNT56);
671 xudc_writel(xudc, val, SSPX_CORE_CNT56);
673 val = xudc_readl(xudc, SSPX_CORE_CNT57);
676 xudc_writel(xudc, val, SSPX_CORE_CNT57);
678 val = xudc_readl(xudc, SSPX_CORE_CNT65);
681 xudc_writel(xudc, val, SSPX_CORE_CNT66);
683 val = xudc_readl(xudc, SSPX_CORE_CNT66);
686 xudc_writel(xudc, val, SSPX_CORE_CNT66);
688 val = xudc_readl(xudc, SSPX_CORE_CNT67);
691 xudc_writel(xudc, val, SSPX_CORE_CNT67);
693 val = xudc_readl(xudc, SSPX_CORE_CNT72);
696 xudc_writel(xudc, val, SSPX_CORE_CNT72);
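tegra_xudc_limit_port_speed() and tegra_xudc_restore_port_speed() above are chains of one read-modify-write idiom on the SSPX_CORE_CNTxx counters: read the register, clear a field by its mask, OR in the new field value, write it back. A generic sketch of that idiom (FIELD_MASK/FIELD() are made-up placeholders, not the driver's SSPX_CORE_* definitions):

#include <stdint.h>
#include <stdio.h>

#define FIELD_SHIFT 8
#define FIELD_MASK  (0xffu << FIELD_SHIFT)          /* illustrative layout */
#define FIELD(x)    (((uint32_t)(x) << FIELD_SHIFT) & FIELD_MASK)

/* Update one field of a register image without disturbing the other bits,
 * the same shape as the SSPX_CORE_CNTxx updates listed above. */
static uint32_t update_field(uint32_t reg, uint32_t new_val)
{
    reg &= ~FIELD_MASK;
    reg |= FIELD(new_val);
    return reg;
}

int main(void)
{
    uint32_t reg = 0xdeadbe00;

    reg = update_field(reg, 0x3a);
    printf("reg = %#x\n", (unsigned int)reg);       /* other bits preserved */
    return 0;
}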
699 static void tegra_xudc_device_mode_on(struct tegra_xudc *xudc)
703 pm_runtime_get_sync(xudc->dev);
705 err = phy_power_on(xudc->curr_utmi_phy);
707 dev_err(xudc->dev, "UTMI power on failed: %d\n", err);
709 err = phy_power_on(xudc->curr_usb3_phy);
711 dev_err(xudc->dev, "USB3 PHY power on failed: %d\n", err);
713 dev_dbg(xudc->dev, "device mode on\n");
715 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG,
719 static void tegra_xudc_device_mode_off(struct tegra_xudc *xudc)
725 dev_dbg(xudc->dev, "device mode off\n");
727 connected = !!(xudc_readl(xudc, PORTSC) & PORTSC_CCS);
729 reinit_completion(&xudc->disconnect_complete);
731 if (xudc->soc->port_speed_quirk)
732 tegra_xudc_restore_port_speed(xudc);
734 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG, USB_ROLE_NONE);
736 pls = (xudc_readl(xudc, PORTSC) & PORTSC_PLS_MASK) >>
740 if (xudc->soc->pls_quirk && xudc->gadget.speed == USB_SPEED_SUPER &&
742 val = xudc_readl(xudc, PORTPM);
744 xudc_writel(xudc, val, PORTPM);
746 val = xudc_readl(xudc, PORTSC);
749 xudc_writel(xudc, val, PORTSC);
754 wait_for_completion(&xudc->disconnect_complete);
757 synchronize_irq(xudc->irq);
759 err = phy_power_off(xudc->curr_utmi_phy);
761 dev_err(xudc->dev, "UTMI PHY power off failed: %d\n", err);
763 err = phy_power_off(xudc->curr_usb3_phy);
765 dev_err(xudc->dev, "USB3 PHY power off failed: %d\n", err);
767 pm_runtime_put(xudc->dev);
772 struct tegra_xudc *xudc = container_of(work, struct tegra_xudc,
775 if (xudc->device_mode)
776 tegra_xudc_device_mode_on(xudc);
778 tegra_xudc_device_mode_off(xudc);
781 static int tegra_xudc_get_phy_index(struct tegra_xudc *xudc,
786 for (i = 0; i < xudc->soc->num_phys; i++) {
787 if (xudc->usbphy[i] && usbphy == xudc->usbphy[i])
791 dev_info(xudc->dev, "phy index could not be found for shared USB PHY");
798 struct tegra_xudc *xudc = container_of(nb, struct tegra_xudc,
803 dev_dbg(xudc->dev, "%s(): event is %d\n", __func__, usbphy->last_event);
805 if ((xudc->device_mode && usbphy->last_event == USB_EVENT_VBUS) ||
806 (!xudc->device_mode && usbphy->last_event != USB_EVENT_VBUS)) {
807 dev_dbg(xudc->dev, "Same role(%d) received. Ignore",
808 xudc->device_mode);
812 xudc->device_mode = (usbphy->last_event == USB_EVENT_VBUS) ? true :
815 phy_index = tegra_xudc_get_phy_index(xudc, usbphy);
816 dev_dbg(xudc->dev, "%s(): current phy index is %d\n", __func__,
819 if (!xudc->suspended && phy_index != -1) {
820 xudc->curr_utmi_phy = xudc->utmi_phy[phy_index];
821 xudc->curr_usb3_phy = xudc->usb3_phy[phy_index];
822 xudc->curr_usbphy = usbphy;
823 schedule_work(&xudc->usb_role_sw_work);
832 struct tegra_xudc *xudc = container_of(dwork, struct tegra_xudc,
836 spin_lock_irqsave(&xudc->lock, flags);
838 if (xudc->wait_csc) {
839 u32 pls = (xudc_readl(xudc, PORTSC) & PORTSC_PLS_MASK) >>
843 dev_info(xudc->dev, "PLS = Inactive. Toggle VBUS\n");
844 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG,
846 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG,
849 xudc->wait_csc = false;
853 spin_unlock_irqrestore(&xudc->lock, flags);
859 struct tegra_xudc *xudc =
865 spin_lock_irqsave(&xudc->lock, flags);
867 if (xudc->device_mode && xudc->wait_for_sec_prc) {
868 pls = (xudc_readl(xudc, PORTSC) & PORTSC_PLS_MASK) >>
870 dev_dbg(xudc->dev, "pls = %x\n", pls);
873 dev_dbg(xudc->dev, "toggle vbus\n");
876 xudc->curr_utmi_phy);
878 xudc->wait_for_sec_prc = 0;
882 spin_unlock_irqrestore(&xudc->lock, flags);
914 static void ep_reload(struct tegra_xudc *xudc, unsigned int ep)
916 xudc_writel(xudc, BIT(ep), EP_RELOAD);
917 xudc_readl_poll(xudc, EP_RELOAD, BIT(ep), 0);
920 static void ep_pause(struct tegra_xudc *xudc, unsigned int ep)
924 val = xudc_readl(xudc, EP_PAUSE);
929 xudc_writel(xudc, val, EP_PAUSE);
931 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
933 xudc_writel(xudc, BIT(ep), EP_STCHG);
936 static void ep_unpause(struct tegra_xudc *xudc, unsigned int ep)
940 val = xudc_readl(xudc, EP_PAUSE);
945 xudc_writel(xudc, val, EP_PAUSE);
947 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
949 xudc_writel(xudc, BIT(ep), EP_STCHG);
952 static void ep_unpause_all(struct tegra_xudc *xudc)
956 val = xudc_readl(xudc, EP_PAUSE);
958 xudc_writel(xudc, 0, EP_PAUSE);
960 xudc_readl_poll(xudc, EP_STCHG, val, val);
962 xudc_writel(xudc, val, EP_STCHG);
965 static void ep_halt(struct tegra_xudc *xudc, unsigned int ep)
969 val = xudc_readl(xudc, EP_HALT);
973 xudc_writel(xudc, val, EP_HALT);
975 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
977 xudc_writel(xudc, BIT(ep), EP_STCHG);
980 static void ep_unhalt(struct tegra_xudc *xudc, unsigned int ep)
984 val = xudc_readl(xudc, EP_HALT);
988 xudc_writel(xudc, val, EP_HALT);
990 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
992 xudc_writel(xudc, BIT(ep), EP_STCHG);
995 static void ep_unhalt_all(struct tegra_xudc *xudc)
999 val = xudc_readl(xudc, EP_HALT);
1002 xudc_writel(xudc, 0, EP_HALT);
1004 xudc_readl_poll(xudc, EP_STCHG, val, val);
1006 xudc_writel(xudc, val, EP_STCHG);
1009 static void ep_wait_for_stopped(struct tegra_xudc *xudc, unsigned int ep)
1011 xudc_readl_poll(xudc, EP_STOPPED, BIT(ep), BIT(ep));
1012 xudc_writel(xudc, BIT(ep), EP_STOPPED);
1015 static void ep_wait_for_inactive(struct tegra_xudc *xudc, unsigned int ep)
1017 xudc_readl_poll(xudc, EP_THREAD_ACTIVE, BIT(ep), 0);
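ep_pause()/ep_unpause() and ep_halt()/ep_unhalt() share one handshake: change a bit in EP_PAUSE or EP_HALT, poll the matching bit in EP_STCHG until the controller acknowledges the state change, then write that bit back to EP_STCHG to clear it (write-one-to-clear). A toy sketch of the sequence against simulated registers; the instant acknowledgement is a modelling shortcut, not real controller behaviour:

#include <stdint.h>
#include <stdio.h>

/* Simulated controller state: this toy "hardware" acknowledges a pause
 * change immediately by setting the matching EP_STCHG bit. */
static uint32_t ep_pause_reg, ep_stchg_reg;

static void hw_write_pause(uint32_t val)
{
    ep_stchg_reg |= ep_pause_reg ^ val;     /* ack every changed bit */
    ep_pause_reg = val;
}

/* Pause one endpoint: set its bit, wait for the state-change ack, then
 * clear the ack bit (the driver does this by writing BIT(ep) to EP_STCHG). */
static void ep_pause(unsigned int ep)
{
    uint32_t val = ep_pause_reg;

    if (val & (1u << ep))
        return;                             /* already paused */

    hw_write_pause(val | (1u << ep));

    while (!(ep_stchg_reg & (1u << ep)))
        ;                                   /* poll for the ack */

    ep_stchg_reg &= ~(1u << ep);            /* models the W1C write */
}

int main(void)
{
    ep_pause(2);
    printf("EP_PAUSE=%#x EP_STCHG=%#x\n",
           (unsigned int)ep_pause_reg, (unsigned int)ep_stchg_reg);
    return 0;
}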
1023 struct tegra_xudc *xudc = ep->xudc;
1025 dev_dbg(xudc->dev, "completing request %p on EP %u with status %d\n",
1034 usb_gadget_unmap_request(&xudc->gadget, &req->usb_req,
1035 (xudc->setup_state ==
1038 usb_gadget_unmap_request(&xudc->gadget, &req->usb_req,
1042 spin_unlock(&xudc->lock);
1044 spin_lock(&xudc->lock);
1074 struct tegra_xudc *xudc = ep->xudc;
1100 (xudc->setup_state == DATA_STAGE_RECV)))
1106 if (xudc->setup_state == DATA_STAGE_XFER ||
1107 xudc->setup_state == DATA_STAGE_RECV)
1112 if (xudc->setup_state == DATA_STAGE_XFER ||
1113 xudc->setup_state == STATUS_STAGE_XFER)
1135 dump_trb(xudc, "TRANSFER", trb);
1201 struct tegra_xudc *xudc = ep->xudc;
1209 val |= DB_STREAMID(xudc->setup_seq_num);
1222 dev_dbg(xudc->dev, "ring doorbell: %#x\n", val);
1223 xudc_writel(xudc, val, DB);
1246 struct tegra_xudc *xudc = ep->xudc;
1250 dev_err(xudc->dev, "control EP has pending transfers\n");
1255 err = usb_gadget_map_request(&xudc->gadget, &req->usb_req,
1256 (xudc->setup_state ==
1259 err = usb_gadget_map_request(&xudc->gadget, &req->usb_req,
1264 dev_err(xudc->dev, "failed to map request: %d\n", err);
1301 struct tegra_xudc *xudc;
1310 xudc = ep->xudc;
1312 spin_lock_irqsave(&xudc->lock, flags);
1313 if (xudc->powergated || !ep->desc) {
1320 spin_unlock_irqrestore(&xudc->lock, flags);
1373 dev_dbg(ep->xudc->dev, "%s: request %p -> %p; trb %p\n", __func__,
1397 dev_dbg(ep->xudc->dev, "%s: request %p -> %p; enq ptr: %p; trb %p\n",
1414 struct tegra_xudc *xudc = ep->xudc;
1437 ep_pause(xudc, ep->index);
1438 ep_wait_for_inactive(xudc, ep->index);
1471 if (dma_mapping_error(xudc->dev, deq_ptr)) {
1476 ep_reload(xudc, ep->index);
1495 ep_unpause(xudc, ep->index);
1508 struct tegra_xudc *xudc;
1517 xudc = ep->xudc;
1519 spin_lock_irqsave(&xudc->lock, flags);
1521 if (xudc->powergated || !ep->desc) {
1528 spin_unlock_irqrestore(&xudc->lock, flags);
1535 struct tegra_xudc *xudc = ep->xudc;
1541 dev_err(xudc->dev, "can't halt isochronous EP\n");
1545 if (!!(xudc_readl(xudc, EP_HALT) & BIT(ep->index)) == halt) {
1546 dev_dbg(xudc->dev, "EP %u already %s\n", ep->index,
1552 ep_halt(xudc, ep->index);
1556 ep_reload(xudc, ep->index);
1564 ep_reload(xudc, ep->index);
1565 ep_unpause(xudc, ep->index);
1566 ep_unhalt(xudc, ep->index);
1577 struct tegra_xudc *xudc;
1585 xudc = ep->xudc;
1587 spin_lock_irqsave(&xudc->lock, flags);
1588 if (xudc->powergated) {
1595 dev_err(xudc->dev, "can't halt EP with requests pending\n");
1602 spin_unlock_irqrestore(&xudc->lock, flags);
1611 struct tegra_xudc *xudc = ep->xudc;
1616 if (xudc->gadget.speed == USB_SPEED_SUPER) {
1622 } else if ((xudc->gadget.speed < USB_SPEED_SUPER) &&
1625 if (xudc->gadget.speed == USB_SPEED_HIGH) {
1628 dev_warn(xudc->dev,
1640 if (xudc->gadget.speed == USB_SPEED_SUPER) {
1698 struct tegra_xudc *xudc = ep->xudc;
1701 dev_err(xudc->dev, "endpoint %u already disabled\n",
1708 ep_reload(xudc, ep->index);
1712 xudc->nr_enabled_eps--;
1714 xudc->nr_isoch_eps--;
1721 ep_unpause(xudc, ep->index);
1722 ep_unhalt(xudc, ep->index);
1723 if (xudc_readl(xudc, EP_STOPPED) & BIT(ep->index))
1724 xudc_writel(xudc, BIT(ep->index), EP_STOPPED);
1730 if ((xudc->device_state == USB_STATE_CONFIGURED) &&
1731 (xudc->nr_enabled_eps == 1)) {
1734 xudc->device_state = USB_STATE_ADDRESS;
1735 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
1737 val = xudc_readl(xudc, CTRL);
1739 xudc_writel(xudc, val, CTRL);
1742 dev_info(xudc->dev, "ep %u disabled\n", ep->index);
1750 struct tegra_xudc *xudc;
1758 xudc = ep->xudc;
1760 spin_lock_irqsave(&xudc->lock, flags);
1761 if (xudc->powergated) {
1768 spin_unlock_irqrestore(&xudc->lock, flags);
1776 struct tegra_xudc *xudc = ep->xudc;
1780 if (xudc->gadget.speed == USB_SPEED_SUPER &&
1792 if (xudc->nr_isoch_eps > XUDC_MAX_ISOCH_EPS) {
1793 dev_err(xudc->dev, "too many isochronous endpoints\n");
1796 xudc->nr_isoch_eps++;
1807 xudc->nr_enabled_eps++;
1822 if (xudc->device_state == USB_STATE_ADDRESS) {
1823 val = xudc_readl(xudc, CTRL);
1825 xudc_writel(xudc, val, CTRL);
1827 xudc->device_state = USB_STATE_CONFIGURED;
1828 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
1836 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++) {
1837 if (xudc->ep[i].desc &&
1838 usb_endpoint_xfer_bulk(xudc->ep[i].desc))
1839 ep_pause(xudc, i);
1843 ep_reload(xudc, ep->index);
1844 ep_unpause(xudc, ep->index);
1845 ep_unhalt(xudc, ep->index);
1848 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++) {
1849 if (xudc->ep[i].desc &&
1850 usb_endpoint_xfer_bulk(xudc->ep[i].desc))
1851 ep_unpause(xudc, i);
1856 dev_info(xudc->dev, "EP %u (type: %s, dir: %s) enabled\n", ep->index,
1867 struct tegra_xudc *xudc;
1875 xudc = ep->xudc;
1877 spin_lock_irqsave(&xudc->lock, flags);
1878 if (xudc->powergated) {
1885 spin_unlock_irqrestore(&xudc->lock, flags);
1945 struct tegra_xudc *xudc = to_xudc(gadget);
1949 spin_lock_irqsave(&xudc->lock, flags);
1950 if (xudc->powergated) {
1955 ret = (xudc_readl(xudc, MFINDEX) & MFINDEX_FRAME_MASK) >>
1958 spin_unlock_irqrestore(&xudc->lock, flags);
1963 static void tegra_xudc_resume_device_state(struct tegra_xudc *xudc)
1968 ep_unpause_all(xudc);
1971 val = xudc_readl(xudc, PORTSC);
1975 xudc_writel(xudc, val, PORTSC);
1978 if (xudc->device_state == USB_STATE_SUSPENDED) {
1979 xudc->device_state = xudc->resume_state;
1980 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
1981 xudc->resume_state = 0;
1989 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++)
1990 tegra_xudc_ep_ring_doorbell(&xudc->ep[i]);
1995 struct tegra_xudc *xudc = to_xudc(gadget);
2000 spin_lock_irqsave(&xudc->lock, flags);
2002 if (xudc->powergated) {
2006 val = xudc_readl(xudc, PORTPM);
2007 dev_dbg(xudc->dev, "%s: PORTPM=%#x, speed=%x\n", __func__,
2010 if (((xudc->gadget.speed <= USB_SPEED_HIGH) &&
2012 ((xudc->gadget.speed == USB_SPEED_SUPER) &&
2014 tegra_xudc_resume_device_state(xudc);
2017 if (xudc->gadget.speed == USB_SPEED_SUPER) {
2020 xudc_writel(xudc, 0, DEVNOTIF_HI);
2021 xudc_writel(xudc, val, DEVNOTIF_LO);
2026 dev_dbg(xudc->dev, "%s: ret value is %d", __func__, ret);
2027 spin_unlock_irqrestore(&xudc->lock, flags);
2034 struct tegra_xudc *xudc = to_xudc(gadget);
2038 pm_runtime_get_sync(xudc->dev);
2040 spin_lock_irqsave(&xudc->lock, flags);
2042 if (is_on != xudc->pullup) {
2043 val = xudc_readl(xudc, CTRL);
2048 xudc_writel(xudc, val, CTRL);
2051 xudc->pullup = is_on;
2052 dev_dbg(xudc->dev, "%s: pullup:%d", __func__, is_on);
2054 spin_unlock_irqrestore(&xudc->lock, flags);
2056 pm_runtime_put(xudc->dev);
2064 struct tegra_xudc *xudc = to_xudc(gadget);
2073 pm_runtime_get_sync(xudc->dev);
2075 spin_lock_irqsave(&xudc->lock, flags);
2077 if (xudc->driver) {
2082 xudc->setup_state = WAIT_FOR_SETUP;
2083 xudc->device_state = USB_STATE_DEFAULT;
2084 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2086 ret = __tegra_xudc_ep_enable(&xudc->ep[0], &tegra_xudc_ep0_desc);
2090 val = xudc_readl(xudc, CTRL);
2092 xudc_writel(xudc, val, CTRL);
2094 val = xudc_readl(xudc, PORTHALT);
2096 xudc_writel(xudc, val, PORTHALT);
2098 if (xudc->pullup) {
2099 val = xudc_readl(xudc, CTRL);
2101 xudc_writel(xudc, val, CTRL);
2104 for (i = 0; i < xudc->soc->num_phys; i++)
2105 if (xudc->usbphy[i])
2106 otg_set_peripheral(xudc->usbphy[i]->otg, gadget);
2108 xudc->driver = driver;
2110 dev_dbg(xudc->dev, "%s: ret value is %d", __func__, ret);
2111 spin_unlock_irqrestore(&xudc->lock, flags);
2113 pm_runtime_put(xudc->dev);
2120 struct tegra_xudc *xudc = to_xudc(gadget);
2125 pm_runtime_get_sync(xudc->dev);
2127 spin_lock_irqsave(&xudc->lock, flags);
2129 for (i = 0; i < xudc->soc->num_phys; i++)
2130 if (xudc->usbphy[i])
2131 otg_set_peripheral(xudc->usbphy[i]->otg, NULL);
2133 val = xudc_readl(xudc, CTRL);
2135 xudc_writel(xudc, val, CTRL);
2137 __tegra_xudc_ep_disable(&xudc->ep[0]);
2139 xudc->driver = NULL;
2140 dev_dbg(xudc->dev, "Gadget stopped");
2142 spin_unlock_irqrestore(&xudc->lock, flags);
2144 pm_runtime_put(xudc->dev);
2153 struct tegra_xudc *xudc = to_xudc(gadget);
2155 dev_dbg(xudc->dev, "%s: %u mA\n", __func__, m_a);
2157 if (xudc->curr_usbphy && xudc->curr_usbphy->chg_type == SDP_TYPE)
2158 ret = usb_phy_set_power(xudc->curr_usbphy, m_a);
2165 struct tegra_xudc *xudc = to_xudc(gadget);
2167 dev_dbg(xudc->dev, "%s: %d\n", __func__, is_on);
2168 xudc->selfpowered = !!is_on;
2188 tegra_xudc_ep0_queue_status(struct tegra_xudc *xudc,
2191 xudc->ep0_req->usb_req.buf = NULL;
2192 xudc->ep0_req->usb_req.dma = 0;
2193 xudc->ep0_req->usb_req.length = 0;
2194 xudc->ep0_req->usb_req.complete = cmpl;
2195 xudc->ep0_req->usb_req.context = xudc;
2197 return __tegra_xudc_ep_queue(&xudc->ep[0], xudc->ep0_req);
2201 tegra_xudc_ep0_queue_data(struct tegra_xudc *xudc, void *buf, size_t len,
2204 xudc->ep0_req->usb_req.buf = buf;
2205 xudc->ep0_req->usb_req.length = len;
2206 xudc->ep0_req->usb_req.complete = cmpl;
2207 xudc->ep0_req->usb_req.context = xudc;
2209 return __tegra_xudc_ep_queue(&xudc->ep[0], xudc->ep0_req);
2212 static void tegra_xudc_ep0_req_done(struct tegra_xudc *xudc)
2214 switch (xudc->setup_state) {
2216 xudc->setup_state = STATUS_STAGE_RECV;
2217 tegra_xudc_ep0_queue_status(xudc, no_op_complete);
2220 xudc->setup_state = STATUS_STAGE_XFER;
2221 tegra_xudc_ep0_queue_status(xudc, no_op_complete);
2224 xudc->setup_state = WAIT_FOR_SETUP;
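tegra_xudc_ep0_req_done() steps a small control-transfer state machine: a completed DATA stage queues a zero-length STATUS stage in the opposite direction, and anything else falls back to waiting for the next SETUP packet. A skeletal sketch of those transitions with the queueing stubbed out (the state names mirror the listing; everything else is illustrative):

#include <stdio.h>

/* Control transfer stages, mirroring the setup_state values above. */
enum setup_state {
    WAIT_FOR_SETUP,
    DATA_STAGE_XFER,        /* IN data to the host */
    DATA_STAGE_RECV,        /* OUT data from the host */
    STATUS_STAGE_XFER,
    STATUS_STAGE_RECV,
};

static enum setup_state setup_state = WAIT_FOR_SETUP;

static void queue_status_stage(void)
{
    /* stand-in for tegra_xudc_ep0_queue_status() */
    printf("queue zero-length status packet (state %d)\n", setup_state);
}

/* A finished DATA stage is followed by a STATUS stage in the opposite
 * direction; a finished STATUS stage returns to waiting for SETUP. */
static void ep0_req_done(void)
{
    switch (setup_state) {
    case DATA_STAGE_XFER:
        setup_state = STATUS_STAGE_RECV;
        queue_status_stage();
        break;
    case DATA_STAGE_RECV:
        setup_state = STATUS_STAGE_XFER;
        queue_status_stage();
        break;
    default:
        setup_state = WAIT_FOR_SETUP;
        break;
    }
}

int main(void)
{
    setup_state = DATA_STAGE_XFER;
    ep0_req_done();         /* -> STATUS_STAGE_RECV */
    ep0_req_done();         /* -> WAIT_FOR_SETUP */
    return 0;
}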
2229 static int tegra_xudc_ep0_delegate_req(struct tegra_xudc *xudc,
2234 spin_unlock(&xudc->lock);
2235 ret = xudc->driver->setup(&xudc->gadget, ctrl);
2236 spin_lock(&xudc->lock);
2243 struct tegra_xudc *xudc = req->context;
2245 if (xudc->test_mode_pattern) {
2246 xudc_writel(xudc, xudc->test_mode_pattern, PORT_TM);
2247 xudc->test_mode_pattern = 0;
2251 static int tegra_xudc_ep0_set_feature(struct tegra_xudc *xudc,
2267 if ((xudc->gadget.speed == USB_SPEED_SUPER) ||
2268 (xudc->device_state == USB_STATE_DEFAULT))
2271 val = xudc_readl(xudc, PORTPM);
2277 xudc_writel(xudc, val, PORTPM);
2281 if ((xudc->device_state != USB_STATE_CONFIGURED) ||
2282 (xudc->gadget.speed != USB_SPEED_SUPER))
2285 val = xudc_readl(xudc, PORTPM);
2287 xudc->soc->u1_enable) {
2295 xudc->soc->u2_enable) {
2302 xudc_writel(xudc, val, PORTPM);
2305 if (xudc->gadget.speed != USB_SPEED_HIGH)
2311 xudc->test_mode_pattern = index >> 8;
2319 if (xudc->device_state != USB_STATE_CONFIGURED)
2325 val = xudc_readl(xudc, PORTPM);
2332 xudc_writel(xudc, val, PORTPM);
2335 return tegra_xudc_ep0_delegate_req(xudc, ctrl);
2345 if ((xudc->device_state == USB_STATE_DEFAULT) ||
2346 ((xudc->device_state == USB_STATE_ADDRESS) &&
2350 ret = __tegra_xudc_ep_set_halt(&xudc->ep[ep], set);
2358 return tegra_xudc_ep0_queue_status(xudc, set_feature_complete);
2361 static int tegra_xudc_ep0_get_status(struct tegra_xudc *xudc,
2377 val = xudc_readl(xudc, PORTPM);
2379 if (xudc->selfpowered)
2382 if ((xudc->gadget.speed < USB_SPEED_SUPER) &&
2386 if (xudc->gadget.speed == USB_SPEED_SUPER) {
2394 if (xudc->gadget.speed == USB_SPEED_SUPER) {
2396 val = xudc_readl(xudc, PORTPM);
2404 ep_ctx = &xudc->ep_context[ep];
2406 if ((xudc->device_state != USB_STATE_CONFIGURED) &&
2407 ((xudc->device_state != USB_STATE_ADDRESS) || (ep != 0)))
2413 if (xudc_readl(xudc, EP_HALT) & BIT(ep))
2420 xudc->status_buf = cpu_to_le16(status);
2421 return tegra_xudc_ep0_queue_data(xudc, &xudc->status_buf,
2422 sizeof(xudc->status_buf),
2431 static int tegra_xudc_ep0_set_sel(struct tegra_xudc *xudc,
2438 if (xudc->device_state == USB_STATE_DEFAULT)
2446 return tegra_xudc_ep0_queue_data(xudc, &xudc->sel_timing,
2447 sizeof(xudc->sel_timing),
2456 static int tegra_xudc_ep0_set_isoch_delay(struct tegra_xudc *xudc,
2469 xudc->isoch_delay = delay;
2471 return tegra_xudc_ep0_queue_status(xudc, set_isoch_delay_complete);
2476 struct tegra_xudc *xudc = req->context;
2478 if ((xudc->device_state == USB_STATE_DEFAULT) &&
2479 (xudc->dev_addr != 0)) {
2480 xudc->device_state = USB_STATE_ADDRESS;
2481 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2482 } else if ((xudc->device_state == USB_STATE_ADDRESS) &&
2483 (xudc->dev_addr == 0)) {
2484 xudc->device_state = USB_STATE_DEFAULT;
2485 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2489 static int tegra_xudc_ep0_set_address(struct tegra_xudc *xudc,
2492 struct tegra_xudc_ep *ep0 = &xudc->ep[0];
2503 if (xudc->device_state == USB_STATE_CONFIGURED)
2506 dev_dbg(xudc->dev, "set address: %u\n", addr);
2508 xudc->dev_addr = addr;
2509 val = xudc_readl(xudc, CTRL);
2512 xudc_writel(xudc, val, CTRL);
2516 return tegra_xudc_ep0_queue_status(xudc, set_address_complete);
2519 static int tegra_xudc_ep0_standard_req(struct tegra_xudc *xudc,
2526 dev_dbg(xudc->dev, "USB_REQ_GET_STATUS\n");
2527 ret = tegra_xudc_ep0_get_status(xudc, ctrl);
2530 dev_dbg(xudc->dev, "USB_REQ_SET_ADDRESS\n");
2531 ret = tegra_xudc_ep0_set_address(xudc, ctrl);
2534 dev_dbg(xudc->dev, "USB_REQ_SET_SEL\n");
2535 ret = tegra_xudc_ep0_set_sel(xudc, ctrl);
2538 dev_dbg(xudc->dev, "USB_REQ_SET_ISOCH_DELAY\n");
2539 ret = tegra_xudc_ep0_set_isoch_delay(xudc, ctrl);
2543 dev_dbg(xudc->dev, "USB_REQ_CLEAR/SET_FEATURE\n");
2544 ret = tegra_xudc_ep0_set_feature(xudc, ctrl);
2547 dev_dbg(xudc->dev, "USB_REQ_SET_CONFIGURATION\n");
2555 ret = tegra_xudc_ep0_delegate_req(xudc, ctrl);
2562 static void tegra_xudc_handle_ep0_setup_packet(struct tegra_xudc *xudc,
2568 xudc->setup_seq_num = seq_num;
2571 ep_unhalt(xudc, 0);
2577 if (xudc->soc->invalid_seq_num &&
2579 dev_warn(xudc->dev, "invalid sequence number detected\n");
2580 ep_halt(xudc, 0);
2585 xudc->setup_state = (ctrl->bRequestType & USB_DIR_IN) ?
2588 xudc->setup_state = STATUS_STAGE_XFER;
2591 ret = tegra_xudc_ep0_standard_req(xudc, ctrl);
2593 ret = tegra_xudc_ep0_delegate_req(xudc, ctrl);
2596 dev_warn(xudc->dev, "setup request failed: %d\n", ret);
2597 xudc->setup_state = WAIT_FOR_SETUP;
2598 ep_halt(xudc, 0);
2602 static void tegra_xudc_handle_ep0_event(struct tegra_xudc *xudc,
2608 if (xudc->setup_state != WAIT_FOR_SETUP) {
2615 memcpy(&xudc->setup_packet.ctrl_req, ctrl, sizeof(*ctrl));
2616 xudc->setup_packet.seq_num = seq_num;
2617 xudc->queued_setup_packet = true;
2619 tegra_xudc_handle_ep0_setup_packet(xudc, ctrl, seq_num);
2639 static void tegra_xudc_handle_transfer_completion(struct tegra_xudc *xudc,
2665 dev_dbg(xudc->dev, "bytes transferred %u / %u\n",
2671 tegra_xudc_ep0_req_done(xudc);
2683 dev_warn(xudc->dev, "transfer event on dequeued request\n");
2690 static void tegra_xudc_handle_transfer_event(struct tegra_xudc *xudc,
2694 struct tegra_xudc_ep *ep = &xudc->ep[ep_index];
2699 dev_warn(xudc->dev, "transfer event on disabled EP %u\n",
2718 tegra_xudc_handle_transfer_completion(xudc, ep, event);
2721 dev_info(xudc->dev, "stream rejected on EP %u\n", ep_index);
2726 dev_info(xudc->dev, "prime pipe received on EP %u\n", ep_index);
2735 ep_wait_for_stopped(xudc, ep_index);
2744 ep_wait_for_stopped(xudc, ep_index);
2756 dev_err(xudc->dev, "completion error %#x on EP %u\n",
2759 ep_halt(xudc, ep_index);
2762 dev_info(xudc->dev, "sequence number error\n");
2769 xudc->setup_state = WAIT_FOR_SETUP;
2770 if (!xudc->queued_setup_packet)
2773 tegra_xudc_handle_ep0_setup_packet(xudc,
2774 &xudc->setup_packet.ctrl_req,
2775 xudc->setup_packet.seq_num);
2776 xudc->queued_setup_packet = false;
2779 dev_dbg(xudc->dev, "stop completion code on EP %u\n",
2786 dev_dbg(xudc->dev, "completion event %#x on EP %u\n",
2792 static void tegra_xudc_reset(struct tegra_xudc *xudc)
2794 struct tegra_xudc_ep *ep0 = &xudc->ep[0];
2798 xudc->setup_state = WAIT_FOR_SETUP;
2799 xudc->device_state = USB_STATE_DEFAULT;
2800 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2802 ep_unpause_all(xudc);
2804 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++)
2805 tegra_xudc_ep_nuke(&xudc->ep[i], -ESHUTDOWN);
2814 xudc->setup_seq_num = 0;
2815 xudc->queued_setup_packet = false;
2824 if (!dma_mapping_error(xudc->dev, deq_ptr)) {
2829 ep_unhalt_all(xudc);
2830 ep_reload(xudc, 0);
2831 ep_unpause(xudc, 0);
2834 static void tegra_xudc_port_connect(struct tegra_xudc *xudc)
2836 struct tegra_xudc_ep *ep0 = &xudc->ep[0];
2840 val = (xudc_readl(xudc, PORTSC) & PORTSC_PS_MASK) >> PORTSC_PS_SHIFT;
2843 xudc->gadget.speed = USB_SPEED_LOW;
2846 xudc->gadget.speed = USB_SPEED_FULL;
2849 xudc->gadget.speed = USB_SPEED_HIGH;
2852 xudc->gadget.speed = USB_SPEED_SUPER;
2855 xudc->gadget.speed = USB_SPEED_UNKNOWN;
2859 xudc->device_state = USB_STATE_DEFAULT;
2860 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2862 xudc->setup_state = WAIT_FOR_SETUP;
2864 if (xudc->gadget.speed == USB_SPEED_SUPER)
2873 if (!xudc->soc->u1_enable) {
2874 val = xudc_readl(xudc, PORTPM);
2876 xudc_writel(xudc, val, PORTPM);
2879 if (!xudc->soc->u2_enable) {
2880 val = xudc_readl(xudc, PORTPM);
2882 xudc_writel(xudc, val, PORTPM);
2885 if (xudc->gadget.speed <= USB_SPEED_HIGH) {
2886 val = xudc_readl(xudc, PORTPM);
2888 if (xudc->soc->lpm_enable)
2892 xudc_writel(xudc, val, PORTPM);
2895 val = xudc_readl(xudc, ST);
2897 xudc_writel(xudc, ST_RC, ST);
2900 static void tegra_xudc_port_disconnect(struct tegra_xudc *xudc)
2902 tegra_xudc_reset(xudc);
2904 if (xudc->driver && xudc->driver->disconnect) {
2905 spin_unlock(&xudc->lock);
2906 xudc->driver->disconnect(&xudc->gadget);
2907 spin_lock(&xudc->lock);
2910 xudc->device_state = USB_STATE_NOTATTACHED;
2911 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2913 complete(&xudc->disconnect_complete);
2916 static void tegra_xudc_port_reset(struct tegra_xudc *xudc)
2918 tegra_xudc_reset(xudc);
2920 if (xudc->driver) {
2921 spin_unlock(&xudc->lock);
2922 usb_gadget_udc_reset(&xudc->gadget, xudc->driver);
2923 spin_lock(&xudc->lock);
2926 tegra_xudc_port_connect(xudc);
2929 static void tegra_xudc_port_suspend(struct tegra_xudc *xudc)
2931 dev_dbg(xudc->dev, "port suspend\n");
2933 xudc->resume_state = xudc->device_state;
2934 xudc->device_state = USB_STATE_SUSPENDED;
2935 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2937 if (xudc->driver->suspend) {
2938 spin_unlock(&xudc->lock);
2939 xudc->driver->suspend(&xudc->gadget);
2940 spin_lock(&xudc->lock);
2944 static void tegra_xudc_port_resume(struct tegra_xudc *xudc)
2946 dev_dbg(xudc->dev, "port resume\n");
2948 tegra_xudc_resume_device_state(xudc);
2950 if (xudc->driver->resume) {
2951 spin_unlock(&xudc->lock);
2952 xudc->driver->resume(&xudc->gadget);
2953 spin_lock(&xudc->lock);
2957 static inline void clear_port_change(struct tegra_xudc *xudc, u32 flag)
2961 val = xudc_readl(xudc, PORTSC);
2964 xudc_writel(xudc, val, PORTSC);
2967 static void __tegra_xudc_handle_port_status(struct tegra_xudc *xudc)
2971 porthalt = xudc_readl(xudc, PORTHALT);
2974 dev_dbg(xudc->dev, "STCHG_REQ, PORTHALT = %#x\n", porthalt);
2976 xudc_writel(xudc, porthalt, PORTHALT);
2979 portsc = xudc_readl(xudc, PORTSC);
2981 dev_dbg(xudc->dev, "PRC, PR, PORTSC = %#x\n", portsc);
2982 clear_port_change(xudc, PORTSC_PRC | PORTSC_PED);
2984 if (xudc->soc->port_reset_quirk) {
2985 schedule_delayed_work(&xudc->port_reset_war_work,
2987 xudc->wait_for_sec_prc = 1;
2992 dev_dbg(xudc->dev, "PRC, Not PR, PORTSC = %#x\n", portsc);
2993 clear_port_change(xudc, PORTSC_PRC | PORTSC_PED);
2994 tegra_xudc_port_reset(xudc);
2995 cancel_delayed_work(&xudc->port_reset_war_work);
2996 xudc->wait_for_sec_prc = 0;
2999 portsc = xudc_readl(xudc, PORTSC);
3001 dev_dbg(xudc->dev, "WRC, PORTSC = %#x\n", portsc);
3002 clear_port_change(xudc, PORTSC_WRC | PORTSC_PED);
3003 if (!(xudc_readl(xudc, PORTSC) & PORTSC_WPR))
3004 tegra_xudc_port_reset(xudc);
3007 portsc = xudc_readl(xudc, PORTSC);
3009 dev_dbg(xudc->dev, "CSC, PORTSC = %#x\n", portsc);
3010 clear_port_change(xudc, PORTSC_CSC);
3013 tegra_xudc_port_connect(xudc);
3015 tegra_xudc_port_disconnect(xudc);
3017 if (xudc->wait_csc) {
3018 cancel_delayed_work(&xudc->plc_reset_work);
3019 xudc->wait_csc = false;
3023 portsc = xudc_readl(xudc, PORTSC);
3027 dev_dbg(xudc->dev, "PLC, PORTSC = %#x\n", portsc);
3028 clear_port_change(xudc, PORTSC_PLC);
3031 tegra_xudc_port_suspend(xudc);
3034 if (xudc->gadget.speed < USB_SPEED_SUPER)
3035 tegra_xudc_port_resume(xudc);
3038 if (xudc->gadget.speed == USB_SPEED_SUPER)
3039 tegra_xudc_port_resume(xudc);
3042 schedule_delayed_work(&xudc->plc_reset_work,
3044 xudc->wait_csc = true;
3052 dev_warn(xudc->dev, "CEC, PORTSC = %#x\n", portsc);
3053 clear_port_change(xudc, PORTSC_CEC);
3056 dev_dbg(xudc->dev, "PORTSC = %#x\n", xudc_readl(xudc, PORTSC));
3059 static void tegra_xudc_handle_port_status(struct tegra_xudc *xudc)
3061 while ((xudc_readl(xudc, PORTSC) & PORTSC_CHANGE_MASK) ||
3062 (xudc_readl(xudc, PORTHALT) & PORTHALT_STCHG_REQ))
3063 __tegra_xudc_handle_port_status(xudc);
3066 static void tegra_xudc_handle_event(struct tegra_xudc *xudc,
3071 dump_trb(xudc, "EVENT", event);
3075 tegra_xudc_handle_port_status(xudc);
3078 tegra_xudc_handle_transfer_event(xudc, event);
3081 tegra_xudc_handle_ep0_event(xudc, event);
3084 dev_info(xudc->dev, "Unrecognized TRB type = %#x\n", type);
3089 static void tegra_xudc_process_event_ring(struct tegra_xudc *xudc)
3095 event = xudc->event_ring[xudc->event_ring_index] +
3096 xudc->event_ring_deq_ptr;
3098 if (trb_read_cycle(event) != xudc->ccs)
3101 tegra_xudc_handle_event(xudc, event);
3103 xudc->event_ring_deq_ptr++;
3104 if (xudc->event_ring_deq_ptr == XUDC_EVENT_RING_SIZE) {
3105 xudc->event_ring_deq_ptr = 0;
3106 xudc->event_ring_index++;
3109 if (xudc->event_ring_index == XUDC_NR_EVENT_RINGS) {
3110 xudc->event_ring_index = 0;
3111 xudc->ccs = !xudc->ccs;
3115 erdp = xudc->event_ring_phys[xudc->event_ring_index] +
3116 xudc->event_ring_deq_ptr * sizeof(*event);
3118 xudc_writel(xudc, upper_32_bits(erdp), ERDPHI);
3119 xudc_writel(xudc, lower_32_bits(erdp) | ERDPLO_EHB, ERDPLO);
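tegra_xudc_process_event_ring() is an xHCI-style consumer: an event TRB is valid only while its cycle bit matches the driver's consumer cycle state (ccs); when the dequeue pointer wraps past the last segment, ccs toggles, and the new dequeue position is written back through ERDPLO/ERDPHI. A self-contained sketch of that dequeue loop over a single in-memory segment (the driver uses several segments; the struct and field names here are invented):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8

struct trb {
    uint32_t data;
    bool cycle;     /* producer toggles this on every pass over the ring */
};

static struct trb ring[RING_SIZE];
static unsigned int deq;        /* consumer dequeue index */
static bool ccs = true;         /* consumer cycle state */

/* Consume events until we reach a TRB whose cycle bit does not match ccs,
 * i.e. one the producer has not written yet -- the same stop condition as
 * trb_read_cycle(event) != xudc->ccs above. */
static void process_event_ring(void)
{
    while (ring[deq].cycle == ccs) {
        printf("event %#x at index %u\n", (unsigned int)ring[deq].data, deq);

        if (++deq == RING_SIZE) {
            deq = 0;
            ccs = !ccs;         /* wrapped: toggle cycle state */
        }
    }
    /* the real driver now writes the new dequeue pointer to ERDPLO/ERDPHI */
}

int main(void)
{
    /* producer: post three events carrying the current producer cycle bit */
    for (unsigned int i = 0; i < 3; i++)
        ring[i] = (struct trb){ .data = 0x100 + i, .cycle = true };

    process_event_ring();
    return 0;
}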
3124 struct tegra_xudc *xudc = data;
3128 val = xudc_readl(xudc, ST);
3131 xudc_writel(xudc, ST_IP, ST);
3133 spin_lock_irqsave(&xudc->lock, flags);
3134 tegra_xudc_process_event_ring(xudc);
3135 spin_unlock_irqrestore(&xudc->lock, flags);
3140 static int tegra_xudc_alloc_ep(struct tegra_xudc *xudc, unsigned int index)
3142 struct tegra_xudc_ep *ep = &xudc->ep[index];
3144 ep->xudc = xudc;
3146 ep->context = &xudc->ep_context[index];
3156 ep->transfer_ring = dma_pool_alloc(xudc->transfer_ring_pool,
3175 list_add_tail(&ep->usb_ep.ep_list, &xudc->gadget.ep_list);
3189 static void tegra_xudc_free_ep(struct tegra_xudc *xudc, unsigned int index)
3191 struct tegra_xudc_ep *ep = &xudc->ep[index];
3200 dma_pool_free(xudc->transfer_ring_pool, ep->transfer_ring,
3204 static int tegra_xudc_alloc_eps(struct tegra_xudc *xudc)
3210 xudc->ep_context =
3211 dma_alloc_coherent(xudc->dev, XUDC_NR_EPS *
3212 sizeof(*xudc->ep_context),
3213 &xudc->ep_context_phys, GFP_KERNEL);
3214 if (!xudc->ep_context)
3217 xudc->transfer_ring_pool =
3218 dmam_pool_create(dev_name(xudc->dev), xudc->dev,
3222 if (!xudc->transfer_ring_pool) {
3227 INIT_LIST_HEAD(&xudc->gadget.ep_list);
3228 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++) {
3229 err = tegra_xudc_alloc_ep(xudc, i);
3234 req = tegra_xudc_ep_alloc_request(&xudc->ep[0].usb_ep, GFP_KERNEL);
3239 xudc->ep0_req = to_xudc_req(req);
3245 tegra_xudc_free_ep(xudc, i - 1);
3247 dma_free_coherent(xudc->dev, XUDC_NR_EPS * sizeof(*xudc->ep_context),
3248 xudc->ep_context, xudc->ep_context_phys);
3252 static void tegra_xudc_init_eps(struct tegra_xudc *xudc)
3254 xudc_writel(xudc, lower_32_bits(xudc->ep_context_phys), ECPLO);
3255 xudc_writel(xudc, upper_32_bits(xudc->ep_context_phys), ECPHI);
3258 static void tegra_xudc_free_eps(struct tegra_xudc *xudc)
3262 tegra_xudc_ep_free_request(&xudc->ep[0].usb_ep,
3263 &xudc->ep0_req->usb_req);
3265 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++)
3266 tegra_xudc_free_ep(xudc, i);
3268 dma_free_coherent(xudc->dev, XUDC_NR_EPS * sizeof(*xudc->ep_context),
3269 xudc->ep_context, xudc->ep_context_phys);
3272 static int tegra_xudc_alloc_event_ring(struct tegra_xudc *xudc)
3276 for (i = 0; i < ARRAY_SIZE(xudc->event_ring); i++) {
3277 xudc->event_ring[i] =
3278 dma_alloc_coherent(xudc->dev, XUDC_EVENT_RING_SIZE *
3279 sizeof(*xudc->event_ring[i]),
3280 &xudc->event_ring_phys[i],
3282 if (!xudc->event_ring[i])
3290 dma_free_coherent(xudc->dev, XUDC_EVENT_RING_SIZE *
3291 sizeof(*xudc->event_ring[i - 1]),
3292 xudc->event_ring[i - 1],
3293 xudc->event_ring_phys[i - 1]);
3298 static void tegra_xudc_init_event_ring(struct tegra_xudc *xudc)
3303 for (i = 0; i < ARRAY_SIZE(xudc->event_ring); i++) {
3304 memset(xudc->event_ring[i], 0, XUDC_EVENT_RING_SIZE *
3305 sizeof(*xudc->event_ring[i]));
3307 val = xudc_readl(xudc, ERSTSZ);
3310 xudc_writel(xudc, val, ERSTSZ);
3312 xudc_writel(xudc, lower_32_bits(xudc->event_ring_phys[i]),
3314 xudc_writel(xudc, upper_32_bits(xudc->event_ring_phys[i]),
3318 val = lower_32_bits(xudc->event_ring_phys[0]);
3319 xudc_writel(xudc, val, ERDPLO);
3321 xudc_writel(xudc, val, EREPLO);
3323 val = upper_32_bits(xudc->event_ring_phys[0]);
3324 xudc_writel(xudc, val, ERDPHI);
3325 xudc_writel(xudc, val, EREPHI);
3327 xudc->ccs = true;
3328 xudc->event_ring_index = 0;
3329 xudc->event_ring_deq_ptr = 0;
3332 static void tegra_xudc_free_event_ring(struct tegra_xudc *xudc)
3336 for (i = 0; i < ARRAY_SIZE(xudc->event_ring); i++) {
3337 dma_free_coherent(xudc->dev, XUDC_EVENT_RING_SIZE *
3338 sizeof(*xudc->event_ring[i]),
3339 xudc->event_ring[i],
3340 xudc->event_ring_phys[i]);
3344 static void tegra_xudc_fpci_ipfs_init(struct tegra_xudc *xudc)
3348 if (xudc->soc->has_ipfs) {
3349 val = ipfs_readl(xudc, XUSB_DEV_CONFIGURATION_0);
3351 ipfs_writel(xudc, val, XUSB_DEV_CONFIGURATION_0);
3358 fpci_writel(xudc, val, XUSB_DEV_CFG_1);
3361 val = fpci_readl(xudc, XUSB_DEV_CFG_4);
3363 val |= xudc->phys_base & (XUSB_DEV_CFG_4_BASE_ADDR_MASK);
3365 fpci_writel(xudc, val, XUSB_DEV_CFG_4);
3366 fpci_writel(xudc, upper_32_bits(xudc->phys_base), XUSB_DEV_CFG_5);
3370 if (xudc->soc->has_ipfs) {
3372 val = ipfs_readl(xudc, XUSB_DEV_INTR_MASK_0);
3374 ipfs_writel(xudc, val, XUSB_DEV_INTR_MASK_0);
3378 static void tegra_xudc_device_params_init(struct tegra_xudc *xudc)
3382 if (xudc->soc->has_ipfs) {
3383 val = xudc_readl(xudc, BLCG);
3391 xudc_writel(xudc, val, BLCG);
3394 if (xudc->soc->port_speed_quirk)
3395 tegra_xudc_limit_port_speed(xudc);
3398 val = xudc_readl(xudc, SSPX_CORE_PADCTL4);
3401 xudc_writel(xudc, val, SSPX_CORE_PADCTL4);
3404 val = xudc_readl(xudc, SSPX_CORE_CNT0);
3407 xudc_writel(xudc, val, SSPX_CORE_CNT0);
3410 val = xudc_readl(xudc, SSPX_CORE_CNT30);
3413 xudc_writel(xudc, val, SSPX_CORE_CNT30);
3415 if (xudc->soc->lpm_enable) {
3417 val = xudc_readl(xudc, HSFSPI_COUNT13);
3420 xudc_writel(xudc, val, HSFSPI_COUNT13);
3427 val = xudc_readl(xudc, SSPX_CORE_CNT32);
3430 xudc_writel(xudc, val, SSPX_CORE_CNT32);
3433 val = xudc_readl(xudc, CFG_DEV_FE);
3436 xudc_writel(xudc, val, CFG_DEV_FE);
3438 val = xudc_readl(xudc, PORTSC);
3441 xudc_writel(xudc, val, PORTSC);
3444 val = xudc_readl(xudc, CFG_DEV_FE);
3447 xudc_writel(xudc, val, CFG_DEV_FE);
3449 val = xudc_readl(xudc, PORTSC);
3452 xudc_writel(xudc, val, PORTSC);
3455 val = xudc_readl(xudc, CFG_DEV_FE);
3457 xudc_writel(xudc, val, CFG_DEV_FE);
3463 val = xudc_readl(xudc, CFG_DEV_FE);
3465 xudc_writel(xudc, val, CFG_DEV_FE);
3469 val = xudc_readl(xudc, RT_IMOD);
3472 xudc_writel(xudc, val, RT_IMOD);
3475 val = xudc_readl(xudc, CFG_DEV_SSPI_XFER);
3478 xudc_writel(xudc, val, CFG_DEV_SSPI_XFER);
3481 static int tegra_xudc_phy_get(struct tegra_xudc *xudc)
3486 xudc->utmi_phy = devm_kcalloc(xudc->dev, xudc->soc->num_phys,
3487 sizeof(*xudc->utmi_phy), GFP_KERNEL);
3488 if (!xudc->utmi_phy)
3491 xudc->usb3_phy = devm_kcalloc(xudc->dev, xudc->soc->num_phys,
3492 sizeof(*xudc->usb3_phy), GFP_KERNEL);
3493 if (!xudc->usb3_phy)
3496 xudc->usbphy = devm_kcalloc(xudc->dev, xudc->soc->num_phys,
3497 sizeof(*xudc->usbphy), GFP_KERNEL);
3498 if (!xudc->usbphy)
3501 xudc->vbus_nb.notifier_call = tegra_xudc_vbus_notify;
3503 for (i = 0; i < xudc->soc->num_phys; i++) {
3508 xudc->utmi_phy[i] = devm_phy_optional_get(xudc->dev, phy_name);
3509 if (IS_ERR(xudc->utmi_phy[i])) {
3510 err = PTR_ERR(xudc->utmi_phy[i]);
3512 dev_err(xudc->dev, "failed to get usb2-%d PHY: %d\n",
3516 } else if (xudc->utmi_phy[i]) {
3518 xudc->usbphy[i] = devm_usb_get_phy_by_node(xudc->dev,
3519 xudc->utmi_phy[i]->dev.of_node,
3520 &xudc->vbus_nb);
3521 if (IS_ERR(xudc->usbphy[i])) {
3522 err = PTR_ERR(xudc->usbphy[i]);
3523 dev_err(xudc->dev, "failed to get usbphy-%d: %d\n",
3527 } else if (!xudc->utmi_phy[i]) {
3533 usb3 = tegra_xusb_padctl_get_usb3_companion(xudc->padctl, i);
3538 xudc->usb3_phy[i] = devm_phy_optional_get(xudc->dev, phy_name);
3539 if (IS_ERR(xudc->usb3_phy[i])) {
3540 err = PTR_ERR(xudc->usb3_phy[i]);
3542 dev_err(xudc->dev, "failed to get usb3-%d PHY: %d\n",
3546 } else if (xudc->usb3_phy[i])
3547 dev_dbg(xudc->dev, "usb3-%d PHY registered", usb3);
3553 for (i = 0; i < xudc->soc->num_phys; i++) {
3554 xudc->usb3_phy[i] = NULL;
3555 xudc->utmi_phy[i] = NULL;
3556 xudc->usbphy[i] = NULL;
3562 static void tegra_xudc_phy_exit(struct tegra_xudc *xudc)
3566 for (i = 0; i < xudc->soc->num_phys; i++) {
3567 phy_exit(xudc->usb3_phy[i]);
3568 phy_exit(xudc->utmi_phy[i]);
3572 static int tegra_xudc_phy_init(struct tegra_xudc *xudc)
3577 for (i = 0; i < xudc->soc->num_phys; i++) {
3578 err = phy_init(xudc->utmi_phy[i]);
3580 dev_err(xudc->dev, "UTMI PHY #%u initialization failed: %d\n", i, err);
3584 err = phy_init(xudc->usb3_phy[i]);
3586 dev_err(xudc->dev, "USB3 PHY #%u initialization failed: %d\n", i, err);
3593 tegra_xudc_phy_exit(xudc);
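tegra_xudc_phy_init() initializes the UTMI and USB3 PHY for every lane and, if any phy_init() fails, unwinds through tegra_xudc_phy_exit(). A generic sketch of that init-then-roll-back shape with stubbed resources; unlike the driver it unwinds only what actually succeeded, and the failing index is fabricated just to exercise the error path:

#include <stdio.h>

#define NUM_RES 3

/* Stubs standing in for phy_init()/phy_exit(); resource 2 is made to fail
 * so the rollback path runs. */
static int res_init(int i)  { return (i == 2) ? -1 : 0; }
static void res_exit(int i) { printf("exit %d\n", i); }

static int init_all(void)
{
    int i, err;

    for (i = 0; i < NUM_RES; i++) {
        err = res_init(i);
        if (err) {
            fprintf(stderr, "init %d failed: %d\n", i, err);
            goto exit_partial;
        }
    }
    return 0;

exit_partial:
    /* unwind only the resources that were successfully initialized */
    while (--i >= 0)
        res_exit(i);
    return err;
}

int main(void)
{
    return init_all() ? 1 : 0;
}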
3663 .compatible = "nvidia,tegra210-xudc",
3667 .compatible = "nvidia,tegra186-xudc",
3671 .compatible = "nvidia,tegra194-xudc",
3678 static void tegra_xudc_powerdomain_remove(struct tegra_xudc *xudc)
3680 if (xudc->genpd_dl_ss)
3681 device_link_del(xudc->genpd_dl_ss);
3682 if (xudc->genpd_dl_device)
3683 device_link_del(xudc->genpd_dl_device);
3684 if (xudc->genpd_dev_ss)
3685 dev_pm_domain_detach(xudc->genpd_dev_ss, true);
3686 if (xudc->genpd_dev_device)
3687 dev_pm_domain_detach(xudc->genpd_dev_device, true);
3690 static int tegra_xudc_powerdomain_init(struct tegra_xudc *xudc)
3692 struct device *dev = xudc->dev;
3695 xudc->genpd_dev_device = dev_pm_domain_attach_by_name(dev, "dev");
3696 if (IS_ERR(xudc->genpd_dev_device)) {
3697 err = PTR_ERR(xudc->genpd_dev_device);
3702 xudc->genpd_dev_ss = dev_pm_domain_attach_by_name(dev, "ss");
3703 if (IS_ERR(xudc->genpd_dev_ss)) {
3704 err = PTR_ERR(xudc->genpd_dev_ss);
3709 xudc->genpd_dl_device = device_link_add(dev, xudc->genpd_dev_device,
3712 if (!xudc->genpd_dl_device) {
3717 xudc->genpd_dl_ss = device_link_add(dev, xudc->genpd_dev_ss,
3720 if (!xudc->genpd_dl_ss) {
3730 struct tegra_xudc *xudc;
3735 xudc = devm_kzalloc(&pdev->dev, sizeof(*xudc), GFP_KERNEL);
3736 if (!xudc)
3739 xudc->dev = &pdev->dev;
3740 platform_set_drvdata(pdev, xudc);
3742 xudc->soc = of_device_get_match_data(&pdev->dev);
3743 if (!xudc->soc)
3747 xudc->base = devm_ioremap_resource(&pdev->dev, res);
3748 if (IS_ERR(xudc->base))
3749 return PTR_ERR(xudc->base);
3750 xudc->phys_base = res->start;
3752 xudc->fpci = devm_platform_ioremap_resource_byname(pdev, "fpci");
3753 if (IS_ERR(xudc->fpci))
3754 return PTR_ERR(xudc->fpci);
3756 if (xudc->soc->has_ipfs) {
3757 xudc->ipfs = devm_platform_ioremap_resource_byname(pdev, "ipfs");
3758 if (IS_ERR(xudc->ipfs))
3759 return PTR_ERR(xudc->ipfs);
3762 xudc->irq = platform_get_irq(pdev, 0);
3763 if (xudc->irq < 0)
3764 return xudc->irq;
3766 err = devm_request_irq(&pdev->dev, xudc->irq, tegra_xudc_irq, 0,
3767 dev_name(&pdev->dev), xudc);
3769 dev_err(xudc->dev, "failed to claim IRQ#%u: %d\n", xudc->irq,
3774 xudc->clks = devm_kcalloc(&pdev->dev, xudc->soc->num_clks, sizeof(*xudc->clks),
3776 if (!xudc->clks)
3779 for (i = 0; i < xudc->soc->num_clks; i++)
3780 xudc->clks[i].id = xudc->soc->clock_names[i];
3782 err = devm_clk_bulk_get(&pdev->dev, xudc->soc->num_clks, xudc->clks);
3785 dev_err(xudc->dev, "failed to request clocks: %d\n", err);
3790 xudc->supplies = devm_kcalloc(&pdev->dev, xudc->soc->num_supplies,
3791 sizeof(*xudc->supplies), GFP_KERNEL);
3792 if (!xudc->supplies)
3795 for (i = 0; i < xudc->soc->num_supplies; i++)
3796 xudc->supplies[i].supply = xudc->soc->supply_names[i];
3798 err = devm_regulator_bulk_get(&pdev->dev, xudc->soc->num_supplies,
3799 xudc->supplies);
3802 dev_err(xudc->dev, "failed to request regulators: %d\n", err);
3807 xudc->padctl = tegra_xusb_padctl_get(&pdev->dev);
3808 if (IS_ERR(xudc->padctl))
3809 return PTR_ERR(xudc->padctl);
3811 err = regulator_bulk_enable(xudc->soc->num_supplies, xudc->supplies);
3813 dev_err(xudc->dev, "failed to enable regulators: %d\n", err);
3817 err = tegra_xudc_phy_get(xudc);
3821 err = tegra_xudc_powerdomain_init(xudc);
3825 err = tegra_xudc_phy_init(xudc);
3829 err = tegra_xudc_alloc_event_ring(xudc);
3833 err = tegra_xudc_alloc_eps(xudc);
3837 spin_lock_init(&xudc->lock);
3839 init_completion(&xudc->disconnect_complete);
3841 INIT_WORK(&xudc->usb_role_sw_work, tegra_xudc_usb_role_sw_work);
3843 INIT_DELAYED_WORK(&xudc->plc_reset_work, tegra_xudc_plc_reset_work);
3845 INIT_DELAYED_WORK(&xudc->port_reset_war_work,
3850 xudc->gadget.ops = &tegra_xudc_gadget_ops;
3851 xudc->gadget.ep0 = &xudc->ep[0].usb_ep;
3852 xudc->gadget.name = "tegra-xudc";
3853 xudc->gadget.max_speed = USB_SPEED_SUPER;
3855 err = usb_add_gadget_udc(&pdev->dev, &xudc->gadget);
3865 tegra_xudc_free_eps(xudc);
3867 tegra_xudc_free_event_ring(xudc);
3869 tegra_xudc_phy_exit(xudc);
3871 tegra_xudc_powerdomain_remove(xudc);
3873 regulator_bulk_disable(xudc->soc->num_supplies, xudc->supplies);
3875 tegra_xusb_padctl_put(xudc->padctl);
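The probe path acquires its resources in order (registers, IRQ, clocks, regulators, padctl, PHYs, power domains, event ring, endpoints, gadget) and on failure falls through a ladder of goto labels that release them in reverse; tegra_xudc_remove() repeats the same teardown. A minimal sketch of that goto-ladder pattern with hypothetical get/put pairs:

#include <stdio.h>

/* Hypothetical acquire/release pairs; acquiring "c" fails so the error
 * ladder below is taken. */
static int get_a(void)  { printf("get a\n"); return 0; }
static void put_a(void) { printf("put a\n"); }
static int get_b(void)  { printf("get b\n"); return 0; }
static void put_b(void) { printf("put b\n"); }
static int get_c(void)  { printf("get c (fails)\n"); return -1; }

static int probe(void)
{
    int err;

    err = get_a();
    if (err)
        goto out;

    err = get_b();
    if (err)
        goto release_a;

    err = get_c();
    if (err)
        goto release_b;

    return 0;           /* success: everything stays acquired */

release_b:
    put_b();
release_a:
    put_a();
out:
    return err;
}

int main(void)
{
    return probe() ? 1 : 0;
}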
3882 struct tegra_xudc *xudc = platform_get_drvdata(pdev);
3885 pm_runtime_get_sync(xudc->dev);
3887 cancel_delayed_work_sync(&xudc->plc_reset_work);
3888 cancel_work_sync(&xudc->usb_role_sw_work);
3890 usb_del_gadget_udc(&xudc->gadget);
3892 tegra_xudc_free_eps(xudc);
3893 tegra_xudc_free_event_ring(xudc);
3895 tegra_xudc_powerdomain_remove(xudc);
3897 regulator_bulk_disable(xudc->soc->num_supplies, xudc->supplies);
3899 for (i = 0; i < xudc->soc->num_phys; i++) {
3900 phy_power_off(xudc->utmi_phy[i]);
3901 phy_power_off(xudc->usb3_phy[i]);
3904 tegra_xudc_phy_exit(xudc);
3906 pm_runtime_disable(xudc->dev);
3907 pm_runtime_put(xudc->dev);
3909 tegra_xusb_padctl_put(xudc->padctl);
3914 static int __maybe_unused tegra_xudc_powergate(struct tegra_xudc *xudc)
3918 dev_dbg(xudc->dev, "entering ELPG\n");
3920 spin_lock_irqsave(&xudc->lock, flags);
3922 xudc->powergated = true;
3923 xudc->saved_regs.ctrl = xudc_readl(xudc, CTRL);
3924 xudc->saved_regs.portpm = xudc_readl(xudc, PORTPM);
3925 xudc_writel(xudc, 0, CTRL);
3927 spin_unlock_irqrestore(&xudc->lock, flags);
3929 clk_bulk_disable_unprepare(xudc->soc->num_clks, xudc->clks);
3931 regulator_bulk_disable(xudc->soc->num_supplies, xudc->supplies);
3933 dev_dbg(xudc->dev, "entering ELPG done\n");
3937 static int __maybe_unused tegra_xudc_unpowergate(struct tegra_xudc *xudc)
3942 dev_dbg(xudc->dev, "exiting ELPG\n");
3944 err = regulator_bulk_enable(xudc->soc->num_supplies,
3945 xudc->supplies);
3949 err = clk_bulk_prepare_enable(xudc->soc->num_clks, xudc->clks);
3953 tegra_xudc_fpci_ipfs_init(xudc);
3955 tegra_xudc_device_params_init(xudc);
3957 tegra_xudc_init_event_ring(xudc);
3959 tegra_xudc_init_eps(xudc);
3961 xudc_writel(xudc, xudc->saved_regs.portpm, PORTPM);
3962 xudc_writel(xudc, xudc->saved_regs.ctrl, CTRL);
3964 spin_lock_irqsave(&xudc->lock, flags);
3965 xudc->powergated = false;
3966 spin_unlock_irqrestore(&xudc->lock, flags);
3968 dev_dbg(xudc->dev, "exiting ELPG done\n");
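tegra_xudc_powergate() saves CTRL and PORTPM into xudc->saved_regs and zeroes CTRL before dropping clocks and supplies; tegra_xudc_unpowergate() re-enables them, reprograms the controller, and writes the saved values back. A small sketch of that save/quiesce/restore shape around a simulated power-off (the struct layout and register values are illustrative):

#include <stdint.h>
#include <stdio.h>

/* Simulated controller registers that lose their contents while gated. */
static struct {
    uint32_t ctrl;
    uint32_t portpm;
} regs;

/* Software copy kept across the gated period, like xudc->saved_regs. */
static struct {
    uint32_t ctrl;
    uint32_t portpm;
} saved;

static void powergate(void)
{
    saved.ctrl = regs.ctrl;         /* save before the domain goes down */
    saved.portpm = regs.portpm;
    regs.ctrl = 0;                  /* quiesce the controller */

    regs.ctrl = regs.portpm = 0xdeaddead;   /* model the power-off */
}

static void unpowergate(void)
{
    /* re-init (clocks, event ring, endpoints) would happen here, then: */
    regs.portpm = saved.portpm;
    regs.ctrl = saved.ctrl;
}

int main(void)
{
    regs.ctrl = 0x11;
    regs.portpm = 0x22;
    powergate();
    unpowergate();
    printf("ctrl=%#x portpm=%#x\n",
           (unsigned int)regs.ctrl, (unsigned int)regs.portpm);
    return 0;
}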
3974 struct tegra_xudc *xudc = dev_get_drvdata(dev);
3977 spin_lock_irqsave(&xudc->lock, flags);
3978 xudc->suspended = true;
3979 spin_unlock_irqrestore(&xudc->lock, flags);
3981 flush_work(&xudc->usb_role_sw_work);
3985 tegra_xudc_device_mode_off(xudc);
3986 tegra_xudc_powergate(xudc);
3996 struct tegra_xudc *xudc = dev_get_drvdata(dev);
4000 err = tegra_xudc_unpowergate(xudc);
4004 spin_lock_irqsave(&xudc->lock, flags);
4005 xudc->suspended = false;
4006 spin_unlock_irqrestore(&xudc->lock, flags);
4008 schedule_work(&xudc->usb_role_sw_work);
4017 struct tegra_xudc *xudc = dev_get_drvdata(dev);
4019 return tegra_xudc_powergate(xudc);
4024 struct tegra_xudc *xudc = dev_get_drvdata(dev);
4026 return tegra_xudc_unpowergate(xudc);
4039 .name = "tegra-xudc",