Lines matching defs:xudc — NVIDIA Tegra XUSB device controller (XUDC) gadget driver, drivers/usb/gadget/udc/tegra-xudc.c
396 struct tegra_xudc *xudc;
560 static inline u32 fpci_readl(struct tegra_xudc *xudc, unsigned int offset)
562 return readl(xudc->fpci + offset);
565 static inline void fpci_writel(struct tegra_xudc *xudc, u32 val,
568 writel(val, xudc->fpci + offset);
571 static inline u32 ipfs_readl(struct tegra_xudc *xudc, unsigned int offset)
573 return readl(xudc->ipfs + offset);
576 static inline void ipfs_writel(struct tegra_xudc *xudc, u32 val,
579 writel(val, xudc->ipfs + offset);
582 static inline u32 xudc_readl(struct tegra_xudc *xudc, unsigned int offset)
584 return readl(xudc->base + offset);
587 static inline void xudc_writel(struct tegra_xudc *xudc, u32 val,
590 writel(val, xudc->base + offset);
593 static inline int xudc_readl_poll(struct tegra_xudc *xudc,
598 return readl_poll_timeout_atomic(xudc->base + offset, regval,
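The fpci_*, ipfs_* and xudc_* helpers above are thin wrappers around readl()/writel() for the three register apertures the driver maps, and xudc_readl_poll() builds a busy-wait on top of them (readl_poll_timeout_atomic() comes from <linux/iopoll.h>). A minimal sketch of the polling helper and a typical caller; the 1 us poll interval and 10 ms timeout are assumptions, not taken from the listing:

	static inline int xudc_readl_poll(struct tegra_xudc *xudc,
					  unsigned int offset, u32 mask, u32 val)
	{
		u32 regval;

		/* Poll every 1 us, give up after 10 ms (assumed values). */
		return readl_poll_timeout_atomic(xudc->base + offset, regval,
						 (regval & mask) == val, 1, 10000);
	}

	/* Typical caller: kick an endpoint reload and wait for it to finish. */
	static void example_wait_for_reload(struct tegra_xudc *xudc, unsigned int ep)
	{
		xudc_writel(xudc, BIT(ep), EP_RELOAD);
		xudc_readl_poll(xudc, EP_RELOAD, BIT(ep), 0);
	}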
617 static inline void dump_trb(struct tegra_xudc *xudc, const char *type,
620 dev_dbg(xudc->dev,
626 static void tegra_xudc_limit_port_speed(struct tegra_xudc *xudc)
631 val = xudc_readl(xudc, SSPX_CORE_CNT56);
634 xudc_writel(xudc, val, SSPX_CORE_CNT56);
636 val = xudc_readl(xudc, SSPX_CORE_CNT57);
639 xudc_writel(xudc, val, SSPX_CORE_CNT57);
641 val = xudc_readl(xudc, SSPX_CORE_CNT65);
644 xudc_writel(xudc, val, SSPX_CORE_CNT66);
646 val = xudc_readl(xudc, SSPX_CORE_CNT66);
649 xudc_writel(xudc, val, SSPX_CORE_CNT66);
651 val = xudc_readl(xudc, SSPX_CORE_CNT67);
654 xudc_writel(xudc, val, SSPX_CORE_CNT67);
656 val = xudc_readl(xudc, SSPX_CORE_CNT72);
659 xudc_writel(xudc, val, SSPX_CORE_CNT72);
662 static void tegra_xudc_restore_port_speed(struct tegra_xudc *xudc)
667 val = xudc_readl(xudc, SSPX_CORE_CNT56);
670 xudc_writel(xudc, val, SSPX_CORE_CNT56);
672 val = xudc_readl(xudc, SSPX_CORE_CNT57);
675 xudc_writel(xudc, val, SSPX_CORE_CNT57);
677 val = xudc_readl(xudc, SSPX_CORE_CNT65);
680 xudc_writel(xudc, val, SSPX_CORE_CNT66);
682 val = xudc_readl(xudc, SSPX_CORE_CNT66);
685 xudc_writel(xudc, val, SSPX_CORE_CNT66);
687 val = xudc_readl(xudc, SSPX_CORE_CNT67);
690 xudc_writel(xudc, val, SSPX_CORE_CNT67);
692 val = xudc_readl(xudc, SSPX_CORE_CNT72);
695 xudc_writel(xudc, val, SSPX_CORE_CNT72);
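tegra_xudc_limit_port_speed() and tegra_xudc_restore_port_speed() apply the same read-modify-write shape to each SSPX_CORE_CNTxx counter; only the programmed values differ (Gen-1 limits versus the defaults). A hedged sketch of one such update; the field mask and value macro below are illustrative placeholders, not the driver's real names:

	/* Placeholder field macros, for illustration only. */
	#define EXAMPLE_CNT56_FIELD_MASK	GENMASK(11, 0)
	#define EXAMPLE_CNT56_FIELD(x)		((x) & EXAMPLE_CNT56_FIELD_MASK)

	static void example_program_cnt56(struct tegra_xudc *xudc, u32 limit)
	{
		u32 val;

		val = xudc_readl(xudc, SSPX_CORE_CNT56);	/* read counter */
		val &= ~EXAMPLE_CNT56_FIELD_MASK;		/* clear the field */
		val |= EXAMPLE_CNT56_FIELD(limit);		/* program new value */
		xudc_writel(xudc, val, SSPX_CORE_CNT56);	/* write it back */
	}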
698 static void tegra_xudc_device_mode_on(struct tegra_xudc *xudc)
702 pm_runtime_get_sync(xudc->dev);
704 tegra_phy_xusb_utmi_pad_power_on(xudc->curr_utmi_phy);
706 err = phy_power_on(xudc->curr_utmi_phy);
708 dev_err(xudc->dev, "UTMI power on failed: %d\n", err);
710 err = phy_power_on(xudc->curr_usb3_phy);
712 dev_err(xudc->dev, "USB3 PHY power on failed: %d\n", err);
714 dev_dbg(xudc->dev, "device mode on\n");
716 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG,
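The tail of tegra_xudc_device_mode_on() is cut off by the match filter at line 716; the role argument continues on the following source line. A condensed sketch of the whole function, assuming the USB_ROLE_DEVICE submode used in mainline:

	static void example_device_mode_on(struct tegra_xudc *xudc)
	{
		int err;

		pm_runtime_get_sync(xudc->dev);

		tegra_phy_xusb_utmi_pad_power_on(xudc->curr_utmi_phy);

		err = phy_power_on(xudc->curr_utmi_phy);
		if (err < 0)
			dev_err(xudc->dev, "UTMI power on failed: %d\n", err);

		err = phy_power_on(xudc->curr_usb3_phy);
		if (err < 0)
			dev_err(xudc->dev, "USB3 PHY power on failed: %d\n", err);

		dev_dbg(xudc->dev, "device mode on\n");

		/* Assumed continuation of line 716: switch the pad to device role. */
		phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG,
				 USB_ROLE_DEVICE);
	}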
720 static void tegra_xudc_device_mode_off(struct tegra_xudc *xudc)
726 dev_dbg(xudc->dev, "device mode off\n");
728 connected = !!(xudc_readl(xudc, PORTSC) & PORTSC_CCS);
730 reinit_completion(&xudc->disconnect_complete);
732 if (xudc->soc->port_speed_quirk)
733 tegra_xudc_restore_port_speed(xudc);
735 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG, USB_ROLE_NONE);
737 pls = (xudc_readl(xudc, PORTSC) & PORTSC_PLS_MASK) >>
741 if (xudc->soc->pls_quirk && xudc->gadget.speed == USB_SPEED_SUPER &&
743 val = xudc_readl(xudc, PORTPM);
745 xudc_writel(xudc, val, PORTPM);
747 val = xudc_readl(xudc, PORTSC);
750 xudc_writel(xudc, val, PORTSC);
755 wait_for_completion(&xudc->disconnect_complete);
758 synchronize_irq(xudc->irq);
760 tegra_phy_xusb_utmi_pad_power_down(xudc->curr_utmi_phy);
762 err = phy_power_off(xudc->curr_utmi_phy);
764 dev_err(xudc->dev, "UTMI PHY power off failed: %d\n", err);
766 err = phy_power_off(xudc->curr_usb3_phy);
768 dev_err(xudc->dev, "USB3 PHY power off failed: %d\n", err);
770 pm_runtime_put(xudc->dev);
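Several paths here and below (lines 737, 849, 878) extract the port link state from PORTSC with the same mask-and-shift; a two-line helper equivalent, with PORTSC_PLS_SHIFT assumed as the shift macro the truncated lines continue with:

	static u32 example_port_link_state(struct tegra_xudc *xudc)
	{
		/* PORTSC_PLS_MASK is visible above; PORTSC_PLS_SHIFT is assumed. */
		return (xudc_readl(xudc, PORTSC) & PORTSC_PLS_MASK) >>
		       PORTSC_PLS_SHIFT;
	}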
775 struct tegra_xudc *xudc = container_of(work, struct tegra_xudc,
778 if (xudc->device_mode)
779 tegra_xudc_device_mode_on(xudc);
781 tegra_xudc_device_mode_off(xudc);
784 static int tegra_xudc_get_phy_index(struct tegra_xudc *xudc,
789 for (i = 0; i < xudc->soc->num_phys; i++) {
790 if (xudc->usbphy[i] && usbphy == xudc->usbphy[i])
794 dev_info(xudc->dev, "phy index could not be found for shared USB PHY");
798 static void tegra_xudc_update_data_role(struct tegra_xudc *xudc,
803 if ((xudc->device_mode && usbphy->last_event == USB_EVENT_VBUS) ||
804 (!xudc->device_mode && usbphy->last_event != USB_EVENT_VBUS)) {
805 dev_dbg(xudc->dev, "Same role(%d) received. Ignore",
806 xudc->device_mode);
810 xudc->device_mode = (usbphy->last_event == USB_EVENT_VBUS) ? true :
813 phy_index = tegra_xudc_get_phy_index(xudc, usbphy);
814 dev_dbg(xudc->dev, "%s(): current phy index is %d\n", __func__,
817 if (!xudc->suspended && phy_index != -1) {
818 xudc->curr_utmi_phy = xudc->utmi_phy[phy_index];
819 xudc->curr_usb3_phy = xudc->usb3_phy[phy_index];
820 xudc->curr_usbphy = usbphy;
821 schedule_work(&xudc->usb_role_sw_work);
828 struct tegra_xudc *xudc = container_of(nb, struct tegra_xudc,
832 dev_dbg(xudc->dev, "%s(): event is %d\n", __func__, usbphy->last_event);
834 tegra_xudc_update_data_role(xudc, usbphy);
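Lines 828-834 belong to the VBUS notifier callback registered through xudc->vbus_nb (line 3512) and usb_register_notifier() (line 3896). A sketch reconstructing the handler around the matched lines; the NOTIFY_OK return is an assumption:

	static int tegra_xudc_vbus_notify(struct notifier_block *nb,
					  unsigned long action, void *data)
	{
		struct tegra_xudc *xudc = container_of(nb, struct tegra_xudc,
						       vbus_nb);
		struct usb_phy *usbphy = (struct usb_phy *)data;

		dev_dbg(xudc->dev, "%s(): event is %d\n", __func__,
			usbphy->last_event);

		tegra_xudc_update_data_role(xudc, usbphy);

		return NOTIFY_OK;
	}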
842 struct tegra_xudc *xudc = container_of(dwork, struct tegra_xudc,
846 spin_lock_irqsave(&xudc->lock, flags);
848 if (xudc->wait_csc) {
849 u32 pls = (xudc_readl(xudc, PORTSC) & PORTSC_PLS_MASK) >>
853 dev_info(xudc->dev, "PLS = Inactive. Toggle VBUS\n");
854 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG,
856 phy_set_mode_ext(xudc->curr_utmi_phy, PHY_MODE_USB_OTG,
859 xudc->wait_csc = false;
863 spin_unlock_irqrestore(&xudc->lock, flags);
869 struct tegra_xudc *xudc =
875 spin_lock_irqsave(&xudc->lock, flags);
877 if (xudc->device_mode && xudc->wait_for_sec_prc) {
878 pls = (xudc_readl(xudc, PORTSC) & PORTSC_PLS_MASK) >>
880 dev_dbg(xudc->dev, "pls = %x\n", pls);
883 dev_dbg(xudc->dev, "toggle vbus\n");
886 xudc->curr_utmi_phy);
888 xudc->wait_for_sec_prc = 0;
892 spin_unlock_irqrestore(&xudc->lock, flags);
924 static void ep_reload(struct tegra_xudc *xudc, unsigned int ep)
926 xudc_writel(xudc, BIT(ep), EP_RELOAD);
927 xudc_readl_poll(xudc, EP_RELOAD, BIT(ep), 0);
930 static void ep_pause(struct tegra_xudc *xudc, unsigned int ep)
934 val = xudc_readl(xudc, EP_PAUSE);
939 xudc_writel(xudc, val, EP_PAUSE);
941 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
943 xudc_writel(xudc, BIT(ep), EP_STCHG);
946 static void ep_unpause(struct tegra_xudc *xudc, unsigned int ep)
950 val = xudc_readl(xudc, EP_PAUSE);
955 xudc_writel(xudc, val, EP_PAUSE);
957 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
959 xudc_writel(xudc, BIT(ep), EP_STCHG);
962 static void ep_unpause_all(struct tegra_xudc *xudc)
966 val = xudc_readl(xudc, EP_PAUSE);
968 xudc_writel(xudc, 0, EP_PAUSE);
970 xudc_readl_poll(xudc, EP_STCHG, val, val);
972 xudc_writel(xudc, val, EP_STCHG);
975 static void ep_halt(struct tegra_xudc *xudc, unsigned int ep)
979 val = xudc_readl(xudc, EP_HALT);
983 xudc_writel(xudc, val, EP_HALT);
985 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
987 xudc_writel(xudc, BIT(ep), EP_STCHG);
990 static void ep_unhalt(struct tegra_xudc *xudc, unsigned int ep)
994 val = xudc_readl(xudc, EP_HALT);
998 xudc_writel(xudc, val, EP_HALT);
1000 xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
1002 xudc_writel(xudc, BIT(ep), EP_STCHG);
1005 static void ep_unhalt_all(struct tegra_xudc *xudc)
1009 val = xudc_readl(xudc, EP_HALT);
1012 xudc_writel(xudc, 0, EP_HALT);
1014 xudc_readl_poll(xudc, EP_STCHG, val, val);
1016 xudc_writel(xudc, val, EP_STCHG);
1019 static void ep_wait_for_stopped(struct tegra_xudc *xudc, unsigned int ep)
1021 xudc_readl_poll(xudc, EP_STOPPED, BIT(ep), BIT(ep));
1022 xudc_writel(xudc, BIT(ep), EP_STOPPED);
1025 static void ep_wait_for_inactive(struct tegra_xudc *xudc, unsigned int ep)
1027 xudc_readl_poll(xudc, EP_THREAD_ACTIVE, BIT(ep), 0);
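The ep_pause()/ep_unpause()/ep_halt()/ep_unhalt() helpers all follow one handshake: flip the per-endpoint bit in EP_PAUSE or EP_HALT, wait for the matching EP_STCHG bit, then write-1-to-clear it (the real helpers also return early when the bit already has the requested state). A generic sketch of that shape:

	static void example_ep_change_state(struct tegra_xudc *xudc, unsigned int ep,
					    unsigned int reg, bool set)
	{
		u32 val;

		val = xudc_readl(xudc, reg);	/* reg is EP_PAUSE or EP_HALT */
		if (set)
			val |= BIT(ep);
		else
			val &= ~BIT(ep);
		xudc_writel(xudc, val, reg);

		/* Wait for the controller to acknowledge the state change. */
		xudc_readl_poll(xudc, EP_STCHG, BIT(ep), BIT(ep));
		xudc_writel(xudc, BIT(ep), EP_STCHG);
	}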
1033 struct tegra_xudc *xudc = ep->xudc;
1035 dev_dbg(xudc->dev, "completing request %p on EP %u with status %d\n",
1044 usb_gadget_unmap_request(&xudc->gadget, &req->usb_req,
1045 (xudc->setup_state ==
1048 usb_gadget_unmap_request(&xudc->gadget, &req->usb_req,
1052 spin_unlock(&xudc->lock);
1054 spin_lock(&xudc->lock);
1084 struct tegra_xudc *xudc = ep->xudc;
1110 (xudc->setup_state == DATA_STAGE_RECV)))
1116 if (xudc->setup_state == DATA_STAGE_XFER ||
1117 xudc->setup_state == DATA_STAGE_RECV)
1122 if (xudc->setup_state == DATA_STAGE_XFER ||
1123 xudc->setup_state == STATUS_STAGE_XFER)
1145 dump_trb(xudc, "TRANSFER", trb);
1211 struct tegra_xudc *xudc = ep->xudc;
1219 val |= DB_STREAMID(xudc->setup_seq_num);
1232 dev_dbg(xudc->dev, "ring doorbell: %#x\n", val);
1233 xudc_writel(xudc, val, DB);
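Ringing the doorbell at line 1233 writes a value composed from the endpoint target and, for control endpoints, the current setup sequence number as the stream ID. A sketch of that composition; DB_TARGET() is an assumed macro name, DB_STREAMID() appears at line 1219:

	static void example_ring_doorbell(struct tegra_xudc_ep *ep)
	{
		struct tegra_xudc *xudc = ep->xudc;
		u32 val;

		if (!ep->desc)
			return;

		val = DB_TARGET(ep->index);	/* assumed macro */
		if (usb_endpoint_xfer_control(ep->desc))
			val |= DB_STREAMID(xudc->setup_seq_num);

		dev_dbg(xudc->dev, "ring doorbell: %#x\n", val);
		xudc_writel(xudc, val, DB);
	}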
1256 struct tegra_xudc *xudc = ep->xudc;
1260 dev_err(xudc->dev, "control EP has pending transfers\n");
1265 err = usb_gadget_map_request(&xudc->gadget, &req->usb_req,
1266 (xudc->setup_state ==
1269 err = usb_gadget_map_request(&xudc->gadget, &req->usb_req,
1274 dev_err(xudc->dev, "failed to map request: %d\n", err);
1311 struct tegra_xudc *xudc;
1320 xudc = ep->xudc;
1322 spin_lock_irqsave(&xudc->lock, flags);
1323 if (xudc->powergated || !ep->desc) {
1330 spin_unlock_irqrestore(&xudc->lock, flags);
1383 dev_dbg(ep->xudc->dev, "%s: request %p -> %p; trb %p\n", __func__,
1407 dev_dbg(ep->xudc->dev, "%s: request %p -> %p; enq ptr: %p; trb %p\n",
1424 struct tegra_xudc *xudc = ep->xudc;
1449 ep_pause(xudc, ep->index);
1450 ep_wait_for_inactive(xudc, ep->index);
1483 if (dma_mapping_error(xudc->dev, deq_ptr)) {
1488 ep_reload(xudc, ep->index);
1507 ep_unpause(xudc, ep->index);
1520 struct tegra_xudc *xudc;
1529 xudc = ep->xudc;
1531 spin_lock_irqsave(&xudc->lock, flags);
1533 if (xudc->powergated || !ep->desc) {
1540 spin_unlock_irqrestore(&xudc->lock, flags);
1547 struct tegra_xudc *xudc = ep->xudc;
1553 dev_err(xudc->dev, "can't halt isochronous EP\n");
1557 if (!!(xudc_readl(xudc, EP_HALT) & BIT(ep->index)) == halt) {
1558 dev_dbg(xudc->dev, "EP %u already %s\n", ep->index,
1564 ep_halt(xudc, ep->index);
1568 ep_reload(xudc, ep->index);
1576 ep_reload(xudc, ep->index);
1577 ep_unpause(xudc, ep->index);
1578 ep_unhalt(xudc, ep->index);
1589 struct tegra_xudc *xudc;
1597 xudc = ep->xudc;
1599 spin_lock_irqsave(&xudc->lock, flags);
1600 if (xudc->powergated) {
1607 dev_err(xudc->dev, "can't halt EP with requests pending\n");
1614 spin_unlock_irqrestore(&xudc->lock, flags);
1623 struct tegra_xudc *xudc = ep->xudc;
1628 if (xudc->gadget.speed == USB_SPEED_SUPER) {
1634 } else if ((xudc->gadget.speed < USB_SPEED_SUPER) &&
1637 if (xudc->gadget.speed == USB_SPEED_HIGH) {
1640 dev_warn(xudc->dev,
1652 if (xudc->gadget.speed == USB_SPEED_SUPER) {
1710 struct tegra_xudc *xudc = ep->xudc;
1713 dev_err(xudc->dev, "endpoint %u already disabled\n",
1720 ep_reload(xudc, ep->index);
1724 xudc->nr_enabled_eps--;
1726 xudc->nr_isoch_eps--;
1733 ep_unpause(xudc, ep->index);
1734 ep_unhalt(xudc, ep->index);
1735 if (xudc_readl(xudc, EP_STOPPED) & BIT(ep->index))
1736 xudc_writel(xudc, BIT(ep->index), EP_STOPPED);
1742 if ((xudc->device_state == USB_STATE_CONFIGURED) &&
1743 (xudc->nr_enabled_eps == 1)) {
1746 xudc->device_state = USB_STATE_ADDRESS;
1747 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
1749 val = xudc_readl(xudc, CTRL);
1751 xudc_writel(xudc, val, CTRL);
1754 dev_info(xudc->dev, "ep %u disabled\n", ep->index);
1762 struct tegra_xudc *xudc;
1770 xudc = ep->xudc;
1772 spin_lock_irqsave(&xudc->lock, flags);
1773 if (xudc->powergated) {
1780 spin_unlock_irqrestore(&xudc->lock, flags);
1788 struct tegra_xudc *xudc = ep->xudc;
1792 if (xudc->gadget.speed == USB_SPEED_SUPER &&
1804 if (xudc->nr_isoch_eps > XUDC_MAX_ISOCH_EPS) {
1805 dev_err(xudc->dev, "too many isochronous endpoints\n");
1808 xudc->nr_isoch_eps++;
1819 xudc->nr_enabled_eps++;
1834 if (xudc->device_state == USB_STATE_ADDRESS) {
1835 val = xudc_readl(xudc, CTRL);
1837 xudc_writel(xudc, val, CTRL);
1839 xudc->device_state = USB_STATE_CONFIGURED;
1840 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
1848 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++) {
1849 if (xudc->ep[i].desc &&
1850 usb_endpoint_xfer_bulk(xudc->ep[i].desc))
1851 ep_pause(xudc, i);
1855 ep_reload(xudc, ep->index);
1856 ep_unpause(xudc, ep->index);
1857 ep_unhalt(xudc, ep->index);
1860 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++) {
1861 if (xudc->ep[i].desc &&
1862 usb_endpoint_xfer_bulk(xudc->ep[i].desc))
1863 ep_unpause(xudc, i);
1868 dev_info(xudc->dev, "EP %u (type: %s, dir: %s) enabled\n", ep->index,
1879 struct tegra_xudc *xudc;
1887 xudc = ep->xudc;
1889 spin_lock_irqsave(&xudc->lock, flags);
1890 if (xudc->powergated) {
1897 spin_unlock_irqrestore(&xudc->lock, flags);
1957 struct tegra_xudc *xudc = to_xudc(gadget);
1961 spin_lock_irqsave(&xudc->lock, flags);
1962 if (xudc->powergated) {
1967 ret = (xudc_readl(xudc, MFINDEX) & MFINDEX_FRAME_MASK) >>
1970 spin_unlock_irqrestore(&xudc->lock, flags);
1975 static void tegra_xudc_resume_device_state(struct tegra_xudc *xudc)
1980 ep_unpause_all(xudc);
1983 val = xudc_readl(xudc, PORTSC);
1987 xudc_writel(xudc, val, PORTSC);
1990 if (xudc->device_state == USB_STATE_SUSPENDED) {
1991 xudc->device_state = xudc->resume_state;
1992 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
1993 xudc->resume_state = 0;
2001 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++)
2002 tegra_xudc_ep_ring_doorbell(&xudc->ep[i]);
2007 struct tegra_xudc *xudc = to_xudc(gadget);
2012 spin_lock_irqsave(&xudc->lock, flags);
2014 if (xudc->powergated) {
2018 val = xudc_readl(xudc, PORTPM);
2019 dev_dbg(xudc->dev, "%s: PORTPM=%#x, speed=%x\n", __func__,
2022 if (((xudc->gadget.speed <= USB_SPEED_HIGH) &&
2024 ((xudc->gadget.speed == USB_SPEED_SUPER) &&
2026 tegra_xudc_resume_device_state(xudc);
2029 if (xudc->gadget.speed == USB_SPEED_SUPER) {
2032 xudc_writel(xudc, 0, DEVNOTIF_HI);
2033 xudc_writel(xudc, val, DEVNOTIF_LO);
2038 dev_dbg(xudc->dev, "%s: ret value is %d", __func__, ret);
2039 spin_unlock_irqrestore(&xudc->lock, flags);
2046 struct tegra_xudc *xudc = to_xudc(gadget);
2050 pm_runtime_get_sync(xudc->dev);
2052 spin_lock_irqsave(&xudc->lock, flags);
2054 if (is_on != xudc->pullup) {
2055 val = xudc_readl(xudc, CTRL);
2060 xudc_writel(xudc, val, CTRL);
2063 xudc->pullup = is_on;
2064 dev_dbg(xudc->dev, "%s: pullup:%d", __func__, is_on);
2066 spin_unlock_irqrestore(&xudc->lock, flags);
2068 pm_runtime_put(xudc->dev);
2076 struct tegra_xudc *xudc = to_xudc(gadget);
2085 pm_runtime_get_sync(xudc->dev);
2087 spin_lock_irqsave(&xudc->lock, flags);
2089 if (xudc->driver) {
2094 xudc->setup_state = WAIT_FOR_SETUP;
2095 xudc->device_state = USB_STATE_DEFAULT;
2096 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2098 ret = __tegra_xudc_ep_enable(&xudc->ep[0], &tegra_xudc_ep0_desc);
2102 val = xudc_readl(xudc, CTRL);
2104 xudc_writel(xudc, val, CTRL);
2106 val = xudc_readl(xudc, PORTHALT);
2108 xudc_writel(xudc, val, PORTHALT);
2110 if (xudc->pullup) {
2111 val = xudc_readl(xudc, CTRL);
2113 xudc_writel(xudc, val, CTRL);
2116 for (i = 0; i < xudc->soc->num_phys; i++)
2117 if (xudc->usbphy[i])
2118 otg_set_peripheral(xudc->usbphy[i]->otg, gadget);
2120 xudc->driver = driver;
2122 dev_dbg(xudc->dev, "%s: ret value is %d", __func__, ret);
2123 spin_unlock_irqrestore(&xudc->lock, flags);
2125 pm_runtime_put(xudc->dev);
2132 struct tegra_xudc *xudc = to_xudc(gadget);
2137 pm_runtime_get_sync(xudc->dev);
2139 spin_lock_irqsave(&xudc->lock, flags);
2141 for (i = 0; i < xudc->soc->num_phys; i++)
2142 if (xudc->usbphy[i])
2143 otg_set_peripheral(xudc->usbphy[i]->otg, NULL);
2145 val = xudc_readl(xudc, CTRL);
2147 xudc_writel(xudc, val, CTRL);
2149 __tegra_xudc_ep_disable(&xudc->ep[0]);
2151 xudc->driver = NULL;
2152 dev_dbg(xudc->dev, "Gadget stopped");
2154 spin_unlock_irqrestore(&xudc->lock, flags);
2156 pm_runtime_put(xudc->dev);
2164 struct tegra_xudc *xudc = to_xudc(gadget);
2166 dev_dbg(xudc->dev, "%s: %u mA\n", __func__, m_a);
2168 if (xudc->curr_usbphy && xudc->curr_usbphy->chg_type == SDP_TYPE)
2169 return usb_phy_set_power(xudc->curr_usbphy, m_a);
2176 struct tegra_xudc *xudc = to_xudc(gadget);
2178 dev_dbg(xudc->dev, "%s: %d\n", __func__, is_on);
2179 xudc->selfpowered = !!is_on;
2199 tegra_xudc_ep0_queue_status(struct tegra_xudc *xudc,
2202 xudc->ep0_req->usb_req.buf = NULL;
2203 xudc->ep0_req->usb_req.dma = 0;
2204 xudc->ep0_req->usb_req.length = 0;
2205 xudc->ep0_req->usb_req.complete = cmpl;
2206 xudc->ep0_req->usb_req.context = xudc;
2208 return __tegra_xudc_ep_queue(&xudc->ep[0], xudc->ep0_req);
2212 tegra_xudc_ep0_queue_data(struct tegra_xudc *xudc, void *buf, size_t len,
2215 xudc->ep0_req->usb_req.buf = buf;
2216 xudc->ep0_req->usb_req.length = len;
2217 xudc->ep0_req->usb_req.complete = cmpl;
2218 xudc->ep0_req->usb_req.context = xudc;
2220 return __tegra_xudc_ep_queue(&xudc->ep[0], xudc->ep0_req);
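Both EP0 helpers reuse the pre-allocated xudc->ep0_req and differ only in whether a data buffer is attached. A short usage sketch, queueing the zero-length status stage the way tegra_xudc_ep0_req_done() does at lines 2231-2232:

	/* After an OUT data stage completes, queue the IN status stage. */
	static void example_finish_control_transfer(struct tegra_xudc *xudc)
	{
		xudc->setup_state = STATUS_STAGE_XFER;
		tegra_xudc_ep0_queue_status(xudc, no_op_complete);
	}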
2223 static void tegra_xudc_ep0_req_done(struct tegra_xudc *xudc)
2225 switch (xudc->setup_state) {
2227 xudc->setup_state = STATUS_STAGE_RECV;
2228 tegra_xudc_ep0_queue_status(xudc, no_op_complete);
2231 xudc->setup_state = STATUS_STAGE_XFER;
2232 tegra_xudc_ep0_queue_status(xudc, no_op_complete);
2235 xudc->setup_state = WAIT_FOR_SETUP;
2240 static int tegra_xudc_ep0_delegate_req(struct tegra_xudc *xudc,
2245 spin_unlock(&xudc->lock);
2246 ret = xudc->driver->setup(&xudc->gadget, ctrl);
2247 spin_lock(&xudc->lock);
2254 struct tegra_xudc *xudc = req->context;
2256 if (xudc->test_mode_pattern) {
2257 xudc_writel(xudc, xudc->test_mode_pattern, PORT_TM);
2258 xudc->test_mode_pattern = 0;
2262 static int tegra_xudc_ep0_set_feature(struct tegra_xudc *xudc,
2278 if ((xudc->gadget.speed == USB_SPEED_SUPER) ||
2279 (xudc->device_state == USB_STATE_DEFAULT))
2282 val = xudc_readl(xudc, PORTPM);
2288 xudc_writel(xudc, val, PORTPM);
2292 if ((xudc->device_state != USB_STATE_CONFIGURED) ||
2293 (xudc->gadget.speed != USB_SPEED_SUPER))
2296 val = xudc_readl(xudc, PORTPM);
2298 xudc->soc->u1_enable) {
2306 xudc->soc->u2_enable) {
2313 xudc_writel(xudc, val, PORTPM);
2316 if (xudc->gadget.speed != USB_SPEED_HIGH)
2322 xudc->test_mode_pattern = index >> 8;
2330 if (xudc->device_state != USB_STATE_CONFIGURED)
2336 val = xudc_readl(xudc, PORTPM);
2343 xudc_writel(xudc, val, PORTPM);
2346 return tegra_xudc_ep0_delegate_req(xudc, ctrl);
2356 if ((xudc->device_state == USB_STATE_DEFAULT) ||
2357 ((xudc->device_state == USB_STATE_ADDRESS) &&
2361 ret = __tegra_xudc_ep_set_halt(&xudc->ep[ep], set);
2369 return tegra_xudc_ep0_queue_status(xudc, set_feature_complete);
2372 static int tegra_xudc_ep0_get_status(struct tegra_xudc *xudc,
2388 val = xudc_readl(xudc, PORTPM);
2390 if (xudc->selfpowered)
2393 if ((xudc->gadget.speed < USB_SPEED_SUPER) &&
2397 if (xudc->gadget.speed == USB_SPEED_SUPER) {
2405 if (xudc->gadget.speed == USB_SPEED_SUPER) {
2407 val = xudc_readl(xudc, PORTPM);
2415 ep_ctx = &xudc->ep_context[ep];
2417 if ((xudc->device_state != USB_STATE_CONFIGURED) &&
2418 ((xudc->device_state != USB_STATE_ADDRESS) || (ep != 0)))
2424 if (xudc_readl(xudc, EP_HALT) & BIT(ep))
2431 xudc->status_buf = cpu_to_le16(status);
2432 return tegra_xudc_ep0_queue_data(xudc, &xudc->status_buf,
2433 sizeof(xudc->status_buf),
2442 static int tegra_xudc_ep0_set_sel(struct tegra_xudc *xudc,
2449 if (xudc->device_state == USB_STATE_DEFAULT)
2457 return tegra_xudc_ep0_queue_data(xudc, &xudc->sel_timing,
2458 sizeof(xudc->sel_timing),
2467 static int tegra_xudc_ep0_set_isoch_delay(struct tegra_xudc *xudc,
2480 xudc->isoch_delay = delay;
2482 return tegra_xudc_ep0_queue_status(xudc, set_isoch_delay_complete);
2487 struct tegra_xudc *xudc = req->context;
2489 if ((xudc->device_state == USB_STATE_DEFAULT) &&
2490 (xudc->dev_addr != 0)) {
2491 xudc->device_state = USB_STATE_ADDRESS;
2492 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2493 } else if ((xudc->device_state == USB_STATE_ADDRESS) &&
2494 (xudc->dev_addr == 0)) {
2495 xudc->device_state = USB_STATE_DEFAULT;
2496 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2500 static int tegra_xudc_ep0_set_address(struct tegra_xudc *xudc,
2503 struct tegra_xudc_ep *ep0 = &xudc->ep[0];
2514 if (xudc->device_state == USB_STATE_CONFIGURED)
2517 dev_dbg(xudc->dev, "set address: %u\n", addr);
2519 xudc->dev_addr = addr;
2520 val = xudc_readl(xudc, CTRL);
2523 xudc_writel(xudc, val, CTRL);
2527 return tegra_xudc_ep0_queue_status(xudc, set_address_complete);
2530 static int tegra_xudc_ep0_standard_req(struct tegra_xudc *xudc,
2537 dev_dbg(xudc->dev, "USB_REQ_GET_STATUS\n");
2538 ret = tegra_xudc_ep0_get_status(xudc, ctrl);
2541 dev_dbg(xudc->dev, "USB_REQ_SET_ADDRESS\n");
2542 ret = tegra_xudc_ep0_set_address(xudc, ctrl);
2545 dev_dbg(xudc->dev, "USB_REQ_SET_SEL\n");
2546 ret = tegra_xudc_ep0_set_sel(xudc, ctrl);
2549 dev_dbg(xudc->dev, "USB_REQ_SET_ISOCH_DELAY\n");
2550 ret = tegra_xudc_ep0_set_isoch_delay(xudc, ctrl);
2554 dev_dbg(xudc->dev, "USB_REQ_CLEAR/SET_FEATURE\n");
2555 ret = tegra_xudc_ep0_set_feature(xudc, ctrl);
2558 dev_dbg(xudc->dev, "USB_REQ_SET_CONFIGURATION\n");
2566 ret = tegra_xudc_ep0_delegate_req(xudc, ctrl);
2573 static void tegra_xudc_handle_ep0_setup_packet(struct tegra_xudc *xudc,
2579 xudc->setup_seq_num = seq_num;
2582 ep_unhalt(xudc, 0);
2588 if (xudc->soc->invalid_seq_num &&
2590 dev_warn(xudc->dev, "invalid sequence number detected\n");
2591 ep_halt(xudc, 0);
2596 xudc->setup_state = (ctrl->bRequestType & USB_DIR_IN) ?
2599 xudc->setup_state = STATUS_STAGE_XFER;
2602 ret = tegra_xudc_ep0_standard_req(xudc, ctrl);
2604 ret = tegra_xudc_ep0_delegate_req(xudc, ctrl);
2607 dev_warn(xudc->dev, "setup request failed: %d\n", ret);
2608 xudc->setup_state = WAIT_FOR_SETUP;
2609 ep_halt(xudc, 0);
2613 static void tegra_xudc_handle_ep0_event(struct tegra_xudc *xudc,
2619 if (xudc->setup_state != WAIT_FOR_SETUP) {
2626 memcpy(&xudc->setup_packet.ctrl_req, ctrl, sizeof(*ctrl));
2627 xudc->setup_packet.seq_num = seq_num;
2628 xudc->queued_setup_packet = true;
2630 tegra_xudc_handle_ep0_setup_packet(xudc, ctrl, seq_num);
2650 static void tegra_xudc_handle_transfer_completion(struct tegra_xudc *xudc,
2676 dev_dbg(xudc->dev, "bytes transferred %u / %u\n",
2682 tegra_xudc_ep0_req_done(xudc);
2694 dev_warn(xudc->dev, "transfer event on dequeued request\n");
2701 static void tegra_xudc_handle_transfer_event(struct tegra_xudc *xudc,
2705 struct tegra_xudc_ep *ep = &xudc->ep[ep_index];
2710 dev_warn(xudc->dev, "transfer event on disabled EP %u\n",
2729 tegra_xudc_handle_transfer_completion(xudc, ep, event);
2732 dev_info(xudc->dev, "stream rejected on EP %u\n", ep_index);
2737 dev_info(xudc->dev, "prime pipe received on EP %u\n", ep_index);
2746 ep_wait_for_stopped(xudc, ep_index);
2755 ep_wait_for_stopped(xudc, ep_index);
2767 dev_err(xudc->dev, "completion error %#x on EP %u\n",
2770 ep_halt(xudc, ep_index);
2773 dev_info(xudc->dev, "sequence number error\n");
2780 xudc->setup_state = WAIT_FOR_SETUP;
2781 if (!xudc->queued_setup_packet)
2784 tegra_xudc_handle_ep0_setup_packet(xudc,
2785 &xudc->setup_packet.ctrl_req,
2786 xudc->setup_packet.seq_num);
2787 xudc->queued_setup_packet = false;
2790 dev_dbg(xudc->dev, "stop completion code on EP %u\n",
2797 dev_dbg(xudc->dev, "completion event %#x on EP %u\n",
2803 static void tegra_xudc_reset(struct tegra_xudc *xudc)
2805 struct tegra_xudc_ep *ep0 = &xudc->ep[0];
2809 xudc->setup_state = WAIT_FOR_SETUP;
2810 xudc->device_state = USB_STATE_DEFAULT;
2811 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2813 ep_unpause_all(xudc);
2815 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++)
2816 tegra_xudc_ep_nuke(&xudc->ep[i], -ESHUTDOWN);
2825 xudc->setup_seq_num = 0;
2826 xudc->queued_setup_packet = false;
2835 if (!dma_mapping_error(xudc->dev, deq_ptr)) {
2840 ep_unhalt_all(xudc);
2841 ep_reload(xudc, 0);
2842 ep_unpause(xudc, 0);
2845 static void tegra_xudc_port_connect(struct tegra_xudc *xudc)
2847 struct tegra_xudc_ep *ep0 = &xudc->ep[0];
2851 val = (xudc_readl(xudc, PORTSC) & PORTSC_PS_MASK) >> PORTSC_PS_SHIFT;
2854 xudc->gadget.speed = USB_SPEED_LOW;
2857 xudc->gadget.speed = USB_SPEED_FULL;
2860 xudc->gadget.speed = USB_SPEED_HIGH;
2863 xudc->gadget.speed = USB_SPEED_SUPER;
2866 xudc->gadget.speed = USB_SPEED_UNKNOWN;
2870 xudc->device_state = USB_STATE_DEFAULT;
2871 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2873 xudc->setup_state = WAIT_FOR_SETUP;
2875 if (xudc->gadget.speed == USB_SPEED_SUPER)
2884 if (!xudc->soc->u1_enable) {
2885 val = xudc_readl(xudc, PORTPM);
2887 xudc_writel(xudc, val, PORTPM);
2890 if (!xudc->soc->u2_enable) {
2891 val = xudc_readl(xudc, PORTPM);
2893 xudc_writel(xudc, val, PORTPM);
2896 if (xudc->gadget.speed <= USB_SPEED_HIGH) {
2897 val = xudc_readl(xudc, PORTPM);
2899 if (xudc->soc->lpm_enable)
2903 xudc_writel(xudc, val, PORTPM);
2906 val = xudc_readl(xudc, ST);
2908 xudc_writel(xudc, ST_RC, ST);
2911 static void tegra_xudc_port_disconnect(struct tegra_xudc *xudc)
2913 tegra_xudc_reset(xudc);
2915 if (xudc->driver && xudc->driver->disconnect) {
2916 spin_unlock(&xudc->lock);
2917 xudc->driver->disconnect(&xudc->gadget);
2918 spin_lock(&xudc->lock);
2921 xudc->device_state = USB_STATE_NOTATTACHED;
2922 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2924 complete(&xudc->disconnect_complete);
2927 static void tegra_xudc_port_reset(struct tegra_xudc *xudc)
2929 tegra_xudc_reset(xudc);
2931 if (xudc->driver) {
2932 spin_unlock(&xudc->lock);
2933 usb_gadget_udc_reset(&xudc->gadget, xudc->driver);
2934 spin_lock(&xudc->lock);
2937 tegra_xudc_port_connect(xudc);
2940 static void tegra_xudc_port_suspend(struct tegra_xudc *xudc)
2942 dev_dbg(xudc->dev, "port suspend\n");
2944 xudc->resume_state = xudc->device_state;
2945 xudc->device_state = USB_STATE_SUSPENDED;
2946 usb_gadget_set_state(&xudc->gadget, xudc->device_state);
2948 if (xudc->driver->suspend) {
2949 spin_unlock(&xudc->lock);
2950 xudc->driver->suspend(&xudc->gadget);
2951 spin_lock(&xudc->lock);
2955 static void tegra_xudc_port_resume(struct tegra_xudc *xudc)
2957 dev_dbg(xudc->dev, "port resume\n");
2959 tegra_xudc_resume_device_state(xudc);
2961 if (xudc->driver->resume) {
2962 spin_unlock(&xudc->lock);
2963 xudc->driver->resume(&xudc->gadget);
2964 spin_lock(&xudc->lock);
2968 static inline void clear_port_change(struct tegra_xudc *xudc, u32 flag)
2972 val = xudc_readl(xudc, PORTSC);
2975 xudc_writel(xudc, val, PORTSC);
2978 static void __tegra_xudc_handle_port_status(struct tegra_xudc *xudc)
2982 porthalt = xudc_readl(xudc, PORTHALT);
2985 dev_dbg(xudc->dev, "STCHG_REQ, PORTHALT = %#x\n", porthalt);
2987 xudc_writel(xudc, porthalt, PORTHALT);
2990 portsc = xudc_readl(xudc, PORTSC);
2992 dev_dbg(xudc->dev, "PRC, PR, PORTSC = %#x\n", portsc);
2993 clear_port_change(xudc, PORTSC_PRC | PORTSC_PED);
2995 if (xudc->soc->port_reset_quirk) {
2996 schedule_delayed_work(&xudc->port_reset_war_work,
2998 xudc->wait_for_sec_prc = 1;
3003 dev_dbg(xudc->dev, "PRC, Not PR, PORTSC = %#x\n", portsc);
3004 clear_port_change(xudc, PORTSC_PRC | PORTSC_PED);
3005 tegra_xudc_port_reset(xudc);
3006 cancel_delayed_work(&xudc->port_reset_war_work);
3007 xudc->wait_for_sec_prc = 0;
3010 portsc = xudc_readl(xudc, PORTSC);
3012 dev_dbg(xudc->dev, "WRC, PORTSC = %#x\n", portsc);
3013 clear_port_change(xudc, PORTSC_WRC | PORTSC_PED);
3014 if (!(xudc_readl(xudc, PORTSC) & PORTSC_WPR))
3015 tegra_xudc_port_reset(xudc);
3018 portsc = xudc_readl(xudc, PORTSC);
3020 dev_dbg(xudc->dev, "CSC, PORTSC = %#x\n", portsc);
3021 clear_port_change(xudc, PORTSC_CSC);
3024 tegra_xudc_port_connect(xudc);
3026 tegra_xudc_port_disconnect(xudc);
3028 if (xudc->wait_csc) {
3029 cancel_delayed_work(&xudc->plc_reset_work);
3030 xudc->wait_csc = false;
3034 portsc = xudc_readl(xudc, PORTSC);
3038 dev_dbg(xudc->dev, "PLC, PORTSC = %#x\n", portsc);
3039 clear_port_change(xudc, PORTSC_PLC);
3042 tegra_xudc_port_suspend(xudc);
3045 if (xudc->gadget.speed < USB_SPEED_SUPER)
3046 tegra_xudc_port_resume(xudc);
3049 if (xudc->gadget.speed == USB_SPEED_SUPER)
3050 tegra_xudc_port_resume(xudc);
3053 schedule_delayed_work(&xudc->plc_reset_work,
3055 xudc->wait_csc = true;
3063 dev_warn(xudc->dev, "CEC, PORTSC = %#x\n", portsc);
3064 clear_port_change(xudc, PORTSC_CEC);
3067 dev_dbg(xudc->dev, "PORTSC = %#x\n", xudc_readl(xudc, PORTSC));
3070 static void tegra_xudc_handle_port_status(struct tegra_xudc *xudc)
3072 while ((xudc_readl(xudc, PORTSC) & PORTSC_CHANGE_MASK) ||
3073 (xudc_readl(xudc, PORTHALT) & PORTHALT_STCHG_REQ))
3074 __tegra_xudc_handle_port_status(xudc);
3077 static void tegra_xudc_handle_event(struct tegra_xudc *xudc,
3082 dump_trb(xudc, "EVENT", event);
3086 tegra_xudc_handle_port_status(xudc);
3089 tegra_xudc_handle_transfer_event(xudc, event);
3092 tegra_xudc_handle_ep0_event(xudc, event);
3095 dev_info(xudc->dev, "Unrecognized TRB type = %#x\n", type);
3100 static void tegra_xudc_process_event_ring(struct tegra_xudc *xudc)
3106 event = xudc->event_ring[xudc->event_ring_index] +
3107 xudc->event_ring_deq_ptr;
3109 if (trb_read_cycle(event) != xudc->ccs)
3112 tegra_xudc_handle_event(xudc, event);
3114 xudc->event_ring_deq_ptr++;
3115 if (xudc->event_ring_deq_ptr == XUDC_EVENT_RING_SIZE) {
3116 xudc->event_ring_deq_ptr = 0;
3117 xudc->event_ring_index++;
3120 if (xudc->event_ring_index == XUDC_NR_EVENT_RINGS) {
3121 xudc->event_ring_index = 0;
3122 xudc->ccs = !xudc->ccs;
3126 erdp = xudc->event_ring_phys[xudc->event_ring_index] +
3127 xudc->event_ring_deq_ptr * sizeof(*event);
3129 xudc_writel(xudc, upper_32_bits(erdp), ERDPHI);
3130 xudc_writel(xudc, lower_32_bits(erdp) | ERDPLO_EHB, ERDPLO);
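The event ring consumer steps through fixed-size segments, toggles the consumer cycle state (xudc->ccs) after the last segment, and republishes the dequeue pointer with the event-handler-busy clear flag. A condensed sketch of just the advance logic; the TRB type name is assumed:

	static void example_advance_event_ring(struct tegra_xudc *xudc)
	{
		dma_addr_t erdp;

		if (++xudc->event_ring_deq_ptr == XUDC_EVENT_RING_SIZE) {
			xudc->event_ring_deq_ptr = 0;
			xudc->event_ring_index++;
		}

		if (xudc->event_ring_index == XUDC_NR_EVENT_RINGS) {
			xudc->event_ring_index = 0;
			xudc->ccs = !xudc->ccs;	/* cycle state flips on full wrap */
		}

		erdp = xudc->event_ring_phys[xudc->event_ring_index] +
		       xudc->event_ring_deq_ptr * sizeof(struct tegra_xudc_trb);

		xudc_writel(xudc, upper_32_bits(erdp), ERDPHI);
		xudc_writel(xudc, lower_32_bits(erdp) | ERDPLO_EHB, ERDPLO);
	}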
3135 struct tegra_xudc *xudc = data;
3139 val = xudc_readl(xudc, ST);
3142 xudc_writel(xudc, ST_IP, ST);
3144 spin_lock_irqsave(&xudc->lock, flags);
3145 tegra_xudc_process_event_ring(xudc);
3146 spin_unlock_irqrestore(&xudc->lock, flags);
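Lines 3135-3146 are the interrupt handler: acknowledge ST_IP, then drain the event ring under the lock. A sketch of the complete handler around the matched lines; the IRQ_NONE/IRQ_HANDLED returns are assumed:

	static irqreturn_t tegra_xudc_irq(int irq, void *data)
	{
		struct tegra_xudc *xudc = data;
		unsigned long flags;
		u32 val;

		val = xudc_readl(xudc, ST);
		if (!(val & ST_IP))
			return IRQ_NONE;	/* nothing pending for us */
		xudc_writel(xudc, ST_IP, ST);	/* write-1-to-clear the pending bit */

		spin_lock_irqsave(&xudc->lock, flags);
		tegra_xudc_process_event_ring(xudc);
		spin_unlock_irqrestore(&xudc->lock, flags);

		return IRQ_HANDLED;
	}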
3151 static int tegra_xudc_alloc_ep(struct tegra_xudc *xudc, unsigned int index)
3153 struct tegra_xudc_ep *ep = &xudc->ep[index];
3155 ep->xudc = xudc;
3157 ep->context = &xudc->ep_context[index];
3167 ep->transfer_ring = dma_pool_alloc(xudc->transfer_ring_pool,
3186 list_add_tail(&ep->usb_ep.ep_list, &xudc->gadget.ep_list);
3200 static void tegra_xudc_free_ep(struct tegra_xudc *xudc, unsigned int index)
3202 struct tegra_xudc_ep *ep = &xudc->ep[index];
3211 dma_pool_free(xudc->transfer_ring_pool, ep->transfer_ring,
3215 static int tegra_xudc_alloc_eps(struct tegra_xudc *xudc)
3221 xudc->ep_context =
3222 dma_alloc_coherent(xudc->dev, XUDC_NR_EPS *
3223 sizeof(*xudc->ep_context),
3224 &xudc->ep_context_phys, GFP_KERNEL);
3225 if (!xudc->ep_context)
3228 xudc->transfer_ring_pool =
3229 dmam_pool_create(dev_name(xudc->dev), xudc->dev,
3233 if (!xudc->transfer_ring_pool) {
3238 INIT_LIST_HEAD(&xudc->gadget.ep_list);
3239 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++) {
3240 err = tegra_xudc_alloc_ep(xudc, i);
3245 req = tegra_xudc_ep_alloc_request(&xudc->ep[0].usb_ep, GFP_KERNEL);
3250 xudc->ep0_req = to_xudc_req(req);
3256 tegra_xudc_free_ep(xudc, i - 1);
3258 dma_free_coherent(xudc->dev, XUDC_NR_EPS * sizeof(*xudc->ep_context),
3259 xudc->ep_context, xudc->ep_context_phys);
3263 static void tegra_xudc_init_eps(struct tegra_xudc *xudc)
3265 xudc_writel(xudc, lower_32_bits(xudc->ep_context_phys), ECPLO);
3266 xudc_writel(xudc, upper_32_bits(xudc->ep_context_phys), ECPHI);
3269 static void tegra_xudc_free_eps(struct tegra_xudc *xudc)
3273 tegra_xudc_ep_free_request(&xudc->ep[0].usb_ep,
3274 &xudc->ep0_req->usb_req);
3276 for (i = 0; i < ARRAY_SIZE(xudc->ep); i++)
3277 tegra_xudc_free_ep(xudc, i);
3279 dma_free_coherent(xudc->dev, XUDC_NR_EPS * sizeof(*xudc->ep_context),
3280 xudc->ep_context, xudc->ep_context_phys);
3283 static int tegra_xudc_alloc_event_ring(struct tegra_xudc *xudc)
3287 for (i = 0; i < ARRAY_SIZE(xudc->event_ring); i++) {
3288 xudc->event_ring[i] =
3289 dma_alloc_coherent(xudc->dev, XUDC_EVENT_RING_SIZE *
3290 sizeof(*xudc->event_ring[i]),
3291 &xudc->event_ring_phys[i],
3293 if (!xudc->event_ring[i])
3301 dma_free_coherent(xudc->dev, XUDC_EVENT_RING_SIZE *
3302 sizeof(*xudc->event_ring[i - 1]),
3303 xudc->event_ring[i - 1],
3304 xudc->event_ring_phys[i - 1]);
3309 static void tegra_xudc_init_event_ring(struct tegra_xudc *xudc)
3314 for (i = 0; i < ARRAY_SIZE(xudc->event_ring); i++) {
3315 memset(xudc->event_ring[i], 0, XUDC_EVENT_RING_SIZE *
3316 sizeof(*xudc->event_ring[i]));
3318 val = xudc_readl(xudc, ERSTSZ);
3321 xudc_writel(xudc, val, ERSTSZ);
3323 xudc_writel(xudc, lower_32_bits(xudc->event_ring_phys[i]),
3325 xudc_writel(xudc, upper_32_bits(xudc->event_ring_phys[i]),
3329 val = lower_32_bits(xudc->event_ring_phys[0]);
3330 xudc_writel(xudc, val, ERDPLO);
3332 xudc_writel(xudc, val, EREPLO);
3334 val = upper_32_bits(xudc->event_ring_phys[0]);
3335 xudc_writel(xudc, val, ERDPHI);
3336 xudc_writel(xudc, val, EREPHI);
3338 xudc->ccs = true;
3339 xudc->event_ring_index = 0;
3340 xudc->event_ring_deq_ptr = 0;
3343 static void tegra_xudc_free_event_ring(struct tegra_xudc *xudc)
3347 for (i = 0; i < ARRAY_SIZE(xudc->event_ring); i++) {
3348 dma_free_coherent(xudc->dev, XUDC_EVENT_RING_SIZE *
3349 sizeof(*xudc->event_ring[i]),
3350 xudc->event_ring[i],
3351 xudc->event_ring_phys[i]);
3355 static void tegra_xudc_fpci_ipfs_init(struct tegra_xudc *xudc)
3359 if (xudc->soc->has_ipfs) {
3360 val = ipfs_readl(xudc, XUSB_DEV_CONFIGURATION_0);
3362 ipfs_writel(xudc, val, XUSB_DEV_CONFIGURATION_0);
3369 fpci_writel(xudc, val, XUSB_DEV_CFG_1);
3372 val = fpci_readl(xudc, XUSB_DEV_CFG_4);
3374 val |= xudc->phys_base & (XUSB_DEV_CFG_4_BASE_ADDR_MASK);
3376 fpci_writel(xudc, val, XUSB_DEV_CFG_4);
3377 fpci_writel(xudc, upper_32_bits(xudc->phys_base), XUSB_DEV_CFG_5);
3381 if (xudc->soc->has_ipfs) {
3383 val = ipfs_readl(xudc, XUSB_DEV_INTR_MASK_0);
3385 ipfs_writel(xudc, val, XUSB_DEV_INTR_MASK_0);
3389 static void tegra_xudc_device_params_init(struct tegra_xudc *xudc)
3393 if (xudc->soc->has_ipfs) {
3394 val = xudc_readl(xudc, BLCG);
3402 xudc_writel(xudc, val, BLCG);
3405 if (xudc->soc->port_speed_quirk)
3406 tegra_xudc_limit_port_speed(xudc);
3409 val = xudc_readl(xudc, SSPX_CORE_PADCTL4);
3412 xudc_writel(xudc, val, SSPX_CORE_PADCTL4);
3415 val = xudc_readl(xudc, SSPX_CORE_CNT0);
3418 xudc_writel(xudc, val, SSPX_CORE_CNT0);
3421 val = xudc_readl(xudc, SSPX_CORE_CNT30);
3424 xudc_writel(xudc, val, SSPX_CORE_CNT30);
3426 if (xudc->soc->lpm_enable) {
3428 val = xudc_readl(xudc, HSFSPI_COUNT13);
3431 xudc_writel(xudc, val, HSFSPI_COUNT13);
3438 val = xudc_readl(xudc, SSPX_CORE_CNT32);
3441 xudc_writel(xudc, val, SSPX_CORE_CNT32);
3444 val = xudc_readl(xudc, CFG_DEV_FE);
3447 xudc_writel(xudc, val, CFG_DEV_FE);
3449 val = xudc_readl(xudc, PORTSC);
3452 xudc_writel(xudc, val, PORTSC);
3455 val = xudc_readl(xudc, CFG_DEV_FE);
3458 xudc_writel(xudc, val, CFG_DEV_FE);
3460 val = xudc_readl(xudc, PORTSC);
3463 xudc_writel(xudc, val, PORTSC);
3466 val = xudc_readl(xudc, CFG_DEV_FE);
3468 xudc_writel(xudc, val, CFG_DEV_FE);
3474 val = xudc_readl(xudc, CFG_DEV_FE);
3476 xudc_writel(xudc, val, CFG_DEV_FE);
3480 val = xudc_readl(xudc, RT_IMOD);
3483 xudc_writel(xudc, val, RT_IMOD);
3486 val = xudc_readl(xudc, CFG_DEV_SSPI_XFER);
3489 xudc_writel(xudc, val, CFG_DEV_SSPI_XFER);
3492 static int tegra_xudc_phy_get(struct tegra_xudc *xudc)
3497 xudc->utmi_phy = devm_kcalloc(xudc->dev, xudc->soc->num_phys,
3498 sizeof(*xudc->utmi_phy), GFP_KERNEL);
3499 if (!xudc->utmi_phy)
3502 xudc->usb3_phy = devm_kcalloc(xudc->dev, xudc->soc->num_phys,
3503 sizeof(*xudc->usb3_phy), GFP_KERNEL);
3504 if (!xudc->usb3_phy)
3507 xudc->usbphy = devm_kcalloc(xudc->dev, xudc->soc->num_phys,
3508 sizeof(*xudc->usbphy), GFP_KERNEL);
3509 if (!xudc->usbphy)
3512 xudc->vbus_nb.notifier_call = tegra_xudc_vbus_notify;
3514 for (i = 0; i < xudc->soc->num_phys; i++) {
3519 xudc->utmi_phy[i] = devm_phy_optional_get(xudc->dev, phy_name);
3520 if (IS_ERR(xudc->utmi_phy[i])) {
3521 err = PTR_ERR(xudc->utmi_phy[i]);
3522 dev_err_probe(xudc->dev, err,
3525 } else if (xudc->utmi_phy[i]) {
3527 xudc->usbphy[i] = devm_usb_get_phy_by_node(xudc->dev,
3528 xudc->utmi_phy[i]->dev.of_node,
3530 if (IS_ERR(xudc->usbphy[i])) {
3531 err = PTR_ERR(xudc->usbphy[i]);
3532 dev_err_probe(xudc->dev, err,
3536 } else if (!xudc->utmi_phy[i]) {
3542 usb3_companion_port = tegra_xusb_padctl_get_usb3_companion(xudc->padctl, i);
3546 for (j = 0; j < xudc->soc->num_phys; j++) {
3548 xudc->usb3_phy[i] = devm_phy_optional_get(xudc->dev, phy_name);
3549 if (IS_ERR(xudc->usb3_phy[i])) {
3550 err = PTR_ERR(xudc->usb3_phy[i]);
3551 dev_err_probe(xudc->dev, err,
3554 } else if (xudc->usb3_phy[i]) {
3556 tegra_xusb_padctl_get_port_number(xudc->utmi_phy[i]);
3558 tegra_xusb_padctl_get_port_number(xudc->usb3_phy[i]);
3560 dev_dbg(xudc->dev, "USB2 port %d is paired with USB3 port %d for device mode port %d\n",
3571 for (i = 0; i < xudc->soc->num_phys; i++) {
3572 xudc->usb3_phy[i] = NULL;
3573 xudc->utmi_phy[i] = NULL;
3574 xudc->usbphy[i] = NULL;
3580 static void tegra_xudc_phy_exit(struct tegra_xudc *xudc)
3584 for (i = 0; i < xudc->soc->num_phys; i++) {
3585 phy_exit(xudc->usb3_phy[i]);
3586 phy_exit(xudc->utmi_phy[i]);
3590 static int tegra_xudc_phy_init(struct tegra_xudc *xudc)
3595 for (i = 0; i < xudc->soc->num_phys; i++) {
3596 err = phy_init(xudc->utmi_phy[i]);
3598 dev_err(xudc->dev, "UTMI PHY #%u initialization failed: %d\n", i, err);
3602 err = phy_init(xudc->usb3_phy[i]);
3604 dev_err(xudc->dev, "USB3 PHY #%u initialization failed: %d\n", i, err);
3611 tegra_xudc_phy_exit(xudc);
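phy_init() and phy_exit() accept NULL PHYs, so the error path can hand the whole array to tegra_xudc_phy_exit() no matter how far initialization got. A sketch of that unwind pattern around the matched lines:

	static int example_phy_init(struct tegra_xudc *xudc)
	{
		unsigned int i;
		int err;

		for (i = 0; i < xudc->soc->num_phys; i++) {
			err = phy_init(xudc->utmi_phy[i]);
			if (err < 0) {
				dev_err(xudc->dev, "UTMI PHY #%u initialization failed: %d\n",
					i, err);
				goto exit_phy;
			}

			err = phy_init(xudc->usb3_phy[i]);
			if (err < 0) {
				dev_err(xudc->dev, "USB3 PHY #%u initialization failed: %d\n",
					i, err);
				goto exit_phy;
			}
		}

		return 0;

	exit_phy:
		tegra_xudc_phy_exit(xudc);	/* phy_exit(NULL) is a no-op */
		return err;
	}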
3694 .compatible = "nvidia,tegra210-xudc",
3698 .compatible = "nvidia,tegra186-xudc",
3702 .compatible = "nvidia,tegra194-xudc",
3706 .compatible = "nvidia,tegra234-xudc",
3713 static void tegra_xudc_powerdomain_remove(struct tegra_xudc *xudc)
3715 if (xudc->genpd_dl_ss)
3716 device_link_del(xudc->genpd_dl_ss);
3717 if (xudc->genpd_dl_device)
3718 device_link_del(xudc->genpd_dl_device);
3719 if (xudc->genpd_dev_ss)
3720 dev_pm_domain_detach(xudc->genpd_dev_ss, true);
3721 if (xudc->genpd_dev_device)
3722 dev_pm_domain_detach(xudc->genpd_dev_device, true);
3725 static int tegra_xudc_powerdomain_init(struct tegra_xudc *xudc)
3727 struct device *dev = xudc->dev;
3730 xudc->genpd_dev_device = dev_pm_domain_attach_by_name(dev, "dev");
3731 if (IS_ERR(xudc->genpd_dev_device)) {
3732 err = PTR_ERR(xudc->genpd_dev_device);
3737 xudc->genpd_dev_ss = dev_pm_domain_attach_by_name(dev, "ss");
3738 if (IS_ERR(xudc->genpd_dev_ss)) {
3739 err = PTR_ERR(xudc->genpd_dev_ss);
3744 xudc->genpd_dl_device = device_link_add(dev, xudc->genpd_dev_device,
3747 if (!xudc->genpd_dl_device) {
3752 xudc->genpd_dl_ss = device_link_add(dev, xudc->genpd_dev_ss,
3755 if (!xudc->genpd_dl_ss) {
3765 struct tegra_xudc *xudc;
3770 xudc = devm_kzalloc(&pdev->dev, sizeof(*xudc), GFP_KERNEL);
3771 if (!xudc)
3774 xudc->dev = &pdev->dev;
3775 platform_set_drvdata(pdev, xudc);
3777 xudc->soc = of_device_get_match_data(&pdev->dev);
3778 if (!xudc->soc)
3782 xudc->base = devm_ioremap_resource(&pdev->dev, res);
3783 if (IS_ERR(xudc->base))
3784 return PTR_ERR(xudc->base);
3785 xudc->phys_base = res->start;
3787 xudc->fpci = devm_platform_ioremap_resource_byname(pdev, "fpci");
3788 if (IS_ERR(xudc->fpci))
3789 return PTR_ERR(xudc->fpci);
3791 if (xudc->soc->has_ipfs) {
3792 xudc->ipfs = devm_platform_ioremap_resource_byname(pdev, "ipfs");
3793 if (IS_ERR(xudc->ipfs))
3794 return PTR_ERR(xudc->ipfs);
3797 xudc->irq = platform_get_irq(pdev, 0);
3798 if (xudc->irq < 0)
3799 return xudc->irq;
3801 err = devm_request_irq(&pdev->dev, xudc->irq, tegra_xudc_irq, 0,
3802 dev_name(&pdev->dev), xudc);
3804 dev_err(xudc->dev, "failed to claim IRQ#%u: %d\n", xudc->irq,
3809 xudc->clks = devm_kcalloc(&pdev->dev, xudc->soc->num_clks, sizeof(*xudc->clks),
3811 if (!xudc->clks)
3814 for (i = 0; i < xudc->soc->num_clks; i++)
3815 xudc->clks[i].id = xudc->soc->clock_names[i];
3817 err = devm_clk_bulk_get(&pdev->dev, xudc->soc->num_clks, xudc->clks);
3819 dev_err_probe(xudc->dev, err, "failed to request clocks\n");
3823 xudc->supplies = devm_kcalloc(&pdev->dev, xudc->soc->num_supplies,
3824 sizeof(*xudc->supplies), GFP_KERNEL);
3825 if (!xudc->supplies)
3828 for (i = 0; i < xudc->soc->num_supplies; i++)
3829 xudc->supplies[i].supply = xudc->soc->supply_names[i];
3831 err = devm_regulator_bulk_get(&pdev->dev, xudc->soc->num_supplies,
3832 xudc->supplies);
3834 dev_err_probe(xudc->dev, err, "failed to request regulators\n");
3838 xudc->padctl = tegra_xusb_padctl_get(&pdev->dev);
3839 if (IS_ERR(xudc->padctl))
3840 return PTR_ERR(xudc->padctl);
3842 err = regulator_bulk_enable(xudc->soc->num_supplies, xudc->supplies);
3844 dev_err(xudc->dev, "failed to enable regulators: %d\n", err);
3848 err = tegra_xudc_phy_get(xudc);
3852 err = tegra_xudc_powerdomain_init(xudc);
3856 err = tegra_xudc_phy_init(xudc);
3860 err = tegra_xudc_alloc_event_ring(xudc);
3864 err = tegra_xudc_alloc_eps(xudc);
3868 spin_lock_init(&xudc->lock);
3870 init_completion(&xudc->disconnect_complete);
3872 INIT_WORK(&xudc->usb_role_sw_work, tegra_xudc_usb_role_sw_work);
3874 INIT_DELAYED_WORK(&xudc->plc_reset_work, tegra_xudc_plc_reset_work);
3876 INIT_DELAYED_WORK(&xudc->port_reset_war_work,
3881 xudc->gadget.ops = &tegra_xudc_gadget_ops;
3882 xudc->gadget.ep0 = &xudc->ep[0].usb_ep;
3883 xudc->gadget.name = "tegra-xudc";
3884 xudc->gadget.max_speed = USB_SPEED_SUPER;
3886 err = usb_add_gadget_udc(&pdev->dev, &xudc->gadget);
3892 for (i = 0; i < xudc->soc->num_phys; i++) {
3893 if (!xudc->usbphy[i])
3896 usb_register_notifier(xudc->usbphy[i], &xudc->vbus_nb);
3897 tegra_xudc_update_data_role(xudc, xudc->usbphy[i]);
3904 tegra_xudc_free_eps(xudc);
3906 tegra_xudc_free_event_ring(xudc);
3908 tegra_xudc_phy_exit(xudc);
3910 tegra_xudc_powerdomain_remove(xudc);
3912 regulator_bulk_disable(xudc->soc->num_supplies, xudc->supplies);
3914 tegra_xusb_padctl_put(xudc->padctl);
3921 struct tegra_xudc *xudc = platform_get_drvdata(pdev);
3924 pm_runtime_get_sync(xudc->dev);
3926 cancel_delayed_work_sync(&xudc->plc_reset_work);
3927 cancel_work_sync(&xudc->usb_role_sw_work);
3929 usb_del_gadget_udc(&xudc->gadget);
3931 tegra_xudc_free_eps(xudc);
3932 tegra_xudc_free_event_ring(xudc);
3934 tegra_xudc_powerdomain_remove(xudc);
3936 regulator_bulk_disable(xudc->soc->num_supplies, xudc->supplies);
3938 for (i = 0; i < xudc->soc->num_phys; i++) {
3939 phy_power_off(xudc->utmi_phy[i]);
3940 phy_power_off(xudc->usb3_phy[i]);
3943 tegra_xudc_phy_exit(xudc);
3945 pm_runtime_disable(xudc->dev);
3946 pm_runtime_put(xudc->dev);
3948 tegra_xusb_padctl_put(xudc->padctl);
3951 static int __maybe_unused tegra_xudc_powergate(struct tegra_xudc *xudc)
3955 dev_dbg(xudc->dev, "entering ELPG\n");
3957 spin_lock_irqsave(&xudc->lock, flags);
3959 xudc->powergated = true;
3960 xudc->saved_regs.ctrl = xudc_readl(xudc, CTRL);
3961 xudc->saved_regs.portpm = xudc_readl(xudc, PORTPM);
3962 xudc_writel(xudc, 0, CTRL);
3964 spin_unlock_irqrestore(&xudc->lock, flags);
3966 clk_bulk_disable_unprepare(xudc->soc->num_clks, xudc->clks);
3968 regulator_bulk_disable(xudc->soc->num_supplies, xudc->supplies);
3970 dev_dbg(xudc->dev, "entering ELPG done\n");
3974 static int __maybe_unused tegra_xudc_unpowergate(struct tegra_xudc *xudc)
3979 dev_dbg(xudc->dev, "exiting ELPG\n");
3981 err = regulator_bulk_enable(xudc->soc->num_supplies,
3982 xudc->supplies);
3986 err = clk_bulk_prepare_enable(xudc->soc->num_clks, xudc->clks);
3990 tegra_xudc_fpci_ipfs_init(xudc);
3992 tegra_xudc_device_params_init(xudc);
3994 tegra_xudc_init_event_ring(xudc);
3996 tegra_xudc_init_eps(xudc);
3998 xudc_writel(xudc, xudc->saved_regs.portpm, PORTPM);
3999 xudc_writel(xudc, xudc->saved_regs.ctrl, CTRL);
4001 spin_lock_irqsave(&xudc->lock, flags);
4002 xudc->powergated = false;
4003 spin_unlock_irqrestore(&xudc->lock, flags);
4005 dev_dbg(xudc->dev, "exiting ELPG done\n");
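Only CTRL and PORTPM are saved across ELPG (lines 3960-3961, 3998-3999); everything else is reprogrammed by the *_init() helpers on the way back out. The saved-register state is therefore tiny; its assumed shape:

	/* Assumed layout of xudc->saved_regs: just the two registers that are
	 * saved before power-gating and restored afterwards. */
	struct tegra_xudc_save_regs {
		u32 ctrl;
		u32 portpm;
	};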
4011 struct tegra_xudc *xudc = dev_get_drvdata(dev);
4014 spin_lock_irqsave(&xudc->lock, flags);
4015 xudc->suspended = true;
4016 spin_unlock_irqrestore(&xudc->lock, flags);
4018 flush_work(&xudc->usb_role_sw_work);
4022 tegra_xudc_device_mode_off(xudc);
4023 tegra_xudc_powergate(xudc);
4033 struct tegra_xudc *xudc = dev_get_drvdata(dev);
4037 err = tegra_xudc_unpowergate(xudc);
4041 spin_lock_irqsave(&xudc->lock, flags);
4042 xudc->suspended = false;
4043 spin_unlock_irqrestore(&xudc->lock, flags);
4045 schedule_work(&xudc->usb_role_sw_work);
4054 struct tegra_xudc *xudc = dev_get_drvdata(dev);
4056 return tegra_xudc_powergate(xudc);
4061 struct tegra_xudc *xudc = dev_get_drvdata(dev);
4063 return tegra_xudc_unpowergate(xudc);
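The runtime-PM callbacks at lines 4054-4063 map directly onto the powergate/unpowergate pair, and the system-sleep path reuses them through the suspend/resume handlers at lines 4011 and 4033. A sketch of the dev_pm_ops wiring this implies; the callback names and macro choice are assumptions:

	static const struct dev_pm_ops tegra_xudc_pm_ops = {
		SET_SYSTEM_SLEEP_PM_OPS(tegra_xudc_suspend, tegra_xudc_resume)
		SET_RUNTIME_PM_OPS(tegra_xudc_runtime_suspend,
				   tegra_xudc_runtime_resume, NULL)
	};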
4076 .name = "tegra-xudc",