Lines matching refs: dev (drivers/net/ethernet/mediatek/mtk_wed.c)

49 struct net_device *dev;
53 wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val)
55 regmap_update_bits(dev->hw->regs, reg, mask | val, val);
59 wed_set(struct mtk_wed_device *dev, u32 reg, u32 mask)
61 return wed_m32(dev, reg, 0, mask);
65 wed_clr(struct mtk_wed_device *dev, u32 reg, u32 mask)
67 return wed_m32(dev, reg, mask, 0);
71 wdma_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val)
73 wdma_w32(dev, reg, (wdma_r32(dev, reg) & ~mask) | val);
77 wdma_set(struct mtk_wed_device *dev, u32 reg, u32 mask)
79 wdma_m32(dev, reg, 0, mask);
83 wdma_clr(struct mtk_wed_device *dev, u32 reg, u32 mask)
85 wdma_m32(dev, reg, mask, 0);
89 wifi_r32(struct mtk_wed_device *dev, u32 reg)
91 return readl(dev->wlan.base + reg);
95 wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val)
97 writel(val, dev->wlan.base + reg);
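
The accessor helpers above all share one read-modify-write shape: m32() clears the masked bits and ORs in the new value, with set() and clr() as the two degenerate cases. A minimal userspace sketch of that pattern (fake_reg and every name below are illustrative, not driver API):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t fake_reg;	/* stand-in for a memory-mapped register */

static uint32_t reg_r32(void) { return fake_reg; }
static void reg_w32(uint32_t val) { fake_reg = val; }

/* Same shape as wdma_m32(): clear the masked bits, then OR in the value. */
static void reg_m32(uint32_t mask, uint32_t val)
{
	reg_w32((reg_r32() & ~mask) | val);
}

int main(void)
{
	reg_w32(0xffff0000u);
	/* set(r, m) == m32(r, 0, m); clr(r, m) == m32(r, m, 0) */
	reg_m32(0x0000ff00u, 0x00001200u);
	printf("0x%08" PRIx32 "\n", fake_reg);	/* prints 0xffff1200 */
	return 0;
}

wed_m32() reaches the same result through regmap_update_bits(), which is why it passes mask | val as the set of bits to update.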
101 mtk_wed_read_reset(struct mtk_wed_device *dev)
103 return wed_r32(dev, MTK_WED_RESET);
107 mtk_wdma_read_reset(struct mtk_wed_device *dev)
109 return wdma_r32(dev, MTK_WDMA_GLO_CFG);
113 mtk_wdma_rx_reset(struct mtk_wed_device *dev)
118 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN);
119 ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status,
122 dev_err(dev->hw->dev, "rx reset failed\n");
124 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX);
125 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
127 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) {
128 if (dev->rx_wdma[i].desc)
131 wdma_w32(dev,
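
Both WDMA reset paths pulse MTK_WDMA_RESET_IDX: the ring-select bits are written to latch the reset, then the register is written back to 0. A hedged stand-alone sketch of that pulse (the names and the bit value are invented):

#include <stdint.h>

/* Assert the selected ring-reset bits, then deassert them again. */
static void pulse_reset_idx(volatile uint32_t *reset_idx, uint32_t ring_bits)
{
	*reset_idx = ring_bits;	/* latch: reset the selected rings */
	*reset_idx = 0;		/* release so normal operation can resume */
}

int main(void)
{
	static uint32_t fake_reset_idx;

	pulse_reset_idx(&fake_reset_idx, 1u << 4);	/* made-up ring bit */
	return 0;
}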
139 mtk_wdma_tx_reset(struct mtk_wed_device *dev)
144 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN);
145 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status,
147 dev_err(dev->hw->dev, "tx reset failed\n");
149 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX);
150 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
152 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++)
153 wdma_w32(dev,
158 mtk_wed_reset(struct mtk_wed_device *dev, u32 mask)
162 wed_w32(dev, MTK_WED_RESET, mask);
163 if (readx_poll_timeout(mtk_wed_read_reset, dev, status,
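
mtk_wdma_rx_reset(), mtk_wdma_tx_reset() and mtk_wed_reset() all lean on readx_poll_timeout(): call a getter until a condition holds or a time budget runs out. A rough userspace analogue, assuming POSIX nanosleep() and illustrative names throughout (the kernel macro's argument order differs):

#include <stdbool.h>
#include <stdint.h>
#include <time.h>

/* Poll read_fn() every sleep_us microseconds until done() holds or
 * timeout_us elapses; the kernel helper returns -ETIMEDOUT instead of -1. */
static int poll_timeout(uint32_t (*read_fn)(void), bool (*done)(uint32_t),
			long timeout_us, long sleep_us)
{
	const struct timespec ts = { 0, sleep_us * 1000L };
	long waited = 0;

	for (;;) {
		uint32_t val = read_fn();

		if (done(val))
			return 0;
		if (waited >= timeout_us)
			return -1;
		nanosleep(&ts, NULL);
		waited += sleep_us;
	}
}

static uint32_t counter;
static uint32_t fake_read(void) { return ++counter; }
static bool fake_done(uint32_t v) { return v >= 3; }

int main(void)
{
	return poll_timeout(fake_read, fake_done, 100000L, 10L);
}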
169 mtk_wed_wo_read_status(struct mtk_wed_device *dev)
171 return wed_r32(dev, MTK_WED_SCR0 + 4 * MTK_WED_DUMMY_CR_WO_STATUS);
175 mtk_wed_wo_reset(struct mtk_wed_device *dev)
177 struct mtk_wed_wo *wo = dev->hw->wed_wo;
182 mtk_wdma_tx_reset(dev);
183 mtk_wed_reset(dev, MTK_WED_RESET_WED);
190 if (readx_poll_timeout(mtk_wed_wo_read_status, dev, val,
193 dev_err(dev->hw->dev, "failed to disable wed-wo\n");
198 switch (dev->hw->index) {
225 struct mtk_wed_device *dev;
231 dev = hw->wed_dev;
232 if (!dev || !dev->wlan.reset)
236 err = dev->wlan.reset(dev);
238 dev_err(dev->dev, "wlan reset failed: %d\n", err);
252 struct mtk_wed_device *dev;
257 dev = hw->wed_dev;
258 if (!dev || !dev->wlan.reset_complete)
261 dev->wlan.reset_complete(dev);
268 mtk_wed_assign(struct mtk_wed_device *dev)
273 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) {
274 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)];
296 hw->wed_dev = dev;
301 mtk_wed_tx_buffer_alloc(struct mtk_wed_device *dev)
306 int token = dev->wlan.token_start;
311 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1);
318 dev->tx_buf_ring.size = ring_size;
319 dev->tx_buf_ring.pages = page_list;
321 desc = dma_alloc_coherent(dev->hw->dev, ring_size * sizeof(*desc),
326 dev->tx_buf_ring.desc = desc;
327 dev->tx_buf_ring.desc_phys = desc_phys;
339 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE,
341 if (dma_mapping_error(dev->hw->dev, page_phys)) {
347 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE,
357 txd_size = dev->wlan.init_buf(buf, buf_phys, token++);
362 if (dev->hw->version == 1)
380 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE,
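
mtk_wed_tx_buffer_alloc() first rounds wlan.nbuf down to a whole number of per-page buffers (line 311); the & ~(MTK_WED_BUF_PER_PAGE - 1) trick only rounds correctly because the divisor is a power of two. A compilable illustration with a made-up BUF_PER_PAGE:

#include <stdio.h>

#define BUF_PER_PAGE	4	/* illustrative; the driver derives this from PAGE_SIZE */

int main(void)
{
	unsigned int nbuf = 1027;
	/* Round down to a multiple of BUF_PER_PAGE (must be a power of two). */
	unsigned int ring_size = nbuf & ~(BUF_PER_PAGE - 1);

	printf("%u -> %u buffers (%u full pages)\n",
	       nbuf, ring_size, ring_size / BUF_PER_PAGE);	/* 1027 -> 1024 (256) */
	return 0;
}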
388 mtk_wed_free_tx_buffer(struct mtk_wed_device *dev)
390 struct mtk_wdma_desc *desc = dev->tx_buf_ring.desc;
391 void **page_list = dev->tx_buf_ring.pages;
401 for (i = 0, page_idx = 0; i < dev->tx_buf_ring.size;
410 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE,
415 dma_free_coherent(dev->hw->dev, dev->tx_buf_ring.size * sizeof(*desc),
416 desc, dev->tx_buf_ring.desc_phys);
423 mtk_wed_rx_buffer_alloc(struct mtk_wed_device *dev)
428 dev->rx_buf_ring.size = dev->wlan.rx_nbuf;
429 desc = dma_alloc_coherent(dev->hw->dev,
430 dev->wlan.rx_nbuf * sizeof(*desc),
435 dev->rx_buf_ring.desc = desc;
436 dev->rx_buf_ring.desc_phys = desc_phys;
437 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt);
443 mtk_wed_free_rx_buffer(struct mtk_wed_device *dev)
445 struct mtk_rxbm_desc *desc = dev->rx_buf_ring.desc;
450 dev->wlan.release_rx_buf(dev);
451 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc),
452 desc, dev->rx_buf_ring.desc_phys);
456 mtk_wed_rx_buffer_hw_init(struct mtk_wed_device *dev)
458 wed_w32(dev, MTK_WED_RX_BM_RX_DMAD,
459 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size));
460 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys);
461 wed_w32(dev, MTK_WED_RX_BM_INIT_PTR, MTK_WED_RX_BM_INIT_SW_TAIL |
462 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt));
463 wed_w32(dev, MTK_WED_RX_BM_DYN_ALLOC_TH,
465 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN);
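
The RX buffer-manager init packs values into register fields with FIELD_PREP() from <linux/bitfield.h>, which shifts a value into the position of a contiguous bitmask. A userspace approximation (the mask below is invented, __builtin_ctz() is a gcc/clang builtin, and the real macro also range-checks the value at compile time):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Shift val into the field described by a contiguous mask. */
#define FIELD_PREP_SKETCH(mask, val) \
	((((uint32_t)(val)) << __builtin_ctz(mask)) & (uint32_t)(mask))

int main(void)
{
	/* e.g. packing an SDL0-style length of 1024 into a hypothetical
	 * bits-29:16 field */
	uint32_t dmad = FIELD_PREP_SKETCH(0x3fff0000u, 1024u);

	printf("0x%08" PRIx32 "\n", dmad);	/* 0x04000000 */
	return 0;
}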
469 mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring)
474 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size,
479 mtk_wed_free_rx_rings(struct mtk_wed_device *dev)
481 mtk_wed_free_rx_buffer(dev);
482 mtk_wed_free_ring(dev, &dev->rro.ring);
486 mtk_wed_free_tx_rings(struct mtk_wed_device *dev)
490 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++)
491 mtk_wed_free_ring(dev, &dev->tx_ring[i]);
492 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++)
493 mtk_wed_free_ring(dev, &dev->rx_wdma[i]);
497 mtk_wed_set_ext_int(struct mtk_wed_device *dev, bool en)
501 if (dev->hw->version == 1)
509 if (!dev->hw->num_flows)
512 wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0);
513 wed_r32(dev, MTK_WED_EXT_INT_MASK);
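
The trailing wed_r32() after updating MTK_WED_EXT_INT_MASK is a read-back with a discarded result; a common reason for this idiom is flushing posted MMIO writes so the new mask reaches the device before the function returns. The shape of the idiom, sketched over a plain volatile pointer:

#include <stdint.h>

/* Write a register, then read it back so the write cannot stay posted. */
static void write_flushed(volatile uint32_t *reg, uint32_t val)
{
	*reg = val;
	(void)*reg;	/* discard the value; the read is only for ordering */
}

int main(void)
{
	static uint32_t fake_mask_reg;

	write_flushed(&fake_mask_reg, 0x7u);
	return 0;
}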
517 mtk_wed_set_512_support(struct mtk_wed_device *dev, bool enable)
520 wed_w32(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR);
521 wed_w32(dev, MTK_WED_TXP_DW1,
524 wed_w32(dev, MTK_WED_TXP_DW1,
526 wed_clr(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR);
532 mtk_wed_check_wfdma_rx_fill(struct mtk_wed_device *dev, int idx)
537 if (!(dev->rx_ring[idx].flags & MTK_WED_RING_CONFIGURED))
543 cur_idx = wed_r32(dev,
553 dev_err(dev->hw->dev, "rx dma enable failed\n");
557 val = wifi_r32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base) |
559 wifi_w32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, val);
563 mtk_wed_dma_disable(struct mtk_wed_device *dev)
565 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
569 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN);
571 wed_clr(dev, MTK_WED_GLO_CFG,
575 wdma_clr(dev, MTK_WDMA_GLO_CFG,
580 if (dev->hw->version == 1) {
581 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0);
582 wdma_clr(dev, MTK_WDMA_GLO_CFG,
585 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
589 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
591 wed_clr(dev, MTK_WED_WDMA_GLO_CFG,
595 mtk_wed_set_512_support(dev, false);
599 mtk_wed_stop(struct mtk_wed_device *dev)
601 mtk_wed_set_ext_int(dev, false);
603 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, 0);
604 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, 0);
605 wdma_w32(dev, MTK_WDMA_INT_MASK, 0);
606 wdma_w32(dev, MTK_WDMA_INT_GRP2, 0);
607 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, 0);
609 if (dev->hw->version == 1)
612 wed_w32(dev, MTK_WED_EXT_INT_MASK1, 0);
613 wed_w32(dev, MTK_WED_EXT_INT_MASK2, 0);
617 mtk_wed_deinit(struct mtk_wed_device *dev)
619 mtk_wed_stop(dev);
620 mtk_wed_dma_disable(dev);
622 wed_clr(dev, MTK_WED_CTRL,
628 if (dev->hw->version == 1)
631 wed_clr(dev, MTK_WED_CTRL,
638 __mtk_wed_detach(struct mtk_wed_device *dev)
640 struct mtk_wed_hw *hw = dev->hw;
642 mtk_wed_deinit(dev);
644 mtk_wdma_rx_reset(dev);
645 mtk_wed_reset(dev, MTK_WED_RESET_WED);
646 mtk_wed_free_tx_buffer(dev);
647 mtk_wed_free_tx_rings(dev);
649 if (mtk_wed_get_rx_capa(dev)) {
651 mtk_wed_wo_reset(dev);
652 mtk_wed_free_rx_rings(dev);
657 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) {
660 wlan_node = dev->wlan.pci_dev->dev.of_node;
667 hw->eth->dma_dev != hw->eth->dev)
668 mtk_eth_set_dma_device(hw->eth, hw->eth->dev);
670 memset(dev, 0, sizeof(*dev));
677 mtk_wed_detach(struct mtk_wed_device *dev)
680 __mtk_wed_detach(dev);
686 mtk_wed_bus_init(struct mtk_wed_device *dev)
688 switch (dev->wlan.bus_type) {
690 struct device_node *np = dev->hw->eth->dev->of_node;
700 wed_w32(dev, MTK_WED_PCIE_INT_CTRL,
704 wed_set(dev, MTK_WED_PCIE_INT_CTRL,
707 wed_r32(dev, MTK_WED_PCIE_INT_CTRL);
709 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, PCIE_BASE_ADDR0 | 0x180);
710 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, PCIE_BASE_ADDR0 | 0x184);
713 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(24));
714 wed_r32(dev, MTK_WED_PCIE_INT_TRIGGER);
717 wed_set(dev, MTK_WED_PCIE_INT_CTRL,
722 wed_set(dev, MTK_WED_WPDMA_INT_CTRL,
732 mtk_wed_set_wpdma(struct mtk_wed_device *dev)
734 if (dev->hw->version == 1) {
735 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys);
737 mtk_wed_bus_init(dev);
739 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int);
740 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask);
741 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx);
742 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree);
743 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo);
744 wed_w32(dev, MTK_WED_WPDMA_RX_RING, dev->wlan.wpdma_rx);
749 mtk_wed_hw_init_early(struct mtk_wed_device *dev)
753 mtk_wed_deinit(dev);
754 mtk_wed_reset(dev, MTK_WED_RESET_WED);
755 mtk_wed_set_wpdma(dev);
763 wed_m32(dev, MTK_WED_WDMA_GLO_CFG, mask, set);
765 if (dev->hw->version == 1) {
766 u32 offset = dev->hw->index ? 0x04000400 : 0;
768 wdma_set(dev, MTK_WDMA_GLO_CFG,
773 wed_w32(dev, MTK_WED_WDMA_OFFSET0, 0x2a042a20 + offset);
774 wed_w32(dev, MTK_WED_WDMA_OFFSET1, 0x29002800 + offset);
775 wed_w32(dev, MTK_WED_PCIE_CFG_BASE,
776 MTK_PCIE_BASE(dev->hw->index));
778 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy);
779 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_ETH_DMAD_FMT);
780 wed_w32(dev, MTK_WED_WDMA_OFFSET0,
786 wed_w32(dev, MTK_WED_WDMA_OFFSET1,
795 mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring,
798 ring->desc = dma_alloc_coherent(dev->hw->dev,
812 mtk_wed_rro_alloc(struct mtk_wed_device *dev)
818 index = of_property_match_string(dev->hw->node, "memory-region-names",
823 np = of_parse_phandle(dev->hw->node, "memory-region", index);
833 dev->rro.miod_phys = rmem->base;
834 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys;
836 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring,
841 mtk_wed_rro_cfg(struct mtk_wed_device *dev)
843 struct mtk_wed_wo *wo = dev->hw->wed_wo;
872 mtk_wed_rro_hw_init(struct mtk_wed_device *dev)
874 wed_w32(dev, MTK_WED_RROQM_MIOD_CFG,
880 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys);
881 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL1,
883 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys);
884 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL1,
886 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL2, 0);
887 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys);
889 wed_set(dev, MTK_WED_RROQM_RST_IDX,
893 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0);
894 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1);
895 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN);
899 mtk_wed_route_qm_hw_init(struct mtk_wed_device *dev)
901 wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM);
905 if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM))
910 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST);
911 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_TXDMAD_FPORT);
912 wed_set(dev, MTK_WED_RTQM_GLO_CFG,
913 FIELD_PREP(MTK_WED_RTQM_TXDMAD_FPORT, 0x3 + dev->hw->index));
914 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST);
916 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN);
920 mtk_wed_hw_init(struct mtk_wed_device *dev)
922 if (dev->init_done)
925 dev->init_done = true;
926 mtk_wed_set_ext_int(dev, false);
927 wed_w32(dev, MTK_WED_TX_BM_CTRL,
930 dev->tx_buf_ring.size / 128) |
934 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys);
936 wed_w32(dev, MTK_WED_TX_BM_BUF_LEN, MTK_WED_PKT_SIZE);
938 if (dev->hw->version == 1) {
939 wed_w32(dev, MTK_WED_TX_BM_TKID,
941 dev->wlan.token_start) |
943 dev->wlan.token_start +
944 dev->wlan.nbuf - 1));
945 wed_w32(dev, MTK_WED_TX_BM_DYN_THR,
949 wed_w32(dev, MTK_WED_TX_BM_TKID_V2,
951 dev->wlan.token_start) |
953 dev->wlan.token_start +
954 dev->wlan.nbuf - 1));
955 wed_w32(dev, MTK_WED_TX_BM_DYN_THR,
958 wed_w32(dev, MTK_WED_TX_TKID_CTRL,
961 dev->tx_buf_ring.size / 128) |
963 dev->tx_buf_ring.size / 128));
964 wed_w32(dev, MTK_WED_TX_TKID_DYN_THR,
969 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM);
971 if (dev->hw->version == 1) {
972 wed_set(dev, MTK_WED_CTRL,
976 wed_clr(dev, MTK_WED_TX_TKID_CTRL, MTK_WED_TX_TKID_CTRL_PAUSE);
978 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
981 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0);
983 mtk_wed_rx_buffer_hw_init(dev);
984 mtk_wed_rro_hw_init(dev);
985 mtk_wed_route_qm_hw_init(dev);
988 wed_clr(dev, MTK_WED_TX_BM_CTRL, MTK_WED_TX_BM_CTRL_PAUSE);
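
mtk_wed_hw_init() programs the token range as two fields of one register (MTK_WED_TX_BM_TKID or its _V2 variant): the first token, and the last token at token_start + nbuf - 1. A sketch of that packing with invented field positions:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical layout: start token in bits 15:0, end token in bits 31:16. */
#define TKID_START(x)	((uint32_t)(x) & 0xffffu)
#define TKID_END(x)	(((uint32_t)(x) & 0xffffu) << 16)

int main(void)
{
	unsigned int token_start = 0x100, nbuf = 1024;
	uint32_t reg = TKID_START(token_start) |
		       TKID_END(token_start + nbuf - 1);

	printf("0x%08" PRIx32 "\n", reg);	/* 0x04ff0100 */
	return 0;
}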
1012 mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
1014 return !!(wed_r32(dev, reg) & mask);
1018 mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
1025 timeout, false, dev, reg, mask);
1029 mtk_wed_rx_reset(struct mtk_wed_device *dev)
1031 struct mtk_wed_wo *wo = dev->hw->wed_wo;
1041 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN);
1042 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1045 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
1046 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV);
1048 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
1052 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1055 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1059 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0);
1063 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN);
1064 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1067 mtk_wed_reset(dev, MTK_WED_RESET_RX_RRO_QM);
1069 wed_set(dev, MTK_WED_RROQM_RST_IDX,
1072 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0);
1076 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN);
1077 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1080 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM);
1082 wed_set(dev, MTK_WED_RTQM_GLO_CFG,
1086 mtk_wdma_tx_reset(dev);
1089 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN);
1090 mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1092 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV);
1095 ret = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG,
1097 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_RX_DMA_EN);
1099 mtk_wed_reset(dev, MTK_WED_RESET_WED_RX_DMA);
1101 struct mtk_eth *eth = dev->hw->eth;
1104 wed_set(dev, MTK_WED_RESET_IDX,
1107 wed_set(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_IDX_RX);
1108 wed_w32(dev, MTK_WED_RESET_IDX, 0);
1112 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN);
1113 mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1115 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM);
1126 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) {
1127 if (!dev->rx_ring[i].desc)
1130 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE,
1133 mtk_wed_free_rx_buffer(dev);
1139 mtk_wed_reset_dma(struct mtk_wed_device *dev)
1145 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) {
1146 if (!dev->tx_ring[i].desc)
1149 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE,
1154 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_TX_DMA_EN);
1155 busy = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG,
1158 mtk_wed_reset(dev, MTK_WED_RESET_WED_TX_DMA);
1160 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_IDX_TX);
1161 wed_w32(dev, MTK_WED_RESET_IDX, 0);
1165 busy = !!mtk_wdma_rx_reset(dev);
1166 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN);
1168 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG,
1172 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT);
1173 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV);
1175 wed_w32(dev, MTK_WED_WDMA_RESET_IDX,
1177 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0);
1179 wed_set(dev, MTK_WED_WDMA_GLO_CFG,
1182 wed_clr(dev, MTK_WED_WDMA_GLO_CFG,
1187 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN);
1190 val = wed_r32(dev, MTK_WED_TX_BM_INTF);
1195 mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT);
1196 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_BM_EN);
1197 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM);
1200 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG,
1202 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
1206 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG,
1210 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
1211 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV);
1212 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV);
1214 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX,
1217 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, 0);
1220 dev->init_done = false;
1221 if (dev->hw->version == 1)
1225 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_WPDMA_IDX_RX);
1226 wed_w32(dev, MTK_WED_RESET_IDX, 0);
1229 mtk_wed_rx_reset(dev);
1233 mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring,
1236 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size,
1249 mtk_wed_wdma_rx_ring_setup(struct mtk_wed_device *dev, int idx, int size,
1252 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version;
1255 if (idx >= ARRAY_SIZE(dev->rx_wdma))
1258 wdma = &dev->rx_wdma[idx];
1259 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE,
1263 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE,
1265 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT,
1267 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0);
1269 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE,
1271 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT,
1278 mtk_wed_wdma_tx_ring_setup(struct mtk_wed_device *dev, int idx, int size,
1281 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version;
1284 if (idx >= ARRAY_SIZE(dev->tx_wdma))
1287 wdma = &dev->tx_wdma[idx];
1288 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE,
1292 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE,
1294 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT,
1296 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0);
1297 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0);
1303 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_BASE,
1305 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_COUNT,
1307 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_CPU_IDX,
1309 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_DMA_IDX,
1317 mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb,
1320 struct mtk_eth *eth = dev->hw->eth;
1332 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash);
1336 mtk_wed_configure_irq(struct mtk_wed_device *dev, u32 irq_mask)
1341 wed_set(dev, MTK_WED_CTRL,
1347 if (dev->hw->version == 1) {
1348 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER,
1351 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER,
1355 wed_clr(dev, MTK_WED_WDMA_INT_CTRL, wdma_mask);
1360 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX,
1366 dev->wlan.tx_tbit[0]) |
1368 dev->wlan.tx_tbit[1]));
1371 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX_FREE,
1375 dev->wlan.txfree_tbit));
1377 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RX,
1383 dev->wlan.rx_tbit[0]) |
1385 dev->wlan.rx_tbit[1]));
1387 wed_w32(dev, MTK_WED_WDMA_INT_CLR, wdma_mask);
1388 wed_set(dev, MTK_WED_WDMA_INT_CTRL,
1390 dev->wdma_idx));
1393 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, wdma_mask);
1395 wdma_w32(dev, MTK_WDMA_INT_MASK, wdma_mask);
1396 wdma_w32(dev, MTK_WDMA_INT_GRP2, wdma_mask);
1397 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask);
1398 wed_w32(dev, MTK_WED_INT_MASK, irq_mask);
1402 mtk_wed_dma_enable(struct mtk_wed_device *dev)
1404 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, MTK_WED_WPDMA_INT_CTRL_SUBRT_ADV);
1406 wed_set(dev, MTK_WED_GLO_CFG,
1409 wed_set(dev, MTK_WED_WPDMA_GLO_CFG,
1412 wed_set(dev, MTK_WED_WDMA_GLO_CFG,
1415 wdma_set(dev, MTK_WDMA_GLO_CFG,
1420 if (dev->hw->version == 1) {
1421 wdma_set(dev, MTK_WDMA_GLO_CFG,
1426 wed_set(dev, MTK_WED_WPDMA_CTRL,
1429 wed_set(dev, MTK_WED_WDMA_GLO_CFG,
1433 wed_set(dev, MTK_WED_WPDMA_GLO_CFG,
1437 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
1441 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1448 mtk_wed_check_wfdma_rx_fill(dev, i);
1453 mtk_wed_start(struct mtk_wed_device *dev, u32 irq_mask)
1457 if (mtk_wed_get_rx_capa(dev) && mtk_wed_rx_buffer_alloc(dev))
1460 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++)
1461 if (!dev->rx_wdma[i].desc)
1462 mtk_wed_wdma_rx_ring_setup(dev, i, 16, false);
1464 mtk_wed_hw_init(dev);
1465 mtk_wed_configure_irq(dev, irq_mask);
1467 mtk_wed_set_ext_int(dev, true);
1469 if (dev->hw->version == 1) {
1470 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN |
1472 dev->hw->index);
1474 val |= BIT(0) | (BIT(1) * !!dev->hw->index);
1475 regmap_write(dev->hw->mirror, dev->hw->index * 4, val);
1478 wed_w32(dev, MTK_WED_EXT_INT_MASK1,
1480 wed_w32(dev, MTK_WED_EXT_INT_MASK2,
1483 wed_r32(dev, MTK_WED_EXT_INT_MASK1);
1484 wed_r32(dev, MTK_WED_EXT_INT_MASK2);
1486 if (mtk_wed_rro_cfg(dev))
1491 mtk_wed_set_512_support(dev, dev->wlan.wcid_512);
1493 mtk_wed_dma_enable(dev);
1494 dev->running = true;
1498 mtk_wed_attach(struct mtk_wed_device *dev)
1508 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE &&
1509 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) ||
1520 hw = mtk_wed_assign(dev);
1527 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE
1528 ? &dev->wlan.pci_dev->dev
1529 : &dev->wlan.platform_dev->dev;
1533 dev->hw = hw;
1534 dev->dev = hw->dev;
1535 dev->irq = hw->irq;
1536 dev->wdma_idx = hw->index;
1537 dev->version = hw->version;
1539 if (hw->eth->dma_dev == hw->eth->dev &&
1540 of_dma_is_coherent(hw->eth->dev->of_node))
1541 mtk_eth_set_dma_device(hw->eth, hw->dev);
1543 ret = mtk_wed_tx_buffer_alloc(dev);
1547 if (mtk_wed_get_rx_capa(dev)) {
1548 ret = mtk_wed_rro_alloc(dev);
1553 mtk_wed_hw_init_early(dev);
1558 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID);
1563 dev_err(dev->hw->dev, "failed to attach wed device\n");
1564 __mtk_wed_detach(dev);
1573 mtk_wed_tx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs,
1576 struct mtk_wed_ring *ring = &dev->tx_ring[idx];
1590 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring)))
1593 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE,
1597 if (mtk_wed_wdma_rx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE,
1605 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys);
1606 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_TX_RING_SIZE);
1607 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_CPU_IDX, 0);
1609 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE,
1611 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT,
1613 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0);
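
Every ring setup in this file, whether through wpdma_tx_w32(), wdma_w32() or wed_w32(), writes the same trio relative to a ring base: OFS_BASE gets the descriptor array's DMA address, OFS_COUNT the descriptor count, and OFS_CPU_IDX is zeroed so the ring starts empty. A self-contained sketch of that trio (the struct and the values are illustrative):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Conceptual mirror of the MTK_WED_RING_OFS_* register block. */
struct ring_regs {
	uint32_t base;		/* MTK_WED_RING_OFS_BASE */
	uint32_t count;		/* MTK_WED_RING_OFS_COUNT */
	uint32_t cpu_idx;	/* MTK_WED_RING_OFS_CPU_IDX */
};

static void program_ring(struct ring_regs *r, uint32_t desc_phys, uint32_t size)
{
	r->base = desc_phys;	/* where the descriptor array lives */
	r->count = size;	/* number of descriptors in the ring */
	r->cpu_idx = 0;		/* no descriptors handed to hardware yet */
}

int main(void)
{
	struct ring_regs r;

	program_ring(&r, 0x80001000u, 1024u);
	printf("base=0x%08" PRIx32 " count=%" PRIu32 " cpu_idx=%" PRIu32 "\n",
	       r.base, r.count, r.cpu_idx);
	return 0;
}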
1619 mtk_wed_txfree_ring_setup(struct mtk_wed_device *dev, void __iomem *regs)
1621 struct mtk_wed_ring *ring = &dev->txfree_ring;
1622 int i, index = dev->hw->version == 1;
1635 wed_w32(dev, MTK_WED_RING_RX(index) + i, val);
1636 wed_w32(dev, MTK_WED_WPDMA_RING_RX(index) + i, val);
1643 mtk_wed_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs,
1646 struct mtk_wed_ring *ring = &dev->rx_ring[idx];
1648 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring)))
1651 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE,
1655 if (mtk_wed_wdma_tx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE,
1664 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys);
1665 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_RX_RING_SIZE);
1667 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_BASE,
1669 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_COUNT,
1676 mtk_wed_irq_get(struct mtk_wed_device *dev, u32 mask)
1680 if (dev->hw->version == 1)
1688 val = wed_r32(dev, MTK_WED_EXT_INT_STATUS);
1689 wed_w32(dev, MTK_WED_EXT_INT_STATUS, val);
1691 if (!dev->hw->num_flows)
1694 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val);
1696 val = wed_r32(dev, MTK_WED_INT_STATUS);
1698 wed_w32(dev, MTK_WED_INT_STATUS, val); /* ACK */
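
The /* ACK */ write in mtk_wed_irq_get() writes back exactly the status bits that were just read, the usual write-1-to-clear handshake that cannot lose events raised between the read and the ack. A toy model (assuming W1C semantics, which the source comment suggests but does not spell out):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t hw_status = 0x5u;	/* two events pending in hardware */
	uint32_t seen = hw_status;	/* the driver reads the status register */

	hw_status |= 0x8u;		/* a third event arrives before the ack */
	hw_status &= ~seen;		/* W1C ack clears only the bits we saw */

	printf("still pending: 0x%" PRIx32 "\n", hw_status);	/* 0x8 survives */
	return 0;
}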
1704 mtk_wed_irq_set_mask(struct mtk_wed_device *dev, u32 mask)
1706 if (!dev->running)
1709 mtk_wed_set_ext_int(dev, !!mask);
1710 wed_w32(dev, MTK_WED_INT_MASK, mask);
1771 if (!tc_can_offload(priv->dev))
1781 mtk_wed_setup_tc_block(struct mtk_wed_hw *hw, struct net_device *dev,
1801 block_cb = flow_block_cb_lookup(f->block, cb, dev);
1812 priv->dev = dev;
1813 block_cb = flow_block_cb_alloc(cb, dev, priv, NULL);
1824 block_cb = flow_block_cb_lookup(f->block, cb, dev);
1840 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev,
1851 return mtk_wed_setup_tc_block(hw, dev, type_data);
1878 struct device_node *eth_np = eth->dev->of_node;
1891 get_device(&pdev->dev);
1914 hw->dev = &pdev->dev;
1948 put_device(&pdev->dev);
1970 put_device(hw->dev);