Lines matching refs: rtlpci
65 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
73 ppsc->const_amdpci_aspm = rtlpci->const_amdpci_aspm;
74 switch (rtlpci->const_pci_aspm) {
112 switch (rtlpci->const_hwsw_rfoff_d3) {
130 switch (rtlpci->const_support_pciaspm) {
147 rtlpci->const_support_pciaspm);
154 pci_read_config_byte(rtlpci->pdev, 0x80, &init_aspm);
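The hits at 65–154 read the module's ASPM policy knobs and then fetch the current ASPM state from device-specific config space at offset 0x80 (this listing appears to come from the rtlwifi PCI glue, drivers/net/wireless/realtek/rtlwifi/pci.c). A minimal sketch of that config-space read, assuming a valid struct pci_dev; the function name is illustrative, not from the driver:

    #include <linux/pci.h>

    /* Fetch the device-specific ASPM byte at config offset 0x80. */
    static u8 example_read_aspm_byte(struct pci_dev *pdev)
    {
            u8 init_aspm = 0;

            /* pci_read_config_byte() returns 0 on success. */
            if (pci_read_config_byte(pdev, 0x80, &init_aspm))
                    dev_warn(&pdev->dev, "config read at 0x80 failed\n");
            return init_aspm;
    }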
164 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
172 pcie_capability_clear_and_set_word(rtlpci->pdev, PCI_EXP_LNKCTL,
182 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
187 pcie_capability_clear_and_set_word(rtlpci->pdev, PCI_EXP_LNKCTL,
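The two calls at 172 and 187 do a read-modify-write of the PCI Express Link Control register through the pcie_capability helper. A hedged sketch of the same helper; the specific ASPM bits here are my assumption, not copied from the driver:

    /* One read-modify-write: clear both ASPM enable bits in
     * PCI_EXP_LNKCTL, then set L1 only. */
    pcie_capability_clear_and_set_word(pdev, PCI_EXP_LNKCTL,
                                       PCI_EXP_LNKCTL_ASPMC,
                                       PCI_EXP_LNKCTL_ASPM_L1);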
201 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
224 pci_read_config_byte(rtlpci->pdev, 0x80, &tmp_u1b);
243 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
258 aspmlevel = rtlpci->const_devicepci_aspm_setting;
279 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
285 pci_write_config_byte(rtlpci->pdev, 0xe0, 0xa0);
287 pci_read_config_byte(rtlpci->pdev, 0xe0, &offset_e0);
290 pci_read_config_dword(rtlpci->pdev, 0xe4, &offset_e4);
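Lines 285–290 poke vendor-specific registers at config offsets 0xe0/0xe4 with the plain byte/dword accessors. The same accessor trio in isolation (variable names assumed):

    u8 offset_e0;
    u32 offset_e4;

    pci_write_config_byte(pdev, 0xe0, 0xa0);      /* write, then read back */
    pci_read_config_byte(pdev, 0xe0, &offset_e0);
    pci_read_config_dword(pdev, 0xe4, &offset_e4);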
438 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
455 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
490 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
492 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio];
511 dma_unmap_single(&rtlpci->pdev->dev,
593 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
602 skb = dev_alloc_skb(rtlpci->rxbuffersize);
609 dma_map_single(&rtlpci->pdev->dev, skb_tail_pointer(skb),
610 rtlpci->rxbuffersize, DMA_FROM_DEVICE);
612 if (dma_mapping_error(&rtlpci->pdev->dev, bufferaddress))
614 rtlpci->rx_ring[rxring_idx].rx_buf[desc_idx] = skb;
626 (u8 *)&rtlpci->rxbuffersize);
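The hits at 602–614 show the RX refill pattern: allocate an skb sized to rxbuffersize, map its data area for device writes, and bail out if the mapping fails. A self-contained sketch of that pattern, assuming a PCI device; names are illustrative:

    #include <linux/dma-mapping.h>
    #include <linux/skbuff.h>

    /* Allocate one RX buffer and map it for the device to write into. */
    static int example_map_rx_skb(struct pci_dev *pdev, u32 bufsize,
                                  struct sk_buff **out_skb,
                                  dma_addr_t *out_addr)
    {
            struct sk_buff *skb = dev_alloc_skb(bufsize);
            dma_addr_t addr;

            if (!skb)
                    return -ENOMEM;
            addr = dma_map_single(&pdev->dev, skb_tail_pointer(skb),
                                  bufsize, DMA_FROM_DEVICE);
            if (dma_mapping_error(&pdev->dev, addr)) {
                    dev_kfree_skb_any(skb);
                    return -ENOMEM;
            }
            *out_skb = skb;
            *out_addr = addr;
            return 0;
    }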
673 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
677 rtlpci->sys_irq_mask);
683 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
686 unsigned int count = rtlpci->rxringcount;
707 struct sk_buff *skb = rtlpci->rx_ring[rxring_idx].rx_buf[
708 rtlpci->rx_ring[rxring_idx].idx];
718 buffer_desc = &rtlpci->rx_ring[rxring_idx].buffer_desc[
719 rtlpci->rx_ring[rxring_idx].idx];
722 pdesc = &rtlpci->rx_ring[rxring_idx].desc[
723 rtlpci->rx_ring[rxring_idx].idx];
736 dma_unmap_single(&rtlpci->pdev->dev, *((dma_addr_t *)skb->cb),
737 rtlpci->rxbuffersize, DMA_FROM_DEVICE);
740 new_skb = dev_alloc_skb(rtlpci->rxbuffersize);
827 rtlpci->rx_ring[hw_queue].next_rx_rp += 1;
828 rtlpci->rx_ring[hw_queue].next_rx_rp %=
833 rtlpci->rx_ring[hw_queue].next_rx_rp);
844 rtlpci->rx_ring[rxring_idx].idx);
848 rtlpci->rx_ring[rxring_idx].idx);
849 if (rtlpci->rx_ring[rxring_idx].idx ==
850 rtlpci->rxringcount - 1)
856 rtlpci->rx_ring[rxring_idx].idx =
857 (rtlpci->rx_ring[rxring_idx].idx + 1) %
858 rtlpci->rxringcount;
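Lines 827–858 advance the RX read pointer and ring index with a modulo wrap, so the index cycles through rxringcount descriptors indefinitely. The wrap in its generic form:

    /* Consume one descriptor; the index runs 0, 1, ..., ringcount-1, 0, ... */
    idx = (idx + 1) % ringcount;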
865 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
873 if (rtlpci->irq_enabled == 0)
1036 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1050 ring = &rtlpci->tx_ring[BEACON_QUEUE];
1057 dma_unmap_single(&rtlpci->pdev->dev,
1092 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1106 rtlpci->txringcount[i] = desc_num;
1111 rtlpci->txringcount[BEACON_QUEUE] = 2;
1118 rtlpci->txringcount[BE_QUEUE] = RT_TXDESC_NUM_BE_QUEUE;
1120 rtlpci->rxbuffersize = 9100; /*2048/1024; */
1121 rtlpci->rxringcount = RTL_PCI_MAX_RX_COUNT; /*64; */
1129 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1132 rtlpci->up_first_time = true;
1133 rtlpci->being_init_adapter = false;
1136 rtlpci->pdev = pdev;
1156 rtlpci->acm_method = EACMWAY2_SW;
1169 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1180 dma_alloc_coherent(&rtlpci->pdev->dev,
1190 rtlpci->tx_ring[prio].buffer_desc = buffer_desc;
1191 rtlpci->tx_ring[prio].buffer_desc_dma = buffer_desc_dma;
1193 rtlpci->tx_ring[prio].cur_tx_rp = 0;
1194 rtlpci->tx_ring[prio].cur_tx_wp = 0;
1198 desc = dma_alloc_coherent(&rtlpci->pdev->dev, sizeof(*desc) * entries,
1206 rtlpci->tx_ring[prio].desc = desc;
1207 rtlpci->tx_ring[prio].dma = desc_dma;
1209 rtlpci->tx_ring[prio].idx = 0;
1210 rtlpci->tx_ring[prio].entries = entries;
1211 skb_queue_head_init(&rtlpci->tx_ring[prio].queue);
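TX ring setup at 1180–1211 allocates the descriptor array with dma_alloc_coherent and stores both the CPU pointer and the DMA handle; the handle is what the hardware gets programmed with. A minimal allocation sketch under those assumptions:

    /* Allocate a coherent descriptor ring. *dma is the bus address the
     * NIC's ring-base register is programmed with; the returned CPU
     * pointer is what the driver indexes. Memory comes back zeroed on
     * modern kernels. */
    static void *example_alloc_ring(struct pci_dev *pdev, size_t desc_size,
                                    unsigned int entries, dma_addr_t *dma)
    {
            return dma_alloc_coherent(&pdev->dev, desc_size * entries,
                                      dma, GFP_KERNEL);
    }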
1234 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1241 rtlpci->rx_ring[rxring_idx].buffer_desc =
1242 dma_alloc_coherent(&rtlpci->pdev->dev,
1243 sizeof(*rtlpci->rx_ring[rxring_idx].buffer_desc) *
1244 rtlpci->rxringcount,
1245 &rtlpci->rx_ring[rxring_idx].dma, GFP_KERNEL);
1246 if (!rtlpci->rx_ring[rxring_idx].buffer_desc ||
1247 (ulong)rtlpci->rx_ring[rxring_idx].buffer_desc & 0xFF) {
1253 rtlpci->rx_ring[rxring_idx].idx = 0;
1254 for (i = 0; i < rtlpci->rxringcount; i++) {
1255 entry = &rtlpci->rx_ring[rxring_idx].buffer_desc[i];
1264 rtlpci->rx_ring[rxring_idx].desc =
1265 dma_alloc_coherent(&rtlpci->pdev->dev,
1266 sizeof(*rtlpci->rx_ring[rxring_idx].desc) *
1267 rtlpci->rxringcount,
1268 &rtlpci->rx_ring[rxring_idx].dma, GFP_KERNEL);
1269 if (!rtlpci->rx_ring[rxring_idx].desc ||
1270 (unsigned long)rtlpci->rx_ring[rxring_idx].desc & 0xFF) {
1276 rtlpci->rx_ring[rxring_idx].idx = 0;
1278 for (i = 0; i < rtlpci->rxringcount; i++) {
1279 entry = &rtlpci->rx_ring[rxring_idx].desc[i];
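Note the `& 0xFF` tests at 1247 and 1270: the RX ring base is rejected unless it is 256-byte aligned, which suggests the hardware treats the low address bits as zero. The check in isolation:

    /* Ring base must sit on a 256-byte boundary for this hardware. */
    if (!desc || (unsigned long)desc & 0xFF)
            return -ENOMEM;     /* allocation unusable, caller unwinds */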
1295 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1296 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio];
1308 dma_unmap_single(&rtlpci->pdev->dev,
1317 dma_free_coherent(&rtlpci->pdev->dev,
1322 dma_free_coherent(&rtlpci->pdev->dev,
1332 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1336 for (i = 0; i < rtlpci->rxringcount; i++) {
1337 struct sk_buff *skb = rtlpci->rx_ring[rxring_idx].rx_buf[i];
1341 dma_unmap_single(&rtlpci->pdev->dev, *((dma_addr_t *)skb->cb),
1342 rtlpci->rxbuffersize, DMA_FROM_DEVICE);
1348 dma_free_coherent(&rtlpci->pdev->dev,
1349 sizeof(*rtlpci->rx_ring[rxring_idx].buffer_desc) *
1350 rtlpci->rxringcount,
1351 rtlpci->rx_ring[rxring_idx].buffer_desc,
1352 rtlpci->rx_ring[rxring_idx].dma);
1353 rtlpci->rx_ring[rxring_idx].buffer_desc = NULL;
1355 dma_free_coherent(&rtlpci->pdev->dev,
1356 sizeof(*rtlpci->rx_ring[rxring_idx].desc) *
1357 rtlpci->rxringcount,
1358 rtlpci->rx_ring[rxring_idx].desc,
1359 rtlpci->rx_ring[rxring_idx].dma);
1360 rtlpci->rx_ring[rxring_idx].desc = NULL;
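Teardown at 1336–1360 mirrors the setup: every still-mapped RX skb is unmapped (the DMA address was stashed in skb->cb at map time) and freed, then the coherent descriptor array itself is released. A condensed sketch of that order, with names assumed:

    /* 1. Unmap and free each buffered skb. */
    for (i = 0; i < ringcount; i++) {
            struct sk_buff *skb = rx_buf[i];

            if (!skb)
                    continue;
            dma_unmap_single(&pdev->dev, *((dma_addr_t *)skb->cb),
                             bufsize, DMA_FROM_DEVICE);
            kfree_skb(skb);
    }
    /* 2. Only then release the descriptor ring itself. */
    dma_free_coherent(&pdev->dev, sizeof(*desc) * ringcount, desc, dma);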
1366 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1380 ret = _rtl_pci_init_tx_ring(hw, i, rtlpci->txringcount[i]);
1392 if (rtlpci->tx_ring[i].desc ||
1393 rtlpci->tx_ring[i].buffer_desc)
1417 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1430 rtlpci->rx_ring[rxring_idx].desc) {
1433 rtlpci->rx_ring[rxring_idx].idx = 0;
1434 for (i = 0; i < rtlpci->rxringcount; i++) {
1435 entry = &rtlpci->rx_ring[rxring_idx].desc[i];
1440 sizeof(*rtlpci->rx_ring
1455 (u8 *)&rtlpci->rxbuffersize);
1465 rtlpci->rx_ring[rxring_idx].idx = 0;
1473 if (rtlpci->tx_ring[i].desc ||
1474 rtlpci->tx_ring[i].buffer_desc) {
1475 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[i];
1487 dma_unmap_single(&rtlpci->pdev->dev,
1496 rtlpci->tx_ring[i].cur_tx_rp = 0;
1497 rtlpci->tx_ring[i].cur_tx_wp = 0;
1501 ring->entries = rtlpci->txringcount[i];
1564 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1587 ring = &rtlpci->tx_ring[hw_queue];
1699 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1703 synchronize_irq(rtlpci->pdev->irq);
1730 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1739 rtlpci->driver_is_goingto_unload = false;
1770 rtlpci->up_first_time = false;
1779 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1796 rtlpci->driver_is_goingto_unload = true;
2032 struct rtl_pci *rtlpci = rtl_pcidev(pcipriv);
2035 ret = pci_enable_msi(rtlpci->pdev);
2039 ret = request_irq(rtlpci->pdev->irq, &_rtl_pci_interrupt,
2042 pci_disable_msi(rtlpci->pdev);
2046 rtlpci->using_msi = true;
2057 struct rtl_pci *rtlpci = rtl_pcidev(pcipriv);
2060 ret = request_irq(rtlpci->pdev->irq, &_rtl_pci_interrupt,
2065 rtlpci->using_msi = false;
2074 struct rtl_pci *rtlpci = rtl_pcidev(pcipriv);
2077 if (rtlpci->msi_support) {
2109 struct rtl_pci *rtlpci;
2256 rtlpci = rtl_pcidev(pcipriv);
2264 rtlpci->irq_alloc = 1;
2294 struct rtl_pci *rtlpci = rtl_pcidev(pcipriv);
2321 if (rtlpci->irq_alloc) {
2322 free_irq(rtlpci->pdev->irq, hw);
2323 rtlpci->irq_alloc = 0;
2326 if (rtlpci->using_msi)
2327 pci_disable_msi(rtlpci->pdev);
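The interrupt plumbing at 2035–2327 tries MSI first and falls back to the shared legacy line, then undoes whichever variant was active on removal. A hedged end-to-end sketch of that policy (handler and name are placeholders):

    #include <linux/interrupt.h>
    #include <linux/pci.h>

    /* Prefer MSI; fall back to a shared legacy interrupt on failure. */
    static int example_request_irq(struct pci_dev *pdev,
                                   irq_handler_t handler, void *ctx,
                                   bool *using_msi)
    {
            if (!pci_enable_msi(pdev)) {
                    if (!request_irq(pdev->irq, handler, IRQF_SHARED,
                                     "example-nic", ctx)) {
                            *using_msi = true;
                            return 0;
                    }
                    pci_disable_msi(pdev);  /* MSI vector unusable, undo it */
            }
            *using_msi = false;
            return request_irq(pdev->irq, handler, IRQF_SHARED,
                               "example-nic", ctx);
    }

    /* Removal mirrors this: free_irq(pdev->irq, ctx), then
     * pci_disable_msi(pdev) if *using_msi was set. */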