Lines matching refs:pcie — every reference to the identifier `pcie` in the Aardvark PCIe host controller driver (pci-aardvark.c, Marvell Armada 3700). Each entry is prefixed with its source line number; statements that span several source lines are listed under the line containing the match.

299 static inline void advk_writel(struct advk_pcie *pcie, u32 val, u64 reg)
301 writel(val, pcie->base + reg);
304 static inline u32 advk_readl(struct advk_pcie *pcie, u64 reg)
306 return readl(pcie->base + reg);
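
For context, these two accessors carry all register traffic in the rest of this listing; their full bodies are short MMIO wrappers (reconstructed, matching the fragments above):

static inline void advk_writel(struct advk_pcie *pcie, u32 val, u64 reg)
{
	writel(val, pcie->base + reg);	/* 32-bit write at controller base + register offset */
}

static inline u32 advk_readl(struct advk_pcie *pcie, u64 reg)
{
	return readl(pcie->base + reg);	/* 32-bit read */
}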
309 static u8 advk_pcie_ltssm_state(struct advk_pcie *pcie)
314 val = advk_readl(pcie, CFG_REG);
319 static inline bool advk_pcie_link_up(struct advk_pcie *pcie)
322 u8 ltssm_state = advk_pcie_ltssm_state(pcie);
326 static inline bool advk_pcie_link_active(struct advk_pcie *pcie)
336 u8 ltssm_state = advk_pcie_ltssm_state(pcie);
340 static inline bool advk_pcie_link_training(struct advk_pcie *pcie)
347 u8 ltssm_state = advk_pcie_ltssm_state(pcie);
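
All three link predicates derive from the LTSSM field of CFG_REG. A sketch of the state accessor and the link-up test, reconstructed from the fragments (link_active and link_training compare the same value against other LTSSM state ranges):

static u8 advk_pcie_ltssm_state(struct advk_pcie *pcie)
{
	u32 val;
	u8 ltssm_state;

	val = advk_readl(pcie, CFG_REG);
	ltssm_state = (val >> LTSSM_SHIFT) & LTSSM_MASK;	/* 6-bit LTSSM state */
	return ltssm_state;
}

static inline bool advk_pcie_link_up(struct advk_pcie *pcie)
{
	/* The link is up once the LTSSM has reached one of the L* states */
	u8 ltssm_state = advk_pcie_ltssm_state(pcie);

	return ltssm_state >= LTSSM_L0 && ltssm_state < LTSSM_DISABLED;
}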
354 static int advk_pcie_wait_for_link(struct advk_pcie *pcie)
360 if (advk_pcie_link_up(pcie))
369 static void advk_pcie_wait_for_retrain(struct advk_pcie *pcie)
374 if (advk_pcie_link_training(pcie))
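
Both waiters are bounded polling loops. advk_pcie_wait_for_link() checks roughly every 100 ms for up to a second (reconstructed below); advk_pcie_wait_for_retrain() applies the same pattern to advk_pcie_link_training() with a much shorter udelay period.

static int advk_pcie_wait_for_link(struct advk_pcie *pcie)
{
	int retries;

	/* check if the link is up or not */
	for (retries = 0; retries < LINK_WAIT_MAX_RETRIES; retries++) {
		if (advk_pcie_link_up(pcie))
			return 0;

		usleep_range(LINK_WAIT_USLEEP_MIN, LINK_WAIT_USLEEP_MAX);
	}

	return -ETIMEDOUT;
}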
380 static void advk_pcie_issue_perst(struct advk_pcie *pcie)
382 if (!pcie->reset_gpio)
386 dev_info(&pcie->pdev->dev, "issuing PERST via reset GPIO for 10ms\n");
387 gpiod_set_value_cansleep(pcie->reset_gpio, 1);
389 gpiod_set_value_cansleep(pcie->reset_gpio, 0);
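
The unlisted lines of advk_pcie_issue_perst() are the early exit and the 10 ms hold time between asserting and releasing PERST#; reconstructed:

static void advk_pcie_issue_perst(struct advk_pcie *pcie)
{
	if (!pcie->reset_gpio)
		return;		/* no PERST# GPIO wired up on this board */

	/* 10ms delay is needed for some cards */
	dev_info(&pcie->pdev->dev, "issuing PERST via reset GPIO for 10ms\n");
	gpiod_set_value_cansleep(pcie->reset_gpio, 1);	/* assert PERST# */
	usleep_range(10000, 11000);
	gpiod_set_value_cansleep(pcie->reset_gpio, 0);	/* release PERST# */
}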
392 static void advk_pcie_train_link(struct advk_pcie *pcie)
394 struct device *dev = &pcie->pdev->dev;
402 reg = advk_readl(pcie, PCIE_CORE_CTRL0_REG);
404 if (pcie->link_gen == 3)
406 else if (pcie->link_gen == 2)
410 advk_writel(pcie, reg, PCIE_CORE_CTRL0_REG);
417 reg = advk_readl(pcie, PCIE_CORE_PCIEXP_CAP + PCI_EXP_LNKCTL2);
419 if (pcie->link_gen == 3)
421 else if (pcie->link_gen == 2)
425 advk_writel(pcie, reg, PCIE_CORE_PCIEXP_CAP + PCI_EXP_LNKCTL2);
428 reg = advk_readl(pcie, PCIE_CORE_CTRL0_REG);
430 advk_writel(pcie, reg, PCIE_CORE_CTRL0_REG);
436 advk_pcie_issue_perst(pcie);
449 ret = advk_pcie_wait_for_link(pcie);
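
The two link_gen branches appear twice above because the speed preference is programmed into two places: the controller's GEN_SEL field in PCIE_CORE_CTRL0_REG and the standard Target Link Speed field of Link Control 2. A sketch of the LNKCTL2 half:

	/* Set Target Link Speed from the 'max-link-speed' DT property */
	reg = advk_readl(pcie, PCIE_CORE_PCIEXP_CAP + PCI_EXP_LNKCTL2);
	reg &= ~PCI_EXP_LNKCTL2_TLS;
	if (pcie->link_gen == 3)
		reg |= PCI_EXP_LNKCTL2_TLS_8_0GT;
	else if (pcie->link_gen == 2)
		reg |= PCI_EXP_LNKCTL2_TLS_5_0GT;
	else
		reg |= PCI_EXP_LNKCTL2_TLS_2_5GT;
	advk_writel(pcie, reg, PCIE_CORE_PCIEXP_CAP + PCI_EXP_LNKCTL2);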
460 static void advk_pcie_set_ob_win(struct advk_pcie *pcie, u8 win_num,
464 advk_writel(pcie, OB_WIN_ENABLE | lower_32_bits(match), OB_WIN_MATCH_LS(win_num));
466 advk_writel(pcie, upper_32_bits(match), OB_WIN_MATCH_MS(win_num));
467 advk_writel(pcie, lower_32_bits(remap), OB_WIN_REMAP_LS(win_num));
468 advk_writel(pcie, upper_32_bits(remap), OB_WIN_REMAP_MS(win_num));
469 advk_writel(pcie, lower_32_bits(mask), OB_WIN_MASK_LS(win_num));
470 advk_writel(pcie, upper_32_bits(mask), OB_WIN_MASK_MS(win_num));
471 advk_writel(pcie, actions, OB_WIN_ACTIONS(win_num));
474 static void advk_pcie_disable_ob_win(struct advk_pcie *pcie, u8 win_num)
476 advk_writel(pcie, 0, OB_WIN_MATCH_LS(win_num));
477 advk_writel(pcie, 0, OB_WIN_MATCH_MS(win_num));
478 advk_writel(pcie, 0, OB_WIN_REMAP_LS(win_num));
479 advk_writel(pcie, 0, OB_WIN_REMAP_MS(win_num));
480 advk_writel(pcie, 0, OB_WIN_MASK_LS(win_num));
481 advk_writel(pcie, 0, OB_WIN_MASK_MS(win_num));
482 advk_writel(pcie, 0, OB_WIN_ACTIONS(win_num));
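
Each outbound window is three 64-bit quantities (match, remap, mask) split across LS/MS register pairs plus an actions word, with the enable bit carried in the low match register. A hypothetical call mapping a 16 MiB MEM region 1:1 (addresses and size invented for illustration):

	advk_pcie_set_ob_win(pcie, 0,
			     0xe8000000,	/* match: CPU-side base address (hypothetical) */
			     0xe8000000,	/* remap: PCIe-side address, 1:1 here */
			     ~(SZ_16M - 1),	/* mask: power-of-two window size */
			     OB_WIN_TYPE_MEM);	/* actions: generate MEM TLPs */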
485 static void advk_pcie_setup_hw(struct advk_pcie *pcie)
496 reg = advk_readl(pcie, PCIE_CORE_REF_CLK_REG);
499 advk_writel(pcie, reg, PCIE_CORE_REF_CLK_REG);
502 reg = advk_readl(pcie, CTRL_CONFIG_REG);
505 advk_writel(pcie, reg, CTRL_CONFIG_REG);
508 reg = advk_readl(pcie, PCIE_CORE_CTRL0_REG);
510 advk_writel(pcie, reg, PCIE_CORE_CTRL0_REG);
520 advk_writel(pcie, reg, VENDOR_ID_REG);
537 reg = advk_readl(pcie, PCIE_CORE_DEV_REV_REG);
540 advk_writel(pcie, reg, PCIE_CORE_DEV_REV_REG);
543 reg = advk_readl(pcie, PCIE_CORE_CMD_STATUS_REG);
545 advk_writel(pcie, reg, PCIE_CORE_CMD_STATUS_REG);
552 advk_writel(pcie, reg, PCIE_CORE_ERR_CAPCTL_REG);
555 reg = advk_readl(pcie, PCIE_CORE_PCIEXP_CAP + PCI_EXP_DEVCTL);
562 advk_writel(pcie, reg, PCIE_CORE_PCIEXP_CAP + PCI_EXP_DEVCTL);
567 advk_writel(pcie, reg, PCIE_CORE_CTRL2_REG);
570 reg = advk_readl(pcie, PCIE_CORE_CTRL0_REG);
573 advk_writel(pcie, reg, PCIE_CORE_CTRL0_REG);
576 reg = advk_readl(pcie, PCIE_CORE_CTRL2_REG);
578 advk_writel(pcie, reg, PCIE_CORE_CTRL2_REG);
581 advk_writel(pcie, PCIE_MSI_ALL_MASK, PCIE_MSI_STATUS_REG);
582 advk_writel(pcie, PCIE_ISR0_ALL_MASK, PCIE_ISR0_REG);
583 advk_writel(pcie, PCIE_ISR1_ALL_MASK, PCIE_ISR1_REG);
584 advk_writel(pcie, PCIE_IRQ_ALL_MASK, HOST_CTRL_INT_STATUS_REG);
589 advk_writel(pcie, reg, PCIE_ISR0_MASK_REG);
591 advk_writel(pcie, PCIE_ISR1_ALL_MASK, PCIE_ISR1_MASK_REG);
594 advk_writel(pcie, ~(u32)PCIE_MSI_ALL_MASK, PCIE_MSI_MASK_REG);
598 advk_writel(pcie, reg, HOST_CTRL_INT_MASK_REG);
610 reg = advk_readl(pcie, PCIE_CORE_CTRL2_REG);
612 advk_writel(pcie, reg, PCIE_CORE_CTRL2_REG);
619 advk_writel(pcie, OB_WIN_TYPE_MEM, OB_WIN_DEFAULT_ACTIONS);
627 reg = advk_readl(pcie, PIO_CTRL);
629 advk_writel(pcie, reg, PIO_CTRL);
636 for (i = 0; i < pcie->wins_count; i++)
637 advk_pcie_set_ob_win(pcie, i,
638 pcie->wins[i].match, pcie->wins[i].remap,
639 pcie->wins[i].mask, pcie->wins[i].actions);
642 for (i = pcie->wins_count; i < OB_WIN_COUNT; i++)
643 advk_pcie_disable_ob_win(pcie, i);
645 advk_pcie_train_link(pcie);
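
Nearly every step of advk_pcie_setup_hw() above is the same read-modify-write idiom on a 32-bit control register; schematically (the field macros here are placeholders, not driver symbols):

	reg = advk_readl(pcie, PCIE_CORE_CTRL0_REG);	/* read current value */
	reg &= ~EXAMPLE_FIELD_MASK;			/* placeholder: clear the field */
	reg |= EXAMPLE_FIELD_VALUE;			/* placeholder: set the new value */
	advk_writel(pcie, reg, PCIE_CORE_CTRL0_REG);	/* write it back */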
648 static int advk_pcie_check_pio_status(struct advk_pcie *pcie, bool allow_crs, u32 *val)
650 struct device *dev = &pcie->pdev->dev;
656 reg = advk_readl(pcie, PIO_STAT);
684 *val = advk_readl(pcie, PIO_RD_DATA);
750 str_posted, strcomp_status, reg, advk_readl(pcie, PIO_ADDR_LS));
755 static int advk_pcie_wait_pio(struct advk_pcie *pcie)
757 struct device *dev = &pcie->pdev->dev;
763 start = advk_readl(pcie, PIO_START);
764 isr = advk_readl(pcie, PIO_ISR);
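
advk_pcie_wait_pio() polls the two registers read above until the transfer is no longer running and the DONE interrupt bit is set, bounded at roughly 1.5 s; reconstructed:

static int advk_pcie_wait_pio(struct advk_pcie *pcie)
{
	struct device *dev = &pcie->pdev->dev;
	int i;

	for (i = 0; i < PIO_RETRY_CNT; i++) {
		u32 start, isr;

		start = advk_readl(pcie, PIO_START);
		isr = advk_readl(pcie, PIO_ISR);
		if (!start && isr)
			return i;	/* poll count feeds the caller's retry budget */

		udelay(PIO_RETRY_DELAY);
	}

	dev_err(dev, "PIO read/write transfer time out\n");
	return -ETIMEDOUT;
}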
778 struct advk_pcie *pcie = bridge->data;
782 *value = advk_readl(pcie, PCIE_CORE_CMD_STATUS_REG);
793 if (advk_readl(pcie, PCIE_CORE_CTRL1_REG) & HOT_RESET_GEN)
810 struct advk_pcie *pcie = bridge->data;
814 advk_writel(pcie, new, PCIE_CORE_CMD_STATUS_REG);
819 u32 val = advk_readl(pcie, PCIE_CORE_CTRL1_REG);
824 advk_writel(pcie, val, PCIE_CORE_CTRL1_REG);
837 struct advk_pcie *pcie = bridge->data;
846 u32 val = advk_readl(pcie, PCIE_ISR0_MASK_REG);
854 u32 isr0 = advk_readl(pcie, PCIE_ISR0_REG);
855 u32 msglog = advk_readl(pcie, PCIE_MSG_LOG_REG);
863 u32 val = advk_readl(pcie, PCIE_CORE_PCIEXP_CAP + reg);
876 u32 val = advk_readl(pcie, PCIE_CORE_PCIEXP_CAP + reg) & ~(PCI_EXP_LNKSTA_LT << 16);
878 if (advk_pcie_link_training(pcie))
880 if (advk_pcie_link_active(pcie))
888 *value = advk_readl(pcie, PCIE_CORE_PCIEXP_CAP + reg);
900 struct advk_pcie *pcie = bridge->data;
904 advk_writel(pcie, new, PCIE_CORE_PCIEXP_CAP + reg);
908 advk_writel(pcie, new, PCIE_CORE_PCIEXP_CAP + reg);
910 advk_pcie_wait_for_retrain(pcie);
915 u32 val = advk_readl(pcie, PCIE_ISR0_MASK_REG) & ~PCIE_MSG_PM_PME_MASK;
919 advk_writel(pcie, val, PCIE_ISR0_MASK_REG);
925 advk_writel(pcie, new, PCIE_ISR0_REG);
944 static int advk_sw_pci_bridge_init(struct advk_pcie *pcie)
946 struct pci_bridge_emul *bridge = &pcie->bridge;
949 bridge->conf.vendor = cpu_to_le16(advk_readl(pcie, PCIE_CORE_DEV_ID_REG) & 0xffff);
951 bridge->conf.device = cpu_to_le16(advk_readl(pcie, PCIE_CORE_DEV_ID_REG) >> 16);
953 bridge->conf.class_revision = cpu_to_le32(advk_readl(pcie, PCIE_CORE_DEV_REV_REG) & 0xff);
973 bridge->data = pcie;
979 static bool advk_pcie_valid_device(struct advk_pcie *pcie, struct pci_bus *bus,
989 if (!pci_is_root_bus(bus) && !advk_pcie_link_up(pcie))
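
Reconstructed, the validity check rejects slots other than 0 on the root bus (only the emulated bridge lives there) and refuses downstream config cycles while the link is down, since they could only time out:

static bool advk_pcie_valid_device(struct advk_pcie *pcie, struct pci_bus *bus,
				   int devfn)
{
	if (pci_is_root_bus(bus) && PCI_SLOT(devfn) != 0)
		return false;

	/*
	 * If the link goes down after this check, nothing bad happens:
	 * the config access simply times out.
	 */
	if (!pci_is_root_bus(bus) && !advk_pcie_link_up(pcie))
		return false;

	return true;
}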
995 static bool advk_pcie_pio_is_running(struct advk_pcie *pcie)
997 struct device *dev = &pcie->pdev->dev;
1016 if (advk_readl(pcie, PIO_START)) {
1027 struct advk_pcie *pcie = bus->sysdata;
1033 if (!advk_pcie_valid_device(pcie, bus, devfn)) {
1039 return pci_bridge_emul_conf_read(&pcie->bridge, where,
1048 (le16_to_cpu(pcie->bridge.pcie_conf.rootctl) & PCI_EXP_RTCTL_CRSSVE);
1051 if (advk_pcie_pio_is_running(pcie))
1055 reg = advk_readl(pcie, PIO_CTRL);
1061 advk_writel(pcie, reg, PIO_CTRL);
1065 advk_writel(pcie, reg, PIO_ADDR_LS);
1066 advk_writel(pcie, 0, PIO_ADDR_MS);
1069 advk_writel(pcie, 0xf, PIO_WR_DATA_STRB);
1074 advk_writel(pcie, 1, PIO_ISR);
1075 advk_writel(pcie, 1, PIO_START);
1077 ret = advk_pcie_wait_pio(pcie);
1084 ret = advk_pcie_check_pio_status(pcie, allow_crs, val);
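
The PIO config read above is a fixed register sequence; a condensed sketch (CRS handling and the retry loop omitted, macro names as in v5.15-era kernels):

	/* Type 0 config cycles for the bus right behind the root port, Type 1 beyond it */
	reg = advk_readl(pcie, PIO_CTRL);
	reg &= ~PIO_CTRL_TYPE_MASK;
	if (pci_is_root_bus(bus->parent))
		reg |= PCIE_CONFIG_RD_TYPE0;
	else
		reg |= PCIE_CONFIG_RD_TYPE1;
	advk_writel(pcie, reg, PIO_CTRL);

	/* ECAM-style bus/devfn/register offset; only the low 32 bits are used */
	reg = PCIE_ECAM_OFFSET(bus->number, devfn, where);
	advk_writel(pcie, reg, PIO_ADDR_LS);
	advk_writel(pcie, 0, PIO_ADDR_MS);

	/* Reads always drive all four byte enables */
	advk_writel(pcie, 0xf, PIO_WR_DATA_STRB);

	/* Clear the DONE bit, start the transfer, then wait and collect the result */
	advk_writel(pcie, 1, PIO_ISR);
	advk_writel(pcie, 1, PIO_START);
	ret = advk_pcie_wait_pio(pcie);
	if (ret >= 0)
		ret = advk_pcie_check_pio_status(pcie, allow_crs, val);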
1115 struct advk_pcie *pcie = bus->sysdata;
1122 if (!advk_pcie_valid_device(pcie, bus, devfn))
1126 return pci_bridge_emul_conf_write(&pcie->bridge, where,
1132 if (advk_pcie_pio_is_running(pcie))
1136 reg = advk_readl(pcie, PIO_CTRL);
1142 advk_writel(pcie, reg, PIO_CTRL);
1146 advk_writel(pcie, reg, PIO_ADDR_LS);
1147 advk_writel(pcie, 0, PIO_ADDR_MS);
1155 advk_writel(pcie, reg, PIO_WR_DATA);
1158 advk_writel(pcie, data_strobe, PIO_WR_DATA_STRB);
1163 advk_writel(pcie, 1, PIO_ISR);
1164 advk_writel(pcie, 1, PIO_START);
1166 ret = advk_pcie_wait_pio(pcie);
1172 ret = advk_pcie_check_pio_status(pcie, false, NULL);
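
The write path differs from the read path only in positioning the data within the 32-bit lane and computing the byte-enable strobe (lines 1155/1158 above); reconstructed:

	/* e.g. a 2-byte write at offset 2: reg = val << 16, data_strobe = 0b1100 */
	offset = where & 0x3;
	reg = val << (offset * 8);
	data_strobe = GENMASK(size - 1, 0) << offset;

	advk_writel(pcie, reg, PIO_WR_DATA);		  /* data, shifted into place */
	advk_writel(pcie, data_strobe, PIO_WR_DATA_STRB); /* byte enables */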
1186 struct advk_pcie *pcie = irq_data_get_irq_chip_data(data);
1187 phys_addr_t msi_msg = virt_to_phys(&pcie->msi_msg);
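
The MSI doorbell address is simply the physical address of the driver-owned pcie->msi_msg variable; the same address is programmed into the controller at lines 1312-1314 below, so inbound memory writes to it are decoded as MSIs. Reconstructed:

static void advk_msi_irq_compose_msi_msg(struct irq_data *data,
					 struct msi_msg *msg)
{
	struct advk_pcie *pcie = irq_data_get_irq_chip_data(data);
	phys_addr_t msi_msg = virt_to_phys(&pcie->msi_msg);

	msg->address_lo = lower_32_bits(msi_msg);
	msg->address_hi = upper_32_bits(msi_msg);
	msg->data = data->hwirq;	/* the hwirq number doubles as the MSI payload */
}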
1204 struct advk_pcie *pcie = domain->host_data;
1207 mutex_lock(&pcie->msi_used_lock);
1208 hwirq = bitmap_find_free_region(pcie->msi_used, MSI_IRQ_NUM,
1210 mutex_unlock(&pcie->msi_used_lock);
1216 &pcie->msi_bottom_irq_chip,
1227 struct advk_pcie *pcie = domain->host_data;
1229 mutex_lock(&pcie->msi_used_lock);
1230 bitmap_release_region(pcie->msi_used, d->hwirq, order_base_2(nr_irqs));
1231 mutex_unlock(&pcie->msi_used_lock);
1241 struct advk_pcie *pcie = d->domain->host_data;
1246 raw_spin_lock_irqsave(&pcie->irq_lock, flags);
1247 mask = advk_readl(pcie, PCIE_ISR1_MASK_REG);
1249 advk_writel(pcie, mask, PCIE_ISR1_MASK_REG);
1250 raw_spin_unlock_irqrestore(&pcie->irq_lock, flags);
1255 struct advk_pcie *pcie = d->domain->host_data;
1260 raw_spin_lock_irqsave(&pcie->irq_lock, flags);
1261 mask = advk_readl(pcie, PCIE_ISR1_MASK_REG);
1263 advk_writel(pcie, mask, PCIE_ISR1_MASK_REG);
1264 raw_spin_unlock_irqrestore(&pcie->irq_lock, flags);
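
Mask and unmask are the same irq_lock-protected read-modify-write of PCIE_ISR1_MASK_REG, differing only in whether the per-INTx bit is set or cleared; the mask side, reconstructed:

static void advk_pcie_irq_mask(struct irq_data *d)
{
	struct advk_pcie *pcie = d->domain->host_data;
	irq_hw_number_t hwirq = irqd_to_hwirq(d);
	unsigned long flags;
	u32 mask;

	raw_spin_lock_irqsave(&pcie->irq_lock, flags);
	mask = advk_readl(pcie, PCIE_ISR1_MASK_REG);
	mask |= PCIE_ISR1_INTX_ASSERT(hwirq);	/* a set bit masks that INTx */
	advk_writel(pcie, mask, PCIE_ISR1_MASK_REG);
	raw_spin_unlock_irqrestore(&pcie->irq_lock, flags);
}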
1270 struct advk_pcie *pcie = h->host_data;
1274 irq_set_chip_and_handler(virq, &pcie->irq_chip,
1276 irq_set_chip_data(virq, pcie);
1286 static int advk_pcie_init_msi_irq_domain(struct advk_pcie *pcie)
1288 struct device *dev = &pcie->pdev->dev;
1294 mutex_init(&pcie->msi_used_lock);
1296 bottom_ic = &pcie->msi_bottom_irq_chip;
1302 msi_ic = &pcie->msi_irq_chip;
1305 msi_di = &pcie->msi_domain_info;
1310 msi_msg_phys = virt_to_phys(&pcie->msi_msg);
1312 advk_writel(pcie, lower_32_bits(msi_msg_phys), PCIE_MSI_ADDR_LOW_REG);
1314 advk_writel(pcie, upper_32_bits(msi_msg_phys), PCIE_MSI_ADDR_HIGH_REG);
1317 pcie->msi_inner_domain = irq_domain_add_linear(NULL, MSI_IRQ_NUM, &advk_msi_domain_ops, pcie);
1320 if (!pcie->msi_inner_domain)
1323 pcie->msi_domain = pci_msi_create_irq_domain(of_node_to_fwnode(node), msi_di, pcie->msi_inner_domain);
1326 if (!pcie->msi_domain) {
1327 irq_domain_remove(pcie->msi_inner_domain);
1334 static void advk_pcie_remove_msi_irq_domain(struct advk_pcie *pcie)
1336 irq_domain_remove(pcie->msi_domain);
1337 irq_domain_remove(pcie->msi_inner_domain);
1340 static int advk_pcie_init_irq_domain(struct advk_pcie *pcie)
1342 struct device *dev = &pcie->pdev->dev;
1348 raw_spin_lock_init(&pcie->irq_lock);
1356 irq_chip = &pcie->irq_chip;
1369 pcie->irq_domain = irq_domain_add_linear(pcie_intc_node, PCI_NUM_INTX, &advk_pcie_irq_domain_ops, pcie);
1372 if (!pcie->irq_domain) {
1383 static void advk_pcie_remove_irq_domain(struct advk_pcie *pcie)
1385 irq_domain_remove(pcie->irq_domain);
1388 static void advk_pcie_handle_msi(struct advk_pcie *pcie)
1393 msi_mask = advk_readl(pcie, PCIE_MSI_MASK_REG);
1394 msi_val = advk_readl(pcie, PCIE_MSI_STATUS_REG);
1401 advk_writel(pcie, BIT(msi_idx), PCIE_MSI_STATUS_REG);
1402 virq = irq_find_mapping(pcie->msi_inner_domain, msi_idx);
1406 advk_writel(pcie, PCIE_ISR0_MSI_INT_PENDING, PCIE_ISR0_REG);
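
Reconstructed, the MSI handler walks the unmasked status bits, acknowledging each vector in PCIE_MSI_STATUS_REG before dispatching it, and finally clears the MSI summary bit in ISR0:

static void advk_pcie_handle_msi(struct advk_pcie *pcie)
{
	u32 msi_val, msi_mask, msi_status, msi_idx;
	int virq;

	msi_mask = advk_readl(pcie, PCIE_MSI_MASK_REG);
	msi_val = advk_readl(pcie, PCIE_MSI_STATUS_REG);
	msi_status = msi_val & ~msi_mask;	/* consider only unmasked vectors */

	for (msi_idx = 0; msi_idx < MSI_IRQ_NUM; msi_idx++) {
		if (!(BIT(msi_idx) & msi_status))
			continue;

		advk_writel(pcie, BIT(msi_idx), PCIE_MSI_STATUS_REG);	/* ack */
		virq = irq_find_mapping(pcie->msi_inner_domain, msi_idx);
		generic_handle_irq(virq);
	}

	advk_writel(pcie, PCIE_ISR0_MSI_INT_PENDING, PCIE_ISR0_REG);	/* clear summary */
}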
1410 static void advk_pcie_handle_int(struct advk_pcie *pcie)
1416 isr0_val = advk_readl(pcie, PCIE_ISR0_REG);
1417 isr0_mask = advk_readl(pcie, PCIE_ISR0_MASK_REG);
1420 isr1_val = advk_readl(pcie, PCIE_ISR1_REG);
1421 isr1_mask = advk_readl(pcie, PCIE_ISR1_MASK_REG);
1426 advk_pcie_handle_msi(pcie);
1433 advk_writel(pcie, PCIE_ISR1_INTX_ASSERT(i), PCIE_ISR1_REG);
1436 virq = irq_find_mapping(pcie->irq_domain, i);
1443 struct advk_pcie *pcie = arg;
1446 status = advk_readl(pcie, HOST_CTRL_INT_STATUS_REG);
1450 advk_pcie_handle_int(pcie);
1453 advk_writel(pcie, PCIE_IRQ_CORE_INT, HOST_CTRL_INT_STATUS_REG);
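
The top-level handler (registered with IRQF_SHARED below) reconstructs as a check, dispatch, acknowledge sequence:

static irqreturn_t advk_pcie_irq_handler(int irq, void *arg)
{
	struct advk_pcie *pcie = arg;
	u32 status;

	status = advk_readl(pcie, HOST_CTRL_INT_STATUS_REG);
	if (!(status & PCIE_IRQ_CORE_INT))
		return IRQ_NONE;	/* shared line, not our interrupt */

	advk_pcie_handle_int(pcie);	/* demultiplex ISR0/ISR1: MSIs and INTx */

	/* Clear interrupt */
	advk_writel(pcie, PCIE_IRQ_CORE_INT, HOST_CTRL_INT_STATUS_REG);

	return IRQ_HANDLED;
}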
1458 static void __maybe_unused advk_pcie_disable_phy(struct advk_pcie *pcie)
1460 phy_power_off(pcie->phy);
1461 phy_exit(pcie->phy);
1464 static int advk_pcie_enable_phy(struct advk_pcie *pcie)
1468 if (!pcie->phy)
1471 ret = phy_init(pcie->phy);
1475 ret = phy_set_mode(pcie->phy, PHY_MODE_PCIE);
1477 phy_exit(pcie->phy);
1481 ret = phy_power_on(pcie->phy);
1483 dev_warn(&pcie->pdev->dev, "PHY unsupported by firmware\n");
1485 phy_exit(pcie->phy);
1492 static int advk_pcie_setup_phy(struct advk_pcie *pcie)
1494 struct device *dev = &pcie->pdev->dev;
1498 pcie->phy = devm_of_phy_get(dev, node, NULL);
1499 if (IS_ERR(pcie->phy) && (PTR_ERR(pcie->phy) == -EPROBE_DEFER))
1500 return PTR_ERR(pcie->phy);
1503 if (IS_ERR(pcie->phy)) {
1504 dev_warn(dev, "PHY unavailable (%ld)\n", PTR_ERR(pcie->phy));
1505 pcie->phy = NULL;
1509 ret = advk_pcie_enable_phy(pcie);
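
PHY bring-up is init, set PCIe mode, power on, with phy_exit() unwinding each failure; -EOPNOTSUPP from phy_power_on() is tolerated because some firmware keeps the PHY under its own control. Reconstructed:

static int advk_pcie_enable_phy(struct advk_pcie *pcie)
{
	int ret;

	if (!pcie->phy)
		return 0;	/* PHY is optional; see advk_pcie_setup_phy() above */

	ret = phy_init(pcie->phy);
	if (ret)
		return ret;

	ret = phy_set_mode(pcie->phy, PHY_MODE_PCIE);
	if (ret) {
		phy_exit(pcie->phy);
		return ret;
	}

	ret = phy_power_on(pcie->phy);
	if (ret == -EOPNOTSUPP) {
		dev_warn(&pcie->pdev->dev, "PHY unsupported by firmware\n");
	} else if (ret) {
		phy_exit(pcie->phy);
		return ret;
	}

	return 0;
}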
1519 struct advk_pcie *pcie;
1528 pcie = pci_host_bridge_priv(bridge);
1529 pcie->pdev = pdev;
1530 platform_set_drvdata(pdev, pcie);
1566 while (pcie->wins_count < OB_WIN_COUNT && size > 0) {
1576 pcie->wins_count, (unsigned long long)start,
1580 pcie->wins[pcie->wins_count].actions = OB_WIN_TYPE_IO;
1581 pcie->wins[pcie->wins_count].match = pci_pio_to_address(start);
1583 pcie->wins[pcie->wins_count].actions = OB_WIN_TYPE_MEM;
1584 pcie->wins[pcie->wins_count].match = start;
1586 pcie->wins[pcie->wins_count].remap = start - entry->offset;
1587 pcie->wins[pcie->wins_count].mask = ~(win_size - 1);
1589 if (pcie->wins[pcie->wins_count].remap & (win_size - 1))
1594 pcie->wins_count++;
1598 dev_err(&pcie->pdev->dev,
1606 pcie->base = devm_platform_ioremap_resource(pdev, 0);
1607 if (IS_ERR(pcie->base))
1608 return PTR_ERR(pcie->base);
1615 ret = devm_request_irq(dev, irq, advk_pcie_irq_handler, IRQF_SHARED | IRQF_NO_THREAD, "advk-pcie", pcie);
1622 pcie->reset_gpio = devm_gpiod_get_from_of_node(dev, dev->of_node, "reset-gpios", 0, GPIOD_OUT_LOW, "pcie1-reset");
1626 ret = PTR_ERR_OR_ZERO(pcie->reset_gpio);
1629 pcie->reset_gpio = NULL;
1640 pcie->link_gen = 3;
1642 pcie->link_gen = ret;
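
link_gen comes from the standard 'max-link-speed' DT property, clamped to the controller's Gen3 maximum; reconstructed:

	ret = of_pci_get_max_link_speed(dev->of_node);
	if (ret <= 0 || ret > 3)
		pcie->link_gen = 3;	/* property absent or invalid: allow up to Gen3 */
	else
		pcie->link_gen = ret;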
1644 ret = advk_pcie_setup_phy(pcie);
1648 advk_pcie_setup_hw(pcie);
1650 ret = advk_sw_pci_bridge_init(pcie);
1656 ret = advk_pcie_init_irq_domain(pcie);
1662 ret = advk_pcie_init_msi_irq_domain(pcie);
1665 advk_pcie_remove_irq_domain(pcie);
1669 bridge->sysdata = pcie;
1674 advk_pcie_remove_msi_irq_domain(pcie);
1675 advk_pcie_remove_irq_domain(pcie);
1684 struct advk_pcie *pcie = platform_get_drvdata(pdev);
1685 struct pci_host_bridge *bridge = pci_host_bridge_from_priv(pcie);
1693 advk_pcie_remove_msi_irq_domain(pcie);
1694 advk_pcie_remove_irq_domain(pcie);
1698 advk_pcie_disable_ob_win(pcie, i);
1704 { .compatible = "marvell,armada-3700-pcie", },
1711 .name = "advk-pcie",