Lines matching `adev` in the amdgpu SI support code (drivers/gpu/drm/amd/amdgpu/si.c); the leading number on each entry is its line number in that file.

908 static u32 si_pcie_rreg(struct amdgpu_device *adev, u32 reg)
913 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
917 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
921 static void si_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
925 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
930 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
933 static u32 si_pciep_rreg(struct amdgpu_device *adev, u32 reg)
938 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
942 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
946 static void si_pciep_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
950 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
955 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
958 static u32 si_smc_rreg(struct amdgpu_device *adev, u32 reg)
963 spin_lock_irqsave(&adev->smc_idx_lock, flags);
966 spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
970 static void si_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
974 spin_lock_irqsave(&adev->smc_idx_lock, flags);
977 spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
980 static u32 si_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg)
985 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
988 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
992 static void si_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
996 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
999 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
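All eight accessors above (PCIE, PCIE port, SMC, UVD_CTX) share one pattern: a spinlock-guarded INDEX/DATA register pair, where the target register number is written to the index register and the value moves through the data register. A minimal sketch of that shape, following the upstream si.c PCIE accessor (the AMDGPU_PCIE_INDEX/AMDGPU_PCIE_DATA names come from the amdgpu headers; the other variants differ only in which index/data registers and lock they use):

    static u32 si_pcie_rreg(struct amdgpu_device *adev, u32 reg)
    {
        unsigned long flags;
        u32 r;

        /* serialize use of the shared INDEX/DATA register pair */
        spin_lock_irqsave(&adev->pcie_idx_lock, flags);
        WREG32(AMDGPU_PCIE_INDEX, reg);     /* select the indirect register */
        (void)RREG32(AMDGPU_PCIE_INDEX);    /* read back to post the write */
        r = RREG32(AMDGPU_PCIE_DATA);       /* fetch its value */
        spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
        return r;
    }

The irqsave lock matters because the index register is shared state: a caller in interrupt context must not race another thread between the index write and the data access.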
1054 static uint32_t si_get_register_value(struct amdgpu_device *adev,
1065 return adev->gfx.config.rb_config[se_idx][sh_idx].rb_backend_disable;
1067 return adev->gfx.config.rb_config[se_idx][sh_idx].user_rb_backend_disable;
1069 return adev->gfx.config.rb_config[se_idx][sh_idx].raster_config;
1072 mutex_lock(&adev->grbm_idx_mutex);
1074 amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff);
1079 amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
1080 mutex_unlock(&adev->grbm_idx_mutex);
1087 return adev->gfx.config.gb_addr_config;
1089 return adev->gfx.config.mc_arb_ramcfg;
1123 return adev->gfx.config.tile_mode_array[idx];
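si_get_register_value (1054-1123) answers most queries from cached config state (rb_config, gb_addr_config, mc_arb_ramcfg, tile_mode_array); only per-SE/per-SH indexed reads touch the hardware, bracketed by GRBM steering under grbm_idx_mutex. The indexed path, condensed from the lines above (0xffffffff means broadcast/all instances):

    mutex_lock(&adev->grbm_idx_mutex);
    if (se_num != 0xffffffff || sh_num != 0xffffffff)
        amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff);

    val = RREG32(reg_offset);

    /* restore broadcast so later reads are not steered to one SE/SH */
    if (se_num != 0xffffffff || sh_num != 0xffffffff)
        amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
    mutex_unlock(&adev->grbm_idx_mutex);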
1129 static int si_read_register(struct amdgpu_device *adev, u32 se_num,
1141 *value = si_get_register_value(adev, indexed, se_num, sh_num,
1148 static bool si_read_disabled_bios(struct amdgpu_device *adev)
1158 if (adev->mode_info.num_crtc) {
1167 if (adev->mode_info.num_crtc) {
1180 r = amdgpu_read_bios(adev);
1184 if (adev->mode_info.num_crtc) {
1196 static bool si_read_bios_from_rom(struct amdgpu_device *adev,
1207 if (adev->flags & AMD_IS_APU)
1220 static void si_set_clk_bypass_mode(struct amdgpu_device *adev)
1232 for (i = 0; i < adev->usec_timeout; i++) {
1247 static void si_spll_powerdown(struct amdgpu_device *adev)
1268 static int si_gpu_pci_config_reset(struct amdgpu_device *adev)
1273 dev_info(adev->dev, "GPU pci config reset\n");
1276 si_set_clk_bypass_mode(adev);
1278 si_spll_powerdown(adev);
1280 pci_clear_master(adev->pdev);
1282 amdgpu_device_pci_config_reset(adev);
1287 for (i = 0; i < adev->usec_timeout; i++) {
1290 pci_set_master(adev->pdev);
1291 adev->has_hw_reset = true;
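The reset sequence in si_gpu_pci_config_reset (1268-1291) is deliberately ordered: clocks to bypass, SPLL powered down, bus mastering off, then the PCI config reset, then a poll until the chip responds again. Condensed from the lines above (mmCONFIG_MEMSIZE reads back 0xffffffff while the ASIC is still in reset):

    si_set_clk_bypass_mode(adev);           /* mclk/sclk to bypass */
    si_spll_powerdown(adev);                /* power down the SPLL */
    pci_clear_master(adev->pdev);           /* disable bus mastering */
    amdgpu_device_pci_config_reset(adev);   /* the actual reset */

    udelay(20);

    /* wait for the ASIC to come out of reset */
    for (i = 0; i < adev->usec_timeout; i++) {
        if (RREG32(mmCONFIG_MEMSIZE) != 0xffffffff) {
            pci_set_master(adev->pdev);     /* re-enable bus mastering */
            adev->has_hw_reset = true;
            r = 0;
            break;
        }
        udelay(1);
    }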
1301 static int si_asic_reset(struct amdgpu_device *adev)
1305 dev_info(adev->dev, "PCI CONFIG reset\n");
1307 amdgpu_atombios_scratch_regs_engine_hung(adev, true);
1309 r = si_gpu_pci_config_reset(adev);
1311 amdgpu_atombios_scratch_regs_engine_hung(adev, false);
1316 static bool si_asic_supports_baco(struct amdgpu_device *adev)
1322 si_asic_reset_method(struct amdgpu_device *adev)
1326 dev_warn(adev->dev, "Specified reset method:%d isn't supported, using AUTO instead.\n",
1332 static u32 si_get_config_memsize(struct amdgpu_device *adev)
1337 static void si_vga_set_state(struct amdgpu_device *adev, bool state)
1351 static u32 si_get_xclk(struct amdgpu_device *adev)
1353 u32 reference_clock = adev->clock.spll.reference_freq;
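si_get_xclk (1351) derives the crystal clock from the SPLL reference frequency, corrected for two straps. Roughly, per the upstream function (CG_CLKPIN_CNTL*, MUX_TCLK_TO_XCLK, XTALIN_DIVIDE, and TCLK come from the SI headers):

    static u32 si_get_xclk(struct amdgpu_device *adev)
    {
        u32 reference_clock = adev->clock.spll.reference_freq;
        u32 tmp;

        tmp = RREG32(CG_CLKPIN_CNTL_2);
        if (tmp & MUX_TCLK_TO_XCLK)     /* xclk is muxed to TCLK */
            return TCLK;

        tmp = RREG32(CG_CLKPIN_CNTL);
        if (tmp & XTALIN_DIVIDE)        /* crystal input divided by 4 */
            return reference_clock / 4;

        return reference_clock;
    }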
1367 static void si_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
1377 static void si_invalidate_hdp(struct amdgpu_device *adev,
1388 static bool si_need_full_reset(struct amdgpu_device *adev)
1394 static bool si_need_reset_on_init(struct amdgpu_device *adev)
1399 static int si_get_pcie_lanes(struct amdgpu_device *adev)
1403 if (adev->flags & AMD_IS_APU)
1424 static void si_set_pcie_lanes(struct amdgpu_device *adev, int lanes)
1428 if (adev->flags & AMD_IS_APU)
1464 static void si_get_pcie_usage(struct amdgpu_device *adev, uint64_t *count0,
1474 if (adev->flags & AMD_IS_APU)
1510 static uint64_t si_get_pcie_replay_count(struct amdgpu_device *adev)
1522 static int si_uvd_send_upll_ctlreq(struct amdgpu_device *adev,
1580 * @adev: amdgpu_device pointer
1597 static int si_calc_upll_dividers(struct amdgpu_device *adev,
1607 unsigned vco_freq, ref_freq = adev->clock.spll.reference_freq;
1659 static int si_set_uvd_clocks(struct amdgpu_device *adev, u32 vclk, u32 dclk)
1677 r = si_calc_upll_dividers(adev, vclk, dclk, 125000, 250000,
1697 r = si_uvd_send_upll_ctlreq(adev, CG_UPLL_FUNC_CNTL);
1736 r = si_uvd_send_upll_ctlreq(adev, CG_UPLL_FUNC_CNTL);
1750 static int si_vce_send_vcepll_ctlreq(struct amdgpu_device *adev)
1782 static int si_set_vce_clocks(struct amdgpu_device *adev, u32 evclk, u32 ecclk)
1803 r = si_calc_upll_dividers(adev, evclk, ecclk, 125000, 250000,
1826 r = si_vce_send_vcepll_ctlreq(adev);
1860 r = si_vce_send_vcepll_ctlreq(adev);
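Both the UVD and VCE clock setters finish with the same PLL control-request handshake: deassert CTLREQ, assert it, poll for both ACK bits, then deassert again. The core of si_uvd_send_upll_ctlreq (1522), condensed from upstream si.c (the UPLL_* masks and SI_MAX_CTLACKS_ASSERTION_WAIT come from the SI headers; the VCE variant is analogous):

    /* make sure UPLL_CTLREQ is deasserted, then assert it */
    WREG32_P(cg_upll_func_cntl, 0, ~UPLL_CTLREQ_MASK);
    mdelay(10);
    WREG32_P(cg_upll_func_cntl, UPLL_CTLREQ_MASK, ~UPLL_CTLREQ_MASK);

    /* wait for CTLACK and CTLACK2 to be asserted */
    for (i = 0; i < SI_MAX_CTLACKS_ASSERTION_WAIT; i++) {
        uint32_t mask = UPLL_CTLACK_MASK | UPLL_CTLACK2_MASK;

        if ((RREG32(cg_upll_func_cntl) & mask) == mask)
            break;
        mdelay(10);
    }

    /* deassert UPLL_CTLREQ */
    WREG32_P(cg_upll_func_cntl, 0, ~UPLL_CTLREQ_MASK);

    if (i == SI_MAX_CTLACKS_ASSERTION_WAIT)
        return -ETIMEDOUT;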
1874 static void si_pre_asic_init(struct amdgpu_device *adev)
1902 static uint32_t si_get_rev_id(struct amdgpu_device *adev)
1910 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1912 adev->smc_rreg = &si_smc_rreg;
1913 adev->smc_wreg = &si_smc_wreg;
1914 adev->pcie_rreg = &si_pcie_rreg;
1915 adev->pcie_wreg = &si_pcie_wreg;
1916 adev->pciep_rreg = &si_pciep_rreg;
1917 adev->pciep_wreg = &si_pciep_wreg;
1918 adev->uvd_ctx_rreg = si_uvd_ctx_rreg;
1919 adev->uvd_ctx_wreg = si_uvd_ctx_wreg;
1920 adev->didt_rreg = NULL;
1921 adev->didt_wreg = NULL;
1923 adev->asic_funcs = &si_asic_funcs;
1925 adev->rev_id = si_get_rev_id(adev);
1926 adev->external_rev_id = 0xFF;
1927 switch (adev->asic_type) {
1929 adev->cg_flags =
1943 adev->pg_flags = 0;
1944 adev->external_rev_id = (adev->rev_id == 0) ? 1 :
1945 (adev->rev_id == 1) ? 5 : 6;
1948 adev->cg_flags =
1964 adev->pg_flags = 0;
1965 adev->external_rev_id = adev->rev_id + 20;
1969 adev->cg_flags =
1985 adev->pg_flags = 0;
1987 adev->external_rev_id = adev->rev_id + 40;
1990 adev->cg_flags =
2005 adev->pg_flags = 0;
2006 adev->external_rev_id = 60;
2009 adev->cg_flags =
2023 adev->pg_flags = 0;
2024 adev->external_rev_id = 70;
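For reference, the external_rev_id assignments above map per ASIC as follows (the case labels are inferred from the switch order in upstream si.c, since the listing omits them):

    CHIP_TAHITI:   rev_id 0 -> 1, rev_id 1 -> 5, otherwise 6
    CHIP_PITCAIRN: rev_id + 20
    CHIP_VERDE:    rev_id + 40
    CHIP_OLAND:    60
    CHIP_HAINAN:   70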
2045 static void si_init_golden_registers(struct amdgpu_device *adev)
2047 switch (adev->asic_type) {
2049 amdgpu_device_program_register_sequence(adev,
2052 amdgpu_device_program_register_sequence(adev,
2055 amdgpu_device_program_register_sequence(adev,
2058 amdgpu_device_program_register_sequence(adev,
2063 amdgpu_device_program_register_sequence(adev,
2066 amdgpu_device_program_register_sequence(adev,
2069 amdgpu_device_program_register_sequence(adev,
2074 amdgpu_device_program_register_sequence(adev,
2077 amdgpu_device_program_register_sequence(adev,
2080 amdgpu_device_program_register_sequence(adev,
2083 amdgpu_device_program_register_sequence(adev,
2088 amdgpu_device_program_register_sequence(adev,
2091 amdgpu_device_program_register_sequence(adev,
2094 amdgpu_device_program_register_sequence(adev,
2099 amdgpu_device_program_register_sequence(adev,
2102 amdgpu_device_program_register_sequence(adev,
2105 amdgpu_device_program_register_sequence(adev,
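Every table handed to amdgpu_device_program_register_sequence above is a flat u32 array of (offset, and_mask, or_mask) triples plus its element count. The helper applies each triple roughly like this (simplified from amdgpu_device.c; an and_mask of 0xffffffff means a plain overwrite):

    for (i = 0; i < array_size; i += 3) {
        reg      = registers[i + 0];
        and_mask = registers[i + 1];
        or_mask  = registers[i + 2];

        if (and_mask == 0xffffffff)
            tmp = or_mask;      /* full overwrite */
        else
            tmp = (RREG32(reg) & ~and_mask) | (or_mask & and_mask);
        WREG32(reg, tmp);
    }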
2116 static void si_pcie_gen3_enable(struct amdgpu_device *adev)
2118 struct pci_dev *root = adev->pdev->bus->self;
2123 if (pci_is_root_bus(adev->pdev->bus))
2129 if (adev->flags & AMD_IS_APU)
2132 if (!(adev->pm.pcie_gen_mask & (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2 |
2139 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) {
2145 } else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2) {
2153 if (!pci_is_pcie(root) || !pci_is_pcie(adev->pdev))
2156 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) {
2163 pcie_capability_set_word(adev->pdev, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD);
2180 pcie_capability_read_word(adev->pdev,
2188 pcie_capability_read_word(adev->pdev,
2194 pcie_capability_read_word(adev->pdev,
2212 pcie_capability_clear_and_set_word(adev->pdev, PCI_EXP_LNKCTL,
2228 pcie_capability_read_word(adev->pdev,
2236 pcie_capability_write_word(adev->pdev,
2251 pcie_capability_read_word(adev->pdev, PCI_EXP_LNKCTL2, &tmp16);
2254 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3)
2256 else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2)
2260 pcie_capability_write_word(adev->pdev, PCI_EXP_LNKCTL2, tmp16);
2266 for (i = 0; i < adev->usec_timeout; i++) {
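The tail of si_pcie_gen3_enable requests a target link speed in LNKCTL2 and then triggers a link retrain, waiting up to usec_timeout for the speed change to complete. The speed-selection step, condensed from recent upstream kernels (the PCI_EXP_LNKCTL2_TLS_* encodings are the standard ones from include/uapi/linux/pci_regs.h; older versions of this code used raw numeric values):

    pcie_capability_read_word(adev->pdev, PCI_EXP_LNKCTL2, &tmp16);
    tmp16 &= ~PCI_EXP_LNKCTL2_TLS;
    if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3)
        tmp16 |= PCI_EXP_LNKCTL2_TLS_8_0GT;   /* gen3 */
    else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2)
        tmp16 |= PCI_EXP_LNKCTL2_TLS_5_0GT;   /* gen2 */
    else
        tmp16 |= PCI_EXP_LNKCTL2_TLS_2_5GT;   /* gen1 */
    pcie_capability_write_word(adev->pdev, PCI_EXP_LNKCTL2, tmp16);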
2274 static inline u32 si_pif_phy0_rreg(struct amdgpu_device *adev, u32 reg)
2279 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2282 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2286 static inline void si_pif_phy0_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
2290 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2293 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2296 static inline u32 si_pif_phy1_rreg(struct amdgpu_device *adev, u32 reg)
2301 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2304 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2308 static inline void si_pif_phy1_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
2312 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2315 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2317 static void si_program_aspm(struct amdgpu_device *adev)
2326 if (adev->flags & AMD_IS_APU)
2359 orig = data = si_pif_phy0_rreg(adev,PB0_PIF_PWRDOWN_0);
2363 si_pif_phy0_wreg(adev,PB0_PIF_PWRDOWN_0, data);
2365 orig = data = si_pif_phy0_rreg(adev,PB0_PIF_PWRDOWN_1);
2369 si_pif_phy0_wreg(adev,PB0_PIF_PWRDOWN_1, data);
2371 orig = data = si_pif_phy1_rreg(adev,PB1_PIF_PWRDOWN_0);
2375 si_pif_phy1_wreg(adev,PB1_PIF_PWRDOWN_0, data);
2377 orig = data = si_pif_phy1_rreg(adev,PB1_PIF_PWRDOWN_1);
2381 si_pif_phy1_wreg(adev,PB1_PIF_PWRDOWN_1, data);
2383 if ((adev->asic_type != CHIP_OLAND) && (adev->asic_type != CHIP_HAINAN)) {
2384 orig = data = si_pif_phy0_rreg(adev,PB0_PIF_PWRDOWN_0);
2387 si_pif_phy0_wreg(adev,PB0_PIF_PWRDOWN_0, data);
2389 orig = data = si_pif_phy0_rreg(adev,PB0_PIF_PWRDOWN_1);
2392 si_pif_phy0_wreg(adev,PB0_PIF_PWRDOWN_1, data);
2394 orig = data = si_pif_phy0_rreg(adev,PB0_PIF_PWRDOWN_2);
2397 si_pif_phy0_wreg(adev,PB0_PIF_PWRDOWN_2, data);
2399 orig = data = si_pif_phy0_rreg(adev,PB0_PIF_PWRDOWN_3);
2402 si_pif_phy0_wreg(adev,PB0_PIF_PWRDOWN_3, data);
2404 orig = data = si_pif_phy1_rreg(adev,PB1_PIF_PWRDOWN_0);
2407 si_pif_phy1_wreg(adev,PB1_PIF_PWRDOWN_0, data);
2409 orig = data = si_pif_phy1_rreg(adev,PB1_PIF_PWRDOWN_1);
2412 si_pif_phy1_wreg(adev,PB1_PIF_PWRDOWN_1, data);
2414 orig = data = si_pif_phy1_rreg(adev,PB1_PIF_PWRDOWN_2);
2417 si_pif_phy1_wreg(adev,PB1_PIF_PWRDOWN_2, data);
2419 orig = data = si_pif_phy1_rreg(adev,PB1_PIF_PWRDOWN_3);
2422 si_pif_phy1_wreg(adev,PB1_PIF_PWRDOWN_3, data);
2430 orig = data = si_pif_phy0_rreg(adev,PB0_PIF_CNTL);
2432 if ((adev->asic_type == CHIP_OLAND) || (adev->asic_type == CHIP_HAINAN))
2435 si_pif_phy0_wreg(adev,PB0_PIF_CNTL, data);
2437 orig = data = si_pif_phy1_rreg(adev,PB1_PIF_CNTL);
2439 if ((adev->asic_type == CHIP_OLAND) || (adev->asic_type == CHIP_HAINAN))
2442 si_pif_phy1_wreg(adev,PB1_PIF_CNTL, data);
2445 !pci_is_root_bus(adev->pdev->bus)) {
2446 struct pci_dev *root = adev->pdev->bus->self;
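si_program_aspm (2317 onward) is one long run of a single idiom, visible in the "orig = data =" lines above: snapshot a register, adjust bits, and write back only if the value actually changed, so untouched registers are never re-written. A sketch of the idiom with a hypothetical field (SOME_FIELD_MASK/SOME_FIELD are placeholders for illustration, not real SI register fields):

    orig = data = si_pif_phy0_rreg(adev, PB0_PIF_PWRDOWN_0);
    data &= ~SOME_FIELD_MASK;   /* hypothetical field, for illustration */
    data |= SOME_FIELD(7);
    if (orig != data)
        si_pif_phy0_wreg(adev, PB0_PIF_PWRDOWN_0, data);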
2521 static void si_fix_pci_max_read_req_size(struct amdgpu_device *adev)
2526 readrq = pcie_get_readrq(adev->pdev);
2529 pcie_set_readrq(adev->pdev, 512);
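si_fix_pci_max_read_req_size is a small quirk fix: if the BIOS or OS left the PCIe max read request size at a problematic encoding, force it to 512 bytes. The whole function, per upstream:

    static void si_fix_pci_max_read_req_size(struct amdgpu_device *adev)
    {
        int readrq;
        u16 v;

        readrq = pcie_get_readrq(adev->pdev);
        v = ffs(readrq) - 8;    /* recover the MRRS encoding (0..7) */
        /* if the BIOS or OS set MAX_READ_REQUEST_SIZE to an invalid
         * value, fix it to avoid hangs or performance issues */
        if ((v == 0) || (v == 6) || (v == 7))
            pcie_set_readrq(adev->pdev, 512);
    }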
2534 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2536 si_fix_pci_max_read_req_size(adev);
2537 si_init_golden_registers(adev);
2538 si_pcie_gen3_enable(adev);
2539 si_program_aspm(adev);
2551 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2553 return si_common_hw_fini(adev);
2558 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2560 return si_common_hw_init(adev);
2616 int si_set_ip_blocks(struct amdgpu_device *adev)
2618 switch (adev->asic_type) {
2622 amdgpu_device_ip_block_add(adev, &si_common_ip_block);
2623 amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block);
2624 amdgpu_device_ip_block_add(adev, &si_ih_ip_block);
2625 amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block);
2626 amdgpu_device_ip_block_add(adev, &si_dma_ip_block);
2627 amdgpu_device_ip_block_add(adev, &si_smu_ip_block);
2628 if (adev->enable_virtual_display)
2629 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2631 else if (amdgpu_device_has_dc_support(adev))
2632 amdgpu_device_ip_block_add(adev, &dm_ip_block);
2635 amdgpu_device_ip_block_add(adev, &dce_v6_0_ip_block);
2636 amdgpu_device_ip_block_add(adev, &uvd_v3_1_ip_block);
2637 /* amdgpu_device_ip_block_add(adev, &vce_v1_0_ip_block); */
2640 amdgpu_device_ip_block_add(adev, &si_common_ip_block);
2641 amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block);
2642 amdgpu_device_ip_block_add(adev, &si_ih_ip_block);
2643 amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block);
2644 amdgpu_device_ip_block_add(adev, &si_dma_ip_block);
2645 amdgpu_device_ip_block_add(adev, &si_smu_ip_block);
2646 if (adev->enable_virtual_display)
2647 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2649 else if (amdgpu_device_has_dc_support(adev))
2650 amdgpu_device_ip_block_add(adev, &dm_ip_block);
2653 amdgpu_device_ip_block_add(adev, &dce_v6_4_ip_block);
2654 amdgpu_device_ip_block_add(adev, &uvd_v3_1_ip_block);
2655 /* amdgpu_device_ip_block_add(adev, &vce_v1_0_ip_block); */
2658 amdgpu_device_ip_block_add(adev, &si_common_ip_block);
2659 amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block);
2660 amdgpu_device_ip_block_add(adev, &si_ih_ip_block);
2661 amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block);
2662 amdgpu_device_ip_block_add(adev, &si_dma_ip_block);
2663 amdgpu_device_ip_block_add(adev, &si_smu_ip_block);
2664 if (adev->enable_virtual_display)
2665 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
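The listing ends inside the CHIP_HAINAN branch, which follows the same pattern as the other cases minus the display and UVD blocks (Hainan is a headless part). Worth noting: amdgpu_device_ip_block_add simply appends to adev->ip_blocks, so the order used above (common -> gmc -> ih -> gfx -> dma -> smu -> display -> uvd) is also the order the blocks are initialized in. The helper, roughly as in amdgpu_device.c:

    int amdgpu_device_ip_block_add(struct amdgpu_device *adev,
                                   const struct amdgpu_ip_block_version *ip_block_version)
    {
        if (!ip_block_version)
            return -EINVAL;

        /* appended in call order; init/fini later walk this array */
        adev->ip_blocks[adev->num_ip_blocks++].version = ip_block_version;

        return 0;
    }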