Lines Matching defs:adev (drivers/gpu/drm/amd/amdgpu/cik.c)
76 static u32 cik_pcie_rreg(struct amdgpu_device *adev, u32 reg)
81 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
85 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
89 static void cik_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
93 spin_lock_irqsave(&adev->pcie_idx_lock, flags);
98 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
101 static u32 cik_smc_rreg(struct amdgpu_device *adev, u32 reg)
106 spin_lock_irqsave(&adev->smc_idx_lock, flags);
109 spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
113 static void cik_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
117 spin_lock_irqsave(&adev->smc_idx_lock, flags);
120 spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
123 static u32 cik_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg)
128 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
131 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
135 static void cik_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
139 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
142 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
145 static u32 cik_didt_rreg(struct amdgpu_device *adev, u32 reg)
150 spin_lock_irqsave(&adev->didt_idx_lock, flags);
153 spin_unlock_irqrestore(&adev->didt_idx_lock, flags);
157 static void cik_didt_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
161 spin_lock_irqsave(&adev->didt_idx_lock, flags);
164 spin_unlock_irqrestore(&adev->didt_idx_lock, flags);
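All of the accessors above (lines 76-164) follow the same indexed-register pattern: take the per-block index spinlock, program the index register, access the data register, then release the lock. A minimal sketch of the PCIE pair, assuming the CIK register names mmPCIE_INDEX/mmPCIE_DATA and the driver's WREG32/RREG32 MMIO helpers:

static u32 cik_pcie_rreg(struct amdgpu_device *adev, u32 reg)
{
        unsigned long flags;
        u32 r;

        /* serialize against other users of the PCIE index/data pair */
        spin_lock_irqsave(&adev->pcie_idx_lock, flags);
        WREG32(mmPCIE_INDEX, reg);      /* select the indirect register */
        (void)RREG32(mmPCIE_INDEX);     /* read back to post the write */
        r = RREG32(mmPCIE_DATA);        /* fetch its value */
        spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
        return r;
}

static void cik_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
{
        unsigned long flags;

        spin_lock_irqsave(&adev->pcie_idx_lock, flags);
        WREG32(mmPCIE_INDEX, reg);
        (void)RREG32(mmPCIE_INDEX);
        WREG32(mmPCIE_DATA, v);         /* write the new value */
        (void)RREG32(mmPCIE_DATA);
        spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
}

The smc, uvd_ctx, and didt variants differ only in the index/data register pair they address and the lock they take (smc_idx_lock, uvd_ctx_idx_lock, didt_idx_lock).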
751 static void cik_init_golden_registers(struct amdgpu_device *adev)
754 mutex_lock(&adev->grbm_idx_mutex);
756 switch (adev->asic_type) {
758 amdgpu_device_program_register_sequence(adev,
761 amdgpu_device_program_register_sequence(adev,
764 amdgpu_device_program_register_sequence(adev,
767 amdgpu_device_program_register_sequence(adev,
772 amdgpu_device_program_register_sequence(adev,
775 amdgpu_device_program_register_sequence(adev,
778 amdgpu_device_program_register_sequence(adev,
781 amdgpu_device_program_register_sequence(adev,
786 amdgpu_device_program_register_sequence(adev,
789 amdgpu_device_program_register_sequence(adev,
792 amdgpu_device_program_register_sequence(adev,
795 amdgpu_device_program_register_sequence(adev,
800 amdgpu_device_program_register_sequence(adev,
803 amdgpu_device_program_register_sequence(adev,
806 amdgpu_device_program_register_sequence(adev,
809 amdgpu_device_program_register_sequence(adev,
814 amdgpu_device_program_register_sequence(adev,
817 amdgpu_device_program_register_sequence(adev,
820 amdgpu_device_program_register_sequence(adev,
823 amdgpu_device_program_register_sequence(adev,
830 mutex_unlock(&adev->grbm_idx_mutex);
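cik_init_golden_registers() holds grbm_idx_mutex while replaying per-ASIC "golden" register tables; each amdgpu_device_program_register_sequence() call above applies one table. A condensed sketch of the shape, with the bonaire_* table names as assumed stand-ins for the tables in cik.c and the remaining ASIC cases omitted:

static void cik_init_golden_registers(struct amdgpu_device *adev)
{
        /* GRBM SE/SH selection must not race with these register writes */
        mutex_lock(&adev->grbm_idx_mutex);

        switch (adev->asic_type) {
        case CHIP_BONAIRE:
                amdgpu_device_program_register_sequence(adev,
                                                        bonaire_mgcg_cgcg_init,
                                                        ARRAY_SIZE(bonaire_mgcg_cgcg_init));
                amdgpu_device_program_register_sequence(adev,
                                                        bonaire_golden_registers,
                                                        ARRAY_SIZE(bonaire_golden_registers));
                /* two further tables (common and spm registers) follow */
                break;
        /* CHIP_HAWAII, CHIP_KAVERI, CHIP_KABINI, CHIP_MULLINS: same pattern,
         * different tables (lines 772-823) */
        default:
                break;
        }

        mutex_unlock(&adev->grbm_idx_mutex);
}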
836 * @adev: amdgpu_device pointer
841 static u32 cik_get_xclk(struct amdgpu_device *adev)
843 u32 reference_clock = adev->clock.spll.reference_freq;
845 if (adev->flags & AMD_IS_APU) {
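cik_get_xclk() derives the crystal/reference clock from the SPLL reference frequency, halving or quartering it when the relevant divider bit is set. A sketch, assuming the ixGENERAL_PWRMGT/ixCG_CLKPIN_CNTL SMC registers and their divider mask names:

static u32 cik_get_xclk(struct amdgpu_device *adev)
{
        u32 reference_clock = adev->clock.spll.reference_freq;

        if (adev->flags & AMD_IS_APU) {
                /* APUs may clock the GPU counter at half the reference */
                if (RREG32_SMC(ixGENERAL_PWRMGT) & GENERAL_PWRMGT__GPU_COUNTER_CLK_MASK)
                        return reference_clock / 2;
        } else {
                /* dGPUs may divide the crystal input by four */
                if (RREG32_SMC(ixCG_CLKPIN_CNTL) & CG_CLKPIN_CNTL__XTALIN_DIVIDE_MASK)
                        return reference_clock / 4;
        }
        return reference_clock;
}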
858 * @adev: amdgpu_device pointer
868 void cik_srbm_select(struct amdgpu_device *adev,
879 static void cik_vga_set_state(struct amdgpu_device *adev, bool state)
891 static bool cik_read_disabled_bios(struct amdgpu_device *adev)
901 if (adev->mode_info.num_crtc) {
910 if (adev->mode_info.num_crtc) {
923 r = amdgpu_read_bios(adev);
927 if (adev->mode_info.num_crtc) {
936 static bool cik_read_bios_from_rom(struct amdgpu_device *adev,
948 if (adev->flags & AMD_IS_APU)
954 spin_lock_irqsave(&adev->smc_idx_lock, flags);
962 spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
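cik_read_bios_from_rom() copies the VBIOS image out of the board ROM through the SMC index/data window, which is why it takes smc_idx_lock (lines 954/962). A sketch of the flow, assuming the mmSMC_IND_INDEX_0/mmSMC_IND_DATA_0 window and the ixROM_INDEX/ixROM_DATA indirect registers:

static bool cik_read_bios_from_rom(struct amdgpu_device *adev,
                                   u8 *bios, u32 length_bytes)
{
        u32 *dw_ptr = (u32 *)bios;
        unsigned long flags;
        u32 i, length_dw;

        if (bios == NULL || length_bytes == 0)
                return false;
        /* on APUs the VBIOS is part of the system BIOS image instead */
        if (adev->flags & AMD_IS_APU)
                return false;

        length_dw = ALIGN(length_bytes, 4) / 4;

        spin_lock_irqsave(&adev->smc_idx_lock, flags);
        /* point the ROM window at offset 0, then stream dwords out */
        WREG32(mmSMC_IND_INDEX_0, ixROM_INDEX);
        WREG32(mmSMC_IND_DATA_0, 0);
        WREG32(mmSMC_IND_INDEX_0, ixROM_DATA);
        for (i = 0; i < length_dw; i++)
                dw_ptr[i] = RREG32(mmSMC_IND_DATA_0);
        spin_unlock_irqrestore(&adev->smc_idx_lock, flags);

        return true;
}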
1046 static uint32_t cik_get_register_value(struct amdgpu_device *adev,
1057 return adev->gfx.config.rb_config[se_idx][sh_idx].rb_backend_disable;
1059 return adev->gfx.config.rb_config[se_idx][sh_idx].user_rb_backend_disable;
1061 return adev->gfx.config.rb_config[se_idx][sh_idx].raster_config;
1063 return adev->gfx.config.rb_config[se_idx][sh_idx].raster_config_1;
1066 mutex_lock(&adev->grbm_idx_mutex);
1068 amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff);
1073 amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
1074 mutex_unlock(&adev->grbm_idx_mutex);
1081 return adev->gfx.config.gb_addr_config;
1083 return adev->gfx.config.mc_arb_ramcfg;
1117 return adev->gfx.config.tile_mode_array[idx];
1135 return adev->gfx.config.macrotile_mode_array[idx];
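For indexed GFX registers, cik_get_register_value() either returns values cached in adev->gfx.config (rb_config, gb_addr_config, tile/macrotile mode arrays, lines 1057-1135) or selects the requested SE/SH through GRBM, reads the register, and restores broadcast mode, all under grbm_idx_mutex. A sketch of that select/read/restore step as a small helper, built from the calls shown at lines 1066-1074:

static u32 cik_read_indexed_register(struct amdgpu_device *adev, u32 se_num,
                                     u32 sh_num, u32 reg_offset)
{
        u32 val;

        mutex_lock(&adev->grbm_idx_mutex);
        /* 0xffffffff means "broadcast", i.e. no specific SE/SH selected */
        if (se_num != 0xffffffff || sh_num != 0xffffffff)
                amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff);

        val = RREG32(reg_offset);       /* read while the window is selected */

        if (se_num != 0xffffffff || sh_num != 0xffffffff)
                amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
        mutex_unlock(&adev->grbm_idx_mutex);
        return val;
}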
1142 static int cik_read_register(struct amdgpu_device *adev, u32 se_num,
1154 *value = cik_get_register_value(adev, indexed, se_num, sh_num,
1167 static void kv_save_regs_for_reset(struct amdgpu_device *adev,
1181 static void kv_restore_regs_for_reset(struct amdgpu_device *adev,
1254 static int cik_gpu_pci_config_reset(struct amdgpu_device *adev)
1260 dev_info(adev->dev, "GPU pci config reset\n");
1262 if (adev->flags & AMD_IS_APU)
1263 kv_save_regs_for_reset(adev, &kv_save);
1266 pci_clear_master(adev->pdev);
1268 amdgpu_device_pci_config_reset(adev);
1273 for (i = 0; i < adev->usec_timeout; i++) {
1276 pci_set_master(adev->pdev);
1277 adev->has_hw_reset = true;
1285 if (adev->flags & AMD_IS_APU)
1286 kv_restore_regs_for_reset(adev, &kv_save);
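cik_gpu_pci_config_reset() is the full-chip fallback: on APUs it first snapshots a few registers (kv_save_regs_for_reset), disables bus mastering, triggers the PCI config-space reset, then polls until the chip reports a valid memory size before restoring state. A sketch of the flow, assuming mmCONFIG_MEMSIZE as the "ASIC is back" indicator and a kv_reset_save_regs scratch struct:

static int cik_gpu_pci_config_reset(struct amdgpu_device *adev)
{
        struct kv_reset_save_regs kv_save = { 0 };
        u32 i;
        int r = -EINVAL;

        dev_info(adev->dev, "GPU pci config reset\n");

        if (adev->flags & AMD_IS_APU)
                kv_save_regs_for_reset(adev, &kv_save);

        /* quiesce the bus, then hit the config-space reset */
        pci_clear_master(adev->pdev);
        amdgpu_device_pci_config_reset(adev);

        udelay(100);

        /* wait for the ASIC to come out of reset */
        for (i = 0; i < adev->usec_timeout; i++) {
                if (RREG32(mmCONFIG_MEMSIZE) != 0xffffffff) {
                        pci_set_master(adev->pdev);     /* re-enable bus mastering */
                        adev->has_hw_reset = true;
                        r = 0;
                        break;
                }
                udelay(1);
        }

        if (adev->flags & AMD_IS_APU)
                kv_restore_regs_for_reset(adev, &kv_save);

        return r;
}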
1294 * @adev: amdgpu_device pointer
1300 static int cik_asic_pci_config_reset(struct amdgpu_device *adev)
1304 amdgpu_atombios_scratch_regs_engine_hung(adev, true);
1306 r = cik_gpu_pci_config_reset(adev);
1308 amdgpu_atombios_scratch_regs_engine_hung(adev, false);
1313 static bool cik_asic_supports_baco(struct amdgpu_device *adev)
1315 switch (adev->asic_type) {
1318 return amdgpu_dpm_is_baco_supported(adev);
1325 cik_asic_reset_method(struct amdgpu_device *adev)
1334 dev_warn(adev->dev, "Specified reset:%d isn't supported, using AUTO instead.\n",
1337 switch (adev->asic_type) {
1340 /* smu7_asic_get_baco_capability(adev, &baco_reset); */
1344 baco_reset = cik_asic_supports_baco(adev);
1360 * @adev: amdgpu_device pointer
1366 static int cik_asic_reset(struct amdgpu_device *adev)
1370 if (cik_asic_reset_method(adev) == AMD_RESET_METHOD_BACO) {
1371 dev_info(adev->dev, "BACO reset\n");
1372 r = amdgpu_dpm_baco_reset(adev);
1374 dev_info(adev->dev, "PCI CONFIG reset\n");
1375 r = cik_asic_pci_config_reset(adev);
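cik_asic_reset() dispatches between BACO (bus active, chip off, available when cik_asic_supports_baco() says so) and the PCI config reset above, based on cik_asic_reset_method(); the PCI path is bracketed by the atombios "engine hung" scratch-register markers at lines 1304/1308. A sketch of the dispatch, assembled from the lines shown above:

static int cik_asic_reset(struct amdgpu_device *adev)
{
        int r;

        if (cik_asic_reset_method(adev) == AMD_RESET_METHOD_BACO) {
                dev_info(adev->dev, "BACO reset\n");
                r = amdgpu_dpm_baco_reset(adev);
        } else {
                dev_info(adev->dev, "PCI CONFIG reset\n");
                r = cik_asic_pci_config_reset(adev);
        }

        return r;
}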
1381 static u32 cik_get_config_memsize(struct amdgpu_device *adev)
1386 static int cik_set_uvd_clock(struct amdgpu_device *adev, u32 clock,
1393 r = amdgpu_atombios_get_clock_dividers(adev,
1416 static int cik_set_uvd_clocks(struct amdgpu_device *adev, u32 vclk, u32 dclk)
1420 r = cik_set_uvd_clock(adev, vclk, ixCG_VCLK_CNTL, ixCG_VCLK_STATUS);
1424 r = cik_set_uvd_clock(adev, dclk, ixCG_DCLK_CNTL, ixCG_DCLK_STATUS);
1428 static int cik_set_vce_clocks(struct amdgpu_device *adev, u32 evclk, u32 ecclk)
1434 r = amdgpu_atombios_get_clock_dividers(adev,
1465 static void cik_pcie_gen3_enable(struct amdgpu_device *adev)
1467 struct pci_dev *root = adev->pdev->bus->self;
1472 if (pci_is_root_bus(adev->pdev->bus))
1478 if (adev->flags & AMD_IS_APU)
1481 if (!(adev->pm.pcie_gen_mask & (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2 |
1488 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) {
1494 } else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2) {
1502 if (!pci_is_pcie(root) || !pci_is_pcie(adev->pdev))
1505 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) {
1513 pcie_capability_set_word(adev->pdev, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD);
1537 pcie_capability_read_word(adev->pdev,
1545 pcie_capability_read_word(adev->pdev,
1551 pcie_capability_read_word(adev->pdev,
1570 pcie_capability_clear_and_set_word(adev->pdev, PCI_EXP_LNKCTL,
1587 pcie_capability_read_word(adev->pdev,
1595 pcie_capability_write_word(adev->pdev,
1612 pcie_capability_read_word(adev->pdev, PCI_EXP_LNKCTL2, &tmp16);
1615 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3)
1617 else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2)
1621 pcie_capability_write_word(adev->pdev, PCI_EXP_LNKCTL2, tmp16);
1627 for (i = 0; i < adev->usec_timeout; i++) {
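Near the end of cik_pcie_gen3_enable(), the target link speed in PCI_EXP_LNKCTL2 is chosen from the gen mask reported in adev->pm.pcie_gen_mask (lines 1612-1621) before the link is retrained and the driver polls for completion. A sketch of that excerpt, assuming the standard PCI_EXP_LNKCTL2_TLS_* target-link-speed macros from pci_regs.h:

        /* program the highest supported target link speed */
        pcie_capability_read_word(adev->pdev, PCI_EXP_LNKCTL2, &tmp16);
        tmp16 &= ~PCI_EXP_LNKCTL2_TLS;
        if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3)
                tmp16 |= PCI_EXP_LNKCTL2_TLS_8_0GT;     /* gen3 */
        else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2)
                tmp16 |= PCI_EXP_LNKCTL2_TLS_5_0GT;     /* gen2 */
        else
                tmp16 |= PCI_EXP_LNKCTL2_TLS_2_5GT;     /* gen1 */
        pcie_capability_write_word(adev->pdev, PCI_EXP_LNKCTL2, tmp16);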
1635 static void cik_program_aspm(struct amdgpu_device *adev)
1644 if (pci_is_root_bus(adev->pdev->bus))
1648 if (adev->flags & AMD_IS_APU)
1723 struct pci_dev *root = adev->pdev->bus->self;
1802 static uint32_t cik_get_rev_id(struct amdgpu_device *adev)
1808 static void cik_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
1818 static void cik_invalidate_hdp(struct amdgpu_device *adev,
1829 static bool cik_need_full_reset(struct amdgpu_device *adev)
1835 static void cik_get_pcie_usage(struct amdgpu_device *adev, uint64_t *count0,
1845 if (adev->flags & AMD_IS_APU)
1881 static bool cik_need_reset_on_init(struct amdgpu_device *adev)
1885 if (adev->flags & AMD_IS_APU)
1898 static uint64_t cik_get_pcie_replay_count(struct amdgpu_device *adev)
1910 static void cik_pre_asic_init(struct amdgpu_device *adev)
1939 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1941 adev->smc_rreg = &cik_smc_rreg;
1942 adev->smc_wreg = &cik_smc_wreg;
1943 adev->pcie_rreg = &cik_pcie_rreg;
1944 adev->pcie_wreg = &cik_pcie_wreg;
1945 adev->uvd_ctx_rreg = &cik_uvd_ctx_rreg;
1946 adev->uvd_ctx_wreg = &cik_uvd_ctx_wreg;
1947 adev->didt_rreg = &cik_didt_rreg;
1948 adev->didt_wreg = &cik_didt_wreg;
1950 adev->asic_funcs = &cik_asic_funcs;
1952 adev->rev_id = cik_get_rev_id(adev);
1953 adev->external_rev_id = 0xFF;
1954 switch (adev->asic_type) {
1956 adev->cg_flags =
1973 adev->pg_flags = 0;
1974 adev->external_rev_id = adev->rev_id + 0x14;
1977 adev->cg_flags =
1993 adev->pg_flags = 0;
1994 adev->external_rev_id = 0x28;
1997 adev->cg_flags =
2012 adev->pg_flags =
2024 if (adev->pdev->device == 0x1312 ||
2025 adev->pdev->device == 0x1316 ||
2026 adev->pdev->device == 0x1317)
2027 adev->external_rev_id = 0x41;
2029 adev->external_rev_id = 0x1;
2033 adev->cg_flags =
2048 adev->pg_flags =
2058 if (adev->asic_type == CHIP_KABINI) {
2059 if (adev->rev_id == 0)
2060 adev->external_rev_id = 0x81;
2061 else if (adev->rev_id == 1)
2062 adev->external_rev_id = 0x82;
2063 else if (adev->rev_id == 2)
2064 adev->external_rev_id = 0x85;
2066 adev->external_rev_id = adev->rev_id + 0xa1;
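cik_common_early_init() (line 1939 onward) hooks the indirect accessors defined at the top of the file into the device, latches the silicon rev_id, and then fills in cg_flags, pg_flags, and external_rev_id per ASIC. A condensed sketch of the body; the first switch case (external_rev_id = rev_id + 0x14) corresponds to Bonaire in cik.c, and the zero cg_flags is a stand-in for the real OR of AMD_CG_SUPPORT_* bits:

        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        adev->smc_rreg = &cik_smc_rreg;
        adev->smc_wreg = &cik_smc_wreg;
        adev->pcie_rreg = &cik_pcie_rreg;
        adev->pcie_wreg = &cik_pcie_wreg;
        adev->uvd_ctx_rreg = &cik_uvd_ctx_rreg;
        adev->uvd_ctx_wreg = &cik_uvd_ctx_wreg;
        adev->didt_rreg = &cik_didt_rreg;
        adev->didt_wreg = &cik_didt_wreg;
        adev->asic_funcs = &cik_asic_funcs;

        adev->rev_id = cik_get_rev_id(adev);
        adev->external_rev_id = 0xFF;           /* overwritten per ASIC below */

        switch (adev->asic_type) {
        case CHIP_BONAIRE:
                adev->cg_flags = 0;             /* stand-in for the AMD_CG_SUPPORT_* bits */
                adev->pg_flags = 0;
                adev->external_rev_id = adev->rev_id + 0x14;
                break;
        /* CHIP_HAWAII, CHIP_KAVERI, CHIP_KABINI, CHIP_MULLINS follow with their
         * own flags and external_rev_id mappings (lines 1977-2066) */
        default:
                return -EINVAL;
        }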
2088 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2091 cik_init_golden_registers(adev);
2093 cik_pcie_gen3_enable(adev);
2095 cik_program_aspm(adev);
2107 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2109 return cik_common_hw_fini(adev);
2114 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2116 return cik_common_hw_init(adev);
2173 int cik_set_ip_blocks(struct amdgpu_device *adev)
2175 switch (adev->asic_type) {
2177 amdgpu_device_ip_block_add(adev, &cik_common_ip_block);
2178 amdgpu_device_ip_block_add(adev, &gmc_v7_0_ip_block);
2179 amdgpu_device_ip_block_add(adev, &cik_ih_ip_block);
2180 amdgpu_device_ip_block_add(adev, &gfx_v7_2_ip_block);
2181 amdgpu_device_ip_block_add(adev, &cik_sdma_ip_block);
2182 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block);
2183 if (adev->enable_virtual_display)
2184 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2186 else if (amdgpu_device_has_dc_support(adev))
2187 amdgpu_device_ip_block_add(adev, &dm_ip_block);
2190 amdgpu_device_ip_block_add(adev, &dce_v8_2_ip_block);
2191 amdgpu_device_ip_block_add(adev, &uvd_v4_2_ip_block);
2192 amdgpu_device_ip_block_add(adev, &vce_v2_0_ip_block);
2195 amdgpu_device_ip_block_add(adev, &cik_common_ip_block);
2196 amdgpu_device_ip_block_add(adev, &gmc_v7_0_ip_block);
2197 amdgpu_device_ip_block_add(adev, &cik_ih_ip_block);
2198 amdgpu_device_ip_block_add(adev, &gfx_v7_3_ip_block);
2199 amdgpu_device_ip_block_add(adev, &cik_sdma_ip_block);
2200 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block);
2201 if (adev->enable_virtual_display)
2202 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2204 else if (amdgpu_device_has_dc_support(adev))
2205 amdgpu_device_ip_block_add(adev, &dm_ip_block);
2208 amdgpu_device_ip_block_add(adev, &dce_v8_5_ip_block);
2209 amdgpu_device_ip_block_add(adev, &uvd_v4_2_ip_block);
2210 amdgpu_device_ip_block_add(adev, &vce_v2_0_ip_block);
2213 amdgpu_device_ip_block_add(adev, &cik_common_ip_block);
2214 amdgpu_device_ip_block_add(adev, &gmc_v7_0_ip_block);
2215 amdgpu_device_ip_block_add(adev, &cik_ih_ip_block);
2216 amdgpu_device_ip_block_add(adev, &gfx_v7_1_ip_block);
2217 amdgpu_device_ip_block_add(adev, &cik_sdma_ip_block);
2218 amdgpu_device_ip_block_add(adev, &kv_smu_ip_block);
2219 if (adev->enable_virtual_display)
2220 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2222 else if (amdgpu_device_has_dc_support(adev))
2223 amdgpu_device_ip_block_add(adev, &dm_ip_block);
2226 amdgpu_device_ip_block_add(adev, &dce_v8_1_ip_block);
2228 amdgpu_device_ip_block_add(adev, &uvd_v4_2_ip_block);
2229 amdgpu_device_ip_block_add(adev, &vce_v2_0_ip_block);
2233 amdgpu_device_ip_block_add(adev, &cik_common_ip_block);
2234 amdgpu_device_ip_block_add(adev, &gmc_v7_0_ip_block);
2235 amdgpu_device_ip_block_add(adev, &cik_ih_ip_block);
2236 amdgpu_device_ip_block_add(adev, &gfx_v7_2_ip_block);
2237 amdgpu_device_ip_block_add(adev, &cik_sdma_ip_block);
2238 amdgpu_device_ip_block_add(adev, &kv_smu_ip_block);
2239 if (adev->enable_virtual_display)
2240 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2242 else if (amdgpu_device_has_dc_support(adev))
2243 amdgpu_device_ip_block_add(adev, &dm_ip_block);
2246 amdgpu_device_ip_block_add(adev, &dce_v8_3_ip_block);
2247 amdgpu_device_ip_block_add(adev, &uvd_v4_2_ip_block);
2248 amdgpu_device_ip_block_add(adev, &vce_v2_0_ip_block);
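cik_set_ip_blocks() registers the IP blocks for each ASIC in initialization order: common, GMC, IH, GFX, SDMA, SMU/powerplay, exactly one display block (virtual DCE, DC/DM, or a DCE 8.x variant), then UVD and VCE. A sketch of one case, following the first branch above (gfx_v7_2 + pp_smu + dce_v8_2, which matches Bonaire in cik.c); the other branches swap in their GFX, SMU, and DCE variants:

int cik_set_ip_blocks(struct amdgpu_device *adev)
{
        switch (adev->asic_type) {
        case CHIP_BONAIRE:
                amdgpu_device_ip_block_add(adev, &cik_common_ip_block);
                amdgpu_device_ip_block_add(adev, &gmc_v7_0_ip_block);
                amdgpu_device_ip_block_add(adev, &cik_ih_ip_block);
                amdgpu_device_ip_block_add(adev, &gfx_v7_2_ip_block);
                amdgpu_device_ip_block_add(adev, &cik_sdma_ip_block);
                amdgpu_device_ip_block_add(adev, &pp_smu_ip_block);
                /* exactly one display path is registered; in cik.c the DC/DM
                 * branch sits behind CONFIG_DRM_AMD_DC */
                if (adev->enable_virtual_display)
                        amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
                else if (amdgpu_device_has_dc_support(adev))
                        amdgpu_device_ip_block_add(adev, &dm_ip_block);
                else
                        amdgpu_device_ip_block_add(adev, &dce_v8_2_ip_block);
                amdgpu_device_ip_block_add(adev, &uvd_v4_2_ip_block);
                amdgpu_device_ip_block_add(adev, &vce_v2_0_ip_block);
                break;
        /* CHIP_HAWAII, CHIP_KAVERI, CHIP_KABINI, CHIP_MULLINS: same ordering,
         * different GFX/SMU/DCE variants (lines 2195-2248) */
        default:
                return -EINVAL;
        }

        return 0;
}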