Lines Matching defs:rdev (drivers/gpu/drm/radeon/cik.c)
132 static u32 cik_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
133 static void cik_rlc_stop(struct radeon_device *rdev);
134 static void cik_pcie_gen3_enable(struct radeon_device *rdev);
135 static void cik_program_aspm(struct radeon_device *rdev);
136 static void cik_init_pg(struct radeon_device *rdev);
137 static void cik_init_cg(struct radeon_device *rdev);
138 static void cik_fini_pg(struct radeon_device *rdev);
139 static void cik_fini_cg(struct radeon_device *rdev);
140 static void cik_enable_gui_idle_interrupt(struct radeon_device *rdev,
146 * @rdev: radeon_device pointer
153 int cik_get_allowed_info_register(struct radeon_device *rdev,
179 u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg)
184 spin_lock_irqsave(&rdev->didt_idx_lock, flags);
187 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
191 void cik_didt_wreg(struct radeon_device *rdev, u32 reg, u32 v)
195 spin_lock_irqsave(&rdev->didt_idx_lock, flags);
198 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
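
The two DIDT accessors (179-198), like the PCIE port pair at 237-259, share one indexed-register idiom: take the per-block index spinlock, program an index register, then touch the matching data register. A minimal sketch of the read side, assuming an index/data pair named DIDT_IND_INDEX/DIDT_IND_DATA (placeholder names; only the locking shape is shown by the listing):

    u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg)
    {
            unsigned long flags;
            u32 r;

            /* the index/data pair is shared hardware state: serialize it */
            spin_lock_irqsave(&rdev->didt_idx_lock, flags);
            WREG32(DIDT_IND_INDEX, reg);    /* select the indirect register */
            r = RREG32(DIDT_IND_DATA);      /* then read its value */
            spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
            return r;
    }

The write side is symmetric: a WREG32 to the data register under the same lock.
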
202 int ci_get_temp(struct radeon_device *rdev)
219 int kv_get_temp(struct radeon_device *rdev)
237 u32 cik_pciep_rreg(struct radeon_device *rdev, u32 reg)
242 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
246 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
250 void cik_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
254 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
259 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
1619 static void cik_init_golden_registers(struct radeon_device *rdev)
1621 switch (rdev->family) {
1623 radeon_program_register_sequence(rdev,
1626 radeon_program_register_sequence(rdev,
1629 radeon_program_register_sequence(rdev,
1632 radeon_program_register_sequence(rdev,
1637 radeon_program_register_sequence(rdev,
1640 radeon_program_register_sequence(rdev,
1643 radeon_program_register_sequence(rdev,
1646 radeon_program_register_sequence(rdev,
1651 radeon_program_register_sequence(rdev,
1654 radeon_program_register_sequence(rdev,
1657 radeon_program_register_sequence(rdev,
1660 radeon_program_register_sequence(rdev,
1665 radeon_program_register_sequence(rdev,
1668 radeon_program_register_sequence(rdev,
1671 radeon_program_register_sequence(rdev,
1674 radeon_program_register_sequence(rdev,
1679 radeon_program_register_sequence(rdev,
1682 radeon_program_register_sequence(rdev,
1685 radeon_program_register_sequence(rdev,
1688 radeon_program_register_sequence(rdev,
1700 * @rdev: radeon_device pointer
1705 u32 cik_get_xclk(struct radeon_device *rdev)
1707 u32 reference_clock = rdev->clock.spll.reference_freq;
1709 if (rdev->flags & RADEON_IS_IGP) {
1722 * @rdev: radeon_device pointer
1728 u32 cik_mm_rdoorbell(struct radeon_device *rdev, u32 index)
1730 if (index < rdev->doorbell.num_doorbells) {
1731 return readl(rdev->doorbell.ptr + index);
1741 * @rdev: radeon_device pointer
1748 void cik_mm_wdoorbell(struct radeon_device *rdev, u32 index, u32 v)
1750 if (index < rdev->doorbell.num_doorbells) {
1751 writel(v, rdev->doorbell.ptr + index);
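
cik_mm_rdoorbell/cik_mm_wdoorbell (1728-1751) bounds-check the doorbell index before touching the BAR; rdev->doorbell.ptr is a u32 __iomem pointer, so the pointer arithmetic already scales by 4 bytes. A sketch of the read side; the out-of-range branch is an assumption about the elided lines:

    u32 cik_mm_rdoorbell(struct radeon_device *rdev, u32 index)
    {
            if (index < rdev->doorbell.num_doorbells)
                    return readl(rdev->doorbell.ptr + index);

            /* assumed: complain and return a harmless value */
            DRM_ERROR("reading beyond doorbell aperture: 0x%08x!\n", index);
            return 0;
    }
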
1831 * @rdev: radeon_device pointer
1841 static void cik_srbm_select(struct radeon_device *rdev,
1855 * @rdev: radeon_device pointer
1860 int ci_mc_load_microcode(struct radeon_device *rdev)
1869 if (!rdev->mc_fw)
1872 if (rdev->new_fw) {
1874 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data;
1880 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes));
1883 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
1885 ucode_size = rdev->mc_fw->size / 4;
1887 switch (rdev->family) {
1899 fw_data = (const __be32 *)rdev->mc_fw->data;
1911 if (rdev->new_fw) {
1921 if ((rdev->pdev->device == 0x6649) && ((tmp & 0xff00) == 0x5600)) {
1930 if (rdev->new_fw)
1942 for (i = 0; i < rdev->usec_timeout; i++) {
1947 for (i = 0; i < rdev->usec_timeout; i++) {
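
Both loops at 1942/1947 are instances of the driver's standard bounded poll: spin on a status bit for at most rdev->usec_timeout microseconds. A sketch of the shape, with MC_STATUS_REG/MC_DONE_BIT standing in for the real register and bit (placeholders, not names from the listing):

    /* bounded poll: give up after rdev->usec_timeout microseconds */
    for (i = 0; i < rdev->usec_timeout; i++) {
            if (RREG32(MC_STATUS_REG) & MC_DONE_BIT)
                    break;
            udelay(1);
    }
    if (i == rdev->usec_timeout)
            return -EINVAL;   /* assumed: a timeout fails the load */
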
1960 * @rdev: radeon_device pointer
1966 static int cik_init_microcode(struct radeon_device *rdev)
1981 switch (rdev->family) {
1984 if ((rdev->pdev->revision == 0x80) ||
1985 (rdev->pdev->revision == 0x81) ||
1986 (rdev->pdev->device == 0x665f))
2002 if (rdev->pdev->revision == 0x80)
2055 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2058 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2061 if (rdev->pfp_fw->size != pfp_req_size) {
2063 rdev->pfp_fw->size, fw_name);
2068 err = radeon_ucode_validate(rdev->pfp_fw);
2079 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2082 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2085 if (rdev->me_fw->size != me_req_size) {
2087 rdev->me_fw->size, fw_name);
2091 err = radeon_ucode_validate(rdev->me_fw);
2102 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
2105 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
2108 if (rdev->ce_fw->size != ce_req_size) {
2110 rdev->ce_fw->size, fw_name);
2114 err = radeon_ucode_validate(rdev->ce_fw);
2125 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev);
2128 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev);
2131 if (rdev->mec_fw->size != mec_req_size) {
2133 rdev->mec_fw->size, fw_name);
2137 err = radeon_ucode_validate(rdev->mec_fw);
2147 if (rdev->family == CHIP_KAVERI) {
2149 err = request_firmware(&rdev->mec2_fw, fw_name, rdev->dev);
2153 err = radeon_ucode_validate(rdev->mec2_fw);
2163 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2166 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2169 if (rdev->rlc_fw->size != rlc_req_size) {
2171 rdev->rlc_fw->size, fw_name);
2175 err = radeon_ucode_validate(rdev->rlc_fw);
2186 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev);
2189 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev);
2192 if (rdev->sdma_fw->size != sdma_req_size) {
2194 rdev->sdma_fw->size, fw_name);
2198 err = radeon_ucode_validate(rdev->sdma_fw);
2209 if (!(rdev->flags & RADEON_IS_IGP)) {
2211 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
2214 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
2217 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
2221 if ((rdev->mc_fw->size != mc_req_size) &&
2222 (rdev->mc_fw->size != mc2_req_size)){
2224 rdev->mc_fw->size, fw_name);
2227 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size);
2229 err = radeon_ucode_validate(rdev->mc_fw);
2243 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2246 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2250 release_firmware(rdev->smc_fw);
2251 rdev->smc_fw = NULL;
2253 } else if (rdev->smc_fw->size != smc_req_size) {
2255 rdev->smc_fw->size, fw_name);
2259 err = radeon_ucode_validate(rdev->smc_fw);
2271 rdev->new_fw = false;
2276 rdev->new_fw = true;
2284 release_firmware(rdev->pfp_fw);
2285 rdev->pfp_fw = NULL;
2286 release_firmware(rdev->me_fw);
2287 rdev->me_fw = NULL;
2288 release_firmware(rdev->ce_fw);
2289 rdev->ce_fw = NULL;
2290 release_firmware(rdev->mec_fw);
2291 rdev->mec_fw = NULL;
2292 release_firmware(rdev->mec2_fw);
2293 rdev->mec2_fw = NULL;
2294 release_firmware(rdev->rlc_fw);
2295 rdev->rlc_fw = NULL;
2296 release_firmware(rdev->sdma_fw);
2297 rdev->sdma_fw = NULL;
2298 release_firmware(rdev->mc_fw);
2299 rdev->mc_fw = NULL;
2300 release_firmware(rdev->smc_fw);
2301 rdev->smc_fw = NULL;
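
Each firmware request in cik_init_microcode follows the same try-then-fall-back shape: ask for the new-style file, retry with the legacy name on failure, then size-check and validate. A condensed sketch for the PFP blob; new_chip_name/chip_name and the file-name pattern are assumptions inferred from the paired request_firmware calls at 2055/2058:

    snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", new_chip_name);
    err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
    if (err) {
            /* fall back to the legacy firmware file name */
            snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
            err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
            if (err)
                    goto out;   /* "out" (2284-2301) releases everything */
    }
    if (rdev->pfp_fw->size != pfp_req_size) {
            err = -EINVAL;      /* bogus length: reject the blob */
            goto out;
    }
    err = radeon_ucode_validate(rdev->pfp_fw);
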
2312 * @rdev: radeon_device pointer
2320 static void cik_tiling_mode_table_init(struct radeon_device *rdev)
2322 u32 *tile = rdev->config.cik.tile_mode_array;
2323 u32 *macrotile = rdev->config.cik.macrotile_mode_array;
2325 ARRAY_SIZE(rdev->config.cik.tile_mode_array);
2327 ARRAY_SIZE(rdev->config.cik.macrotile_mode_array);
2330 u32 num_rbs = rdev->config.cik.max_backends_per_se *
2331 rdev->config.cik.max_shader_engines;
2333 switch (rdev->config.cik.mem_row_size_in_kb) {
2346 num_pipe_configs = rdev->config.cik.max_tile_pipes;
3018 * @rdev: radeon_device pointer
3026 static void cik_select_se_sh(struct radeon_device *rdev,
3064 * @rdev: radeon_device pointer
3071 static u32 cik_get_rb_disabled(struct radeon_device *rdev,
3094 * @rdev: radeon_device pointer
3101 static void cik_setup_rb(struct radeon_device *rdev,
3112 cik_select_se_sh(rdev, i, j);
3113 data = cik_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se);
3114 if (rdev->family == CHIP_HAWAII)
3120 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
3129 rdev->config.cik.backend_enable_mask = enabled_rbs;
3132 cik_select_se_sh(rdev, i, 0xffffffff);
3157 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
3163 * @rdev: radeon_device pointer
3168 static void cik_gpu_init(struct radeon_device *rdev)
3176 switch (rdev->family) {
3178 rdev->config.cik.max_shader_engines = 2;
3179 rdev->config.cik.max_tile_pipes = 4;
3180 rdev->config.cik.max_cu_per_sh = 7;
3181 rdev->config.cik.max_sh_per_se = 1;
3182 rdev->config.cik.max_backends_per_se = 2;
3183 rdev->config.cik.max_texture_channel_caches = 4;
3184 rdev->config.cik.max_gprs = 256;
3185 rdev->config.cik.max_gs_threads = 32;
3186 rdev->config.cik.max_hw_contexts = 8;
3188 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3189 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3190 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3191 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3195 rdev->config.cik.max_shader_engines = 4;
3196 rdev->config.cik.max_tile_pipes = 16;
3197 rdev->config.cik.max_cu_per_sh = 11;
3198 rdev->config.cik.max_sh_per_se = 1;
3199 rdev->config.cik.max_backends_per_se = 4;
3200 rdev->config.cik.max_texture_channel_caches = 16;
3201 rdev->config.cik.max_gprs = 256;
3202 rdev->config.cik.max_gs_threads = 32;
3203 rdev->config.cik.max_hw_contexts = 8;
3205 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3206 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3207 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3208 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3212 rdev->config.cik.max_shader_engines = 1;
3213 rdev->config.cik.max_tile_pipes = 4;
3214 rdev->config.cik.max_cu_per_sh = 8;
3215 rdev->config.cik.max_backends_per_se = 2;
3216 rdev->config.cik.max_sh_per_se = 1;
3217 rdev->config.cik.max_texture_channel_caches = 4;
3218 rdev->config.cik.max_gprs = 256;
3219 rdev->config.cik.max_gs_threads = 16;
3220 rdev->config.cik.max_hw_contexts = 8;
3222 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3223 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3224 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3225 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3231 rdev->config.cik.max_shader_engines = 1;
3232 rdev->config.cik.max_tile_pipes = 2;
3233 rdev->config.cik.max_cu_per_sh = 2;
3234 rdev->config.cik.max_sh_per_se = 1;
3235 rdev->config.cik.max_backends_per_se = 1;
3236 rdev->config.cik.max_texture_channel_caches = 2;
3237 rdev->config.cik.max_gprs = 256;
3238 rdev->config.cik.max_gs_threads = 16;
3239 rdev->config.cik.max_hw_contexts = 8;
3241 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3242 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3243 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3244 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3267 rdev->config.cik.num_tile_pipes = rdev->config.cik.max_tile_pipes;
3268 rdev->config.cik.mem_max_burst_length_bytes = 256;
3270 rdev->config.cik.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
3271 if (rdev->config.cik.mem_row_size_in_kb > 4)
3272 rdev->config.cik.mem_row_size_in_kb = 4;
3274 rdev->config.cik.shader_engine_tile_size = 32;
3275 rdev->config.cik.num_gpus = 1;
3276 rdev->config.cik.multi_gpu_tile_size = 64;
3280 switch (rdev->config.cik.mem_row_size_in_kb) {
3300 rdev->config.cik.tile_config = 0;
3301 switch (rdev->config.cik.num_tile_pipes) {
3303 rdev->config.cik.tile_config |= (0 << 0);
3306 rdev->config.cik.tile_config |= (1 << 0);
3309 rdev->config.cik.tile_config |= (2 << 0);
3314 rdev->config.cik.tile_config |= (3 << 0);
3317 rdev->config.cik.tile_config |=
3319 rdev->config.cik.tile_config |=
3321 rdev->config.cik.tile_config |=
3333 cik_tiling_mode_table_init(rdev);
3335 cik_setup_rb(rdev, rdev->config.cik.max_shader_engines,
3336 rdev->config.cik.max_sh_per_se,
3337 rdev->config.cik.max_backends_per_se);
3339 rdev->config.cik.active_cus = 0;
3340 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) {
3341 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) {
3342 rdev->config.cik.active_cus +=
3343 hweight32(cik_get_cu_active_bitmap(rdev, i, j));
3376 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_frontend) |
3377 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_backend) |
3378 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cik.sc_hiz_tile_fifo_size) |
3379 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cik.sc_earlyz_tile_fifo_size)));
3415 * @rdev: radeon_device pointer
3422 static void cik_scratch_init(struct radeon_device *rdev)
3426 rdev->scratch.num_reg = 7;
3427 rdev->scratch.reg_base = SCRATCH_REG0;
3428 for (i = 0; i < rdev->scratch.num_reg; i++) {
3429 rdev->scratch.free[i] = true;
3430 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
3437 * @rdev: radeon_device pointer
3445 int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
3452 r = radeon_scratch_get(rdev, &scratch);
3458 r = radeon_ring_lock(rdev, ring, 3);
3461 radeon_scratch_free(rdev, scratch);
3467 radeon_ring_unlock_commit(rdev, ring, false);
3469 for (i = 0; i < rdev->usec_timeout; i++) {
3475 if (i < rdev->usec_timeout) {
3482 radeon_scratch_free(rdev, scratch);
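
Put together, cik_ring_test (3445-3482) is the classic scratch-register handshake: seed a scratch register with one sentinel, emit a single packet that overwrites it with another, and poll until the CP lands the write. A sketch of the whole flow; the SET_UCONFIG_REG encoding is the usual radeon PACKET3 form:

    int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
    {
            u32 scratch, tmp = 0;
            unsigned i;
            int r;

            r = radeon_scratch_get(rdev, &scratch);
            if (r)
                    return r;
            WREG32(scratch, 0xCAFEDEAD);            /* seed sentinel #1 */

            r = radeon_ring_lock(rdev, ring, 3);    /* 3 dwords: one packet */
            if (r) {
                    radeon_scratch_free(rdev, scratch);
                    return r;
            }
            radeon_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
            radeon_ring_write(ring, (scratch - PACKET3_SET_UCONFIG_REG_START) >> 2);
            radeon_ring_write(ring, 0xDEADBEEF);    /* sentinel #2 */
            radeon_ring_unlock_commit(rdev, ring, false);

            for (i = 0; i < rdev->usec_timeout; i++) {
                    tmp = RREG32(scratch);
                    if (tmp == 0xDEADBEEF)          /* the CP ran our packet */
                            break;
                    udelay(1);
            }
            r = (i < rdev->usec_timeout) ? 0 : -EINVAL;
            radeon_scratch_free(rdev, scratch);
            return r;
    }
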
3489 * @rdev: radeon_device pointer
3494 static void cik_hdp_flush_cp_ring_emit(struct radeon_device *rdev,
3497 struct radeon_ring *ring = &rdev->ring[ridx];
3534 * @rdev: radeon_device pointer
3540 void cik_fence_gfx_ring_emit(struct radeon_device *rdev,
3543 struct radeon_ring *ring = &rdev->ring[fence->ring];
3544 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3575 * @rdev: radeon_device pointer
3581 void cik_fence_compute_ring_emit(struct radeon_device *rdev,
3584 struct radeon_ring *ring = &rdev->ring[fence->ring];
3585 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3603 * @rdev: radeon_device pointer
3611 bool cik_semaphore_ring_emit(struct radeon_device *rdev,
3635 * @rdev: radeon_device pointer
3645 struct radeon_fence *cik_copy_cpdma(struct radeon_device *rdev,
3652 int ring_index = rdev->asic->copy.blit_ring_index;
3653 struct radeon_ring *ring = &rdev->ring[ring_index];
3662 r = radeon_ring_lock(rdev, ring, num_loops * 7 + 18);
3665 radeon_sync_free(rdev, &sync, NULL);
3669 radeon_sync_resv(rdev, &sync, resv, false);
3670 radeon_sync_rings(rdev, &sync, ring->idx);
3691 r = radeon_fence_emit(rdev, &fence, ring->idx);
3693 radeon_ring_unlock_undo(rdev, ring);
3694 radeon_sync_free(rdev, &sync, NULL);
3698 radeon_ring_unlock_commit(rdev, ring, false);
3699 radeon_sync_free(rdev, &sync, fence);
3710 * @rdev: radeon_device pointer
3719 void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3721 struct radeon_ring *ring = &rdev->ring[ib->ring];
3739 } else if (rdev->wb.enabled) {
3762 * @rdev: radeon_device pointer
3769 int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
3777 r = radeon_scratch_get(rdev, &scratch);
3783 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
3786 radeon_scratch_free(rdev, scratch);
3793 r = radeon_ib_schedule(rdev, &ib, NULL, false);
3795 radeon_scratch_free(rdev, scratch);
3796 radeon_ib_free(rdev, &ib);
3804 radeon_scratch_free(rdev, scratch);
3805 radeon_ib_free(rdev, &ib);
3809 radeon_scratch_free(rdev, scratch);
3810 radeon_ib_free(rdev, &ib);
3814 for (i = 0; i < rdev->usec_timeout; i++) {
3820 if (i < rdev->usec_timeout) {
3827 radeon_scratch_free(rdev, scratch);
3828 radeon_ib_free(rdev, &ib);
3858 * @rdev: radeon_device pointer
3863 static void cik_cp_gfx_enable(struct radeon_device *rdev, bool enable)
3868 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3869 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
3871 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3879 * @rdev: radeon_device pointer
3884 static int cik_cp_gfx_load_microcode(struct radeon_device *rdev)
3888 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw)
3891 cik_cp_gfx_enable(rdev, false);
3893 if (rdev->new_fw) {
3895 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data;
3897 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data;
3899 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data;
3909 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes));
3918 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes));
3927 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes));
3938 fw_data = (const __be32 *)rdev->pfp_fw->data;
3945 fw_data = (const __be32 *)rdev->ce_fw->data;
3952 fw_data = (const __be32 *)rdev->me_fw->data;
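
Whichever header format is in use, the upload itself (3909-3952) streams each engine's words through an address/data register pair. A sketch for the PFP engine, assuming CP_PFP_UCODE_ADDR/CP_PFP_UCODE_DATA as that pair; the legacy __be32 path differs only in using be32_to_cpup():

    /* reset the autoincrementing address, stream the words, reset again */
    WREG32(CP_PFP_UCODE_ADDR, 0);
    for (i = 0; i < fw_size; i++)
            WREG32(CP_PFP_UCODE_DATA, le32_to_cpup(fw_data++));
    WREG32(CP_PFP_UCODE_ADDR, 0);
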
3965 * @rdev: radeon_device pointer
3971 static int cik_cp_gfx_start(struct radeon_device *rdev)
3973 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3977 WREG32(CP_MAX_CONTEXT, rdev->config.cik.max_hw_contexts - 1);
3981 cik_cp_gfx_enable(rdev, true);
3983 r = radeon_ring_lock(rdev, ring, cik_default_size + 17);
4018 radeon_ring_unlock_commit(rdev, ring, false);
4026 * @rdev: radeon_device pointer
4031 static void cik_cp_gfx_fini(struct radeon_device *rdev)
4033 cik_cp_gfx_enable(rdev, false);
4034 radeon_ring_fini(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
4040 * @rdev: radeon_device pointer
4046 static int cik_cp_gfx_resume(struct radeon_device *rdev)
4055 if (rdev->family != CHIP_HAWAII)
4064 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
4068 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
4082 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
4083 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
4088 if (!rdev->wb.enabled)
4099 cik_cp_gfx_start(rdev);
4100 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
4101 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
4103 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
4107 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
4108 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
4113 u32 cik_gfx_get_rptr(struct radeon_device *rdev,
4118 if (rdev->wb.enabled)
4119 rptr = rdev->wb.wb[ring->rptr_offs/4];
4126 u32 cik_gfx_get_wptr(struct radeon_device *rdev,
4132 void cik_gfx_set_wptr(struct radeon_device *rdev,
4139 u32 cik_compute_get_rptr(struct radeon_device *rdev,
4144 if (rdev->wb.enabled) {
4145 rptr = rdev->wb.wb[ring->rptr_offs/4];
4147 mutex_lock(&rdev->srbm_mutex);
4148 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
4150 cik_srbm_select(rdev, 0, 0, 0, 0);
4151 mutex_unlock(&rdev->srbm_mutex);
4157 u32 cik_compute_get_wptr(struct radeon_device *rdev,
4162 if (rdev->wb.enabled) {
4164 wptr = rdev->wb.wb[ring->wptr_offs/4];
4166 mutex_lock(&rdev->srbm_mutex);
4167 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
4169 cik_srbm_select(rdev, 0, 0, 0, 0);
4170 mutex_unlock(&rdev->srbm_mutex);
4176 void cik_compute_set_wptr(struct radeon_device *rdev,
4180 rdev->wb.wb[ring->wptr_offs/4] = ring->wptr;
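
cik_compute_set_wptr (4176-4180) shows the doorbell-based submission path: publish the new write pointer in the writeback shadow, then ring the queue's doorbell so the CP notices without a register read. A sketch, assuming a WDOORBELL32 helper that writes through the doorbell aperture:

    void cik_compute_set_wptr(struct radeon_device *rdev,
                              struct radeon_ring *ring)
    {
            /* shadow first, so the CP reads a consistent wptr ... */
            rdev->wb.wb[ring->wptr_offs/4] = ring->wptr;
            /* ... then kick the hardware queue */
            WDOORBELL32(ring->doorbell_index, ring->wptr);
    }
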
4184 static void cik_compute_stop(struct radeon_device *rdev,
4189 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
4197 for (j = 0; j < rdev->usec_timeout; j++) {
4206 cik_srbm_select(rdev, 0, 0, 0, 0);
4212 * @rdev: radeon_device pointer
4217 static void cik_cp_compute_enable(struct radeon_device *rdev, bool enable)
4226 mutex_lock(&rdev->srbm_mutex);
4227 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
4228 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
4229 mutex_unlock(&rdev->srbm_mutex);
4232 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
4233 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
4241 * @rdev: radeon_device pointer
4246 static int cik_cp_compute_load_microcode(struct radeon_device *rdev)
4250 if (!rdev->mec_fw)
4253 cik_cp_compute_enable(rdev, false);
4255 if (rdev->new_fw) {
4257 (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data;
4265 (rdev->mec_fw->data + le32_to_cpu(mec_hdr->header.ucode_array_offset_bytes));
4273 if (rdev->family == CHIP_KAVERI) {
4275 (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data;
4278 (rdev->mec2_fw->data +
4290 fw_data = (const __be32 *)rdev->mec_fw->data;
4296 if (rdev->family == CHIP_KAVERI) {
4298 fw_data = (const __be32 *)rdev->mec_fw->data;
4312 * @rdev: radeon_device pointer
4317 static int cik_cp_compute_start(struct radeon_device *rdev)
4319 cik_cp_compute_enable(rdev, true);
4327 * @rdev: radeon_device pointer
4332 static void cik_cp_compute_fini(struct radeon_device *rdev)
4336 cik_cp_compute_enable(rdev, false);
4344 if (rdev->ring[idx].mqd_obj) {
4345 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false);
4347 dev_warn(rdev->dev, "(%d) reserve MQD bo failed\n", r);
4349 radeon_bo_unpin(rdev->ring[idx].mqd_obj);
4350 radeon_bo_unreserve(rdev->ring[idx].mqd_obj);
4352 radeon_bo_unref(&rdev->ring[idx].mqd_obj);
4353 rdev->ring[idx].mqd_obj = NULL;
4358 static void cik_mec_fini(struct radeon_device *rdev)
4362 if (rdev->mec.hpd_eop_obj) {
4363 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false);
4365 dev_warn(rdev->dev, "(%d) reserve HPD EOP bo failed\n", r);
4366 radeon_bo_unpin(rdev->mec.hpd_eop_obj);
4367 radeon_bo_unreserve(rdev->mec.hpd_eop_obj);
4369 radeon_bo_unref(&rdev->mec.hpd_eop_obj);
4370 rdev->mec.hpd_eop_obj = NULL;
4376 static int cik_mec_init(struct radeon_device *rdev)
4385 if (rdev->family == CHIP_KAVERI)
4386 rdev->mec.num_mec = 2;
4388 rdev->mec.num_mec = 1;
4389 rdev->mec.num_pipe = 4;
4390 rdev->mec.num_queue = rdev->mec.num_mec * rdev->mec.num_pipe * 8;
4392 if (rdev->mec.hpd_eop_obj == NULL) {
4393 r = radeon_bo_create(rdev,
4394 rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2,
4397 &rdev->mec.hpd_eop_obj);
4399 dev_warn(rdev->dev, "(%d) create HPD EOP bo failed\n", r);
4404 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false);
4406 cik_mec_fini(rdev);
4409 r = radeon_bo_pin(rdev->mec.hpd_eop_obj, RADEON_GEM_DOMAIN_GTT,
4410 &rdev->mec.hpd_eop_gpu_addr);
4412 dev_warn(rdev->dev, "(%d) pin HPD EOP bo failed\n", r);
4413 cik_mec_fini(rdev);
4416 r = radeon_bo_kmap(rdev->mec.hpd_eop_obj, (void **)&hpd);
4418 dev_warn(rdev->dev, "(%d) map HPD EOP bo failed\n", r);
4419 cik_mec_fini(rdev);
4424 memset(hpd, 0, rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2);
4426 radeon_bo_kunmap(rdev->mec.hpd_eop_obj);
4427 radeon_bo_unreserve(rdev->mec.hpd_eop_obj);
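
cik_mec_init (4376-4427) is a textbook radeon BO lifecycle: create the HPD EOP buffer, then reserve, pin, kmap, clear, kunmap, unreserve, with every failure unwinding through cik_mec_fini() so a partial init never leaks. A sketch of the middle steps, mirroring the listing's error handling:

    r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false);
    if (unlikely(r != 0)) {
            cik_mec_fini(rdev);
            return r;
    }
    r = radeon_bo_pin(rdev->mec.hpd_eop_obj, RADEON_GEM_DOMAIN_GTT,
                      &rdev->mec.hpd_eop_gpu_addr);
    if (r) {
            dev_warn(rdev->dev, "(%d) pin HPD EOP bo failed\n", r);
            cik_mec_fini(rdev);
            return r;
    }
    r = radeon_bo_kmap(rdev->mec.hpd_eop_obj, (void **)&hpd);
    if (r) {
            dev_warn(rdev->dev, "(%d) map HPD EOP bo failed\n", r);
            cik_mec_fini(rdev);
            return r;
    }
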
4502 * @rdev: radeon_device pointer
4508 static int cik_cp_compute_resume(struct radeon_device *rdev)
4520 r = cik_cp_compute_start(rdev);
4530 mutex_lock(&rdev->srbm_mutex);
4532 for (i = 0; i < (rdev->mec.num_pipe * rdev->mec.num_mec); ++i) {
4536 cik_srbm_select(rdev, me, pipe, 0, 0);
4538 eop_gpu_addr = rdev->mec.hpd_eop_gpu_addr + (i * MEC_HPD_SIZE * 2);
4553 cik_srbm_select(rdev, 0, 0, 0, 0);
4554 mutex_unlock(&rdev->srbm_mutex);
4563 if (rdev->ring[idx].mqd_obj == NULL) {
4564 r = radeon_bo_create(rdev,
4568 NULL, &rdev->ring[idx].mqd_obj);
4570 dev_warn(rdev->dev, "(%d) create MQD bo failed\n", r);
4575 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false);
4577 cik_cp_compute_fini(rdev);
4580 r = radeon_bo_pin(rdev->ring[idx].mqd_obj, RADEON_GEM_DOMAIN_GTT,
4583 dev_warn(rdev->dev, "(%d) pin MQD bo failed\n", r);
4584 cik_cp_compute_fini(rdev);
4587 r = radeon_bo_kmap(rdev->ring[idx].mqd_obj, (void **)&buf);
4589 dev_warn(rdev->dev, "(%d) map MQD bo failed\n", r);
4590 cik_cp_compute_fini(rdev);
4604 mutex_lock(&rdev->srbm_mutex);
4605 cik_srbm_select(rdev, rdev->ring[idx].me,
4606 rdev->ring[idx].pipe,
4607 rdev->ring[idx].queue, 0);
4630 for (j = 0; j < rdev->usec_timeout; j++) {
4651 hqd_gpu_addr = rdev->ring[idx].gpu_addr >> 8;
4663 order_base_2(rdev->ring[idx].ring_size / 8);
4677 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP1_WPTR_OFFSET;
4679 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP2_WPTR_OFFSET;
4688 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET;
4690 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET;
4705 DOORBELL_OFFSET(rdev->ring[idx].doorbell_index);
4717 rdev->ring[idx].wptr = 0;
4718 mqd->queue_state.cp_hqd_pq_wptr = rdev->ring[idx].wptr;
4730 cik_srbm_select(rdev, 0, 0, 0, 0);
4731 mutex_unlock(&rdev->srbm_mutex);
4733 radeon_bo_kunmap(rdev->ring[idx].mqd_obj);
4734 radeon_bo_unreserve(rdev->ring[idx].mqd_obj);
4736 rdev->ring[idx].ready = true;
4737 r = radeon_ring_test(rdev, idx, &rdev->ring[idx]);
4739 rdev->ring[idx].ready = false;
4745 static void cik_cp_enable(struct radeon_device *rdev, bool enable)
4747 cik_cp_gfx_enable(rdev, enable);
4748 cik_cp_compute_enable(rdev, enable);
4751 static int cik_cp_load_microcode(struct radeon_device *rdev)
4755 r = cik_cp_gfx_load_microcode(rdev);
4758 r = cik_cp_compute_load_microcode(rdev);
4765 static void cik_cp_fini(struct radeon_device *rdev)
4767 cik_cp_gfx_fini(rdev);
4768 cik_cp_compute_fini(rdev);
4771 static int cik_cp_resume(struct radeon_device *rdev)
4775 cik_enable_gui_idle_interrupt(rdev, false);
4777 r = cik_cp_load_microcode(rdev);
4781 r = cik_cp_gfx_resume(rdev);
4784 r = cik_cp_compute_resume(rdev);
4788 cik_enable_gui_idle_interrupt(rdev, true);
4793 static void cik_print_gpu_status_regs(struct radeon_device *rdev)
4795 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
4797 dev_info(rdev->dev, " GRBM_STATUS2=0x%08X\n",
4799 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
4801 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
4803 dev_info(rdev->dev, " GRBM_STATUS_SE2=0x%08X\n",
4805 dev_info(rdev->dev, " GRBM_STATUS_SE3=0x%08X\n",
4807 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
4809 dev_info(rdev->dev, " SRBM_STATUS2=0x%08X\n",
4811 dev_info(rdev->dev, " SDMA0_STATUS_REG = 0x%08X\n",
4813 dev_info(rdev->dev, " SDMA1_STATUS_REG = 0x%08X\n",
4815 dev_info(rdev->dev, " CP_STAT = 0x%08x\n", RREG32(CP_STAT));
4816 dev_info(rdev->dev, " CP_STALLED_STAT1 = 0x%08x\n",
4818 dev_info(rdev->dev, " CP_STALLED_STAT2 = 0x%08x\n",
4820 dev_info(rdev->dev, " CP_STALLED_STAT3 = 0x%08x\n",
4822 dev_info(rdev->dev, " CP_CPF_BUSY_STAT = 0x%08x\n",
4824 dev_info(rdev->dev, " CP_CPF_STALLED_STAT1 = 0x%08x\n",
4826 dev_info(rdev->dev, " CP_CPF_STATUS = 0x%08x\n", RREG32(CP_CPF_STATUS));
4827 dev_info(rdev->dev, " CP_CPC_BUSY_STAT = 0x%08x\n", RREG32(CP_CPC_BUSY_STAT));
4828 dev_info(rdev->dev, " CP_CPC_STALLED_STAT1 = 0x%08x\n",
4830 dev_info(rdev->dev, " CP_CPC_STATUS = 0x%08x\n", RREG32(CP_CPC_STATUS));
4836 * @rdev: radeon_device pointer
4842 u32 cik_gpu_check_soft_reset(struct radeon_device *rdev)
4902 if (evergreen_is_display_hung(rdev))
4917 * @rdev: radeon_device pointer
4922 static void cik_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
4931 dev_info(rdev->dev, "GPU soft reset: 0x%08X\n", reset_mask);
4933 cik_print_gpu_status_regs(rdev);
4934 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
4936 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4940 cik_fini_pg(rdev);
4941 cik_fini_cg(rdev);
4944 cik_rlc_stop(rdev);
4965 evergreen_mc_stop(rdev, &save);
4966 if (evergreen_mc_wait_for_idle(rdev)) {
4967 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5003 if (!(rdev->flags & RADEON_IS_IGP)) {
5011 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
5025 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
5039 evergreen_mc_resume(rdev, &save);
5042 cik_print_gpu_status_regs(rdev);
5051 static void kv_save_regs_for_reset(struct radeon_device *rdev,
5063 static void kv_restore_regs_for_reset(struct radeon_device *rdev,
5136 static void cik_gpu_pci_config_reset(struct radeon_device *rdev)
5142 dev_info(rdev->dev, "GPU PCI config reset\n");
5147 cik_fini_pg(rdev);
5148 cik_fini_cg(rdev);
5167 cik_rlc_stop(rdev);
5172 evergreen_mc_stop(rdev, &save);
5173 if (evergreen_mc_wait_for_idle(rdev)) {
5174 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5177 if (rdev->flags & RADEON_IS_IGP)
5178 kv_save_regs_for_reset(rdev, &kv_save);
5181 pci_clear_master(rdev->pdev);
5183 radeon_pci_config_reset(rdev);
5188 for (i = 0; i < rdev->usec_timeout; i++) {
5195 if (rdev->flags & RADEON_IS_IGP)
5196 kv_restore_regs_for_reset(rdev, &kv_save);
5202 * @rdev: radeon_device pointer
5209 int cik_asic_reset(struct radeon_device *rdev, bool hard)
5214 cik_gpu_pci_config_reset(rdev);
5218 reset_mask = cik_gpu_check_soft_reset(rdev);
5221 r600_set_bios_scratch_engine_hung(rdev, true);
5224 cik_gpu_soft_reset(rdev, reset_mask);
5226 reset_mask = cik_gpu_check_soft_reset(rdev);
5230 cik_gpu_pci_config_reset(rdev);
5232 reset_mask = cik_gpu_check_soft_reset(rdev);
5235 r600_set_bios_scratch_engine_hung(rdev, false);
5243 * @rdev: radeon_device pointer
5249 bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
5251 u32 reset_mask = cik_gpu_check_soft_reset(rdev);
5256 radeon_ring_lockup_update(rdev, ring);
5259 return radeon_ring_test_lockup(rdev, ring);
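
cik_gfx_is_lockup (5249-5259) reads cleanly as: if nothing GFX-related wants a reset, refresh the lockup tracker and report healthy; otherwise let the generic ring test decide. A sketch; the exact mask bits are an assumption:

    bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
    {
            u32 reset_mask = cik_gpu_check_soft_reset(rdev);

            if (!(reset_mask & (RADEON_RESET_GFX |
                                RADEON_RESET_COMPUTE |
                                RADEON_RESET_CP))) {
                    radeon_ring_lockup_update(rdev, ring);
                    return false;           /* engine looks alive */
            }
            return radeon_ring_test_lockup(rdev, ring);
    }
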
5266 * @rdev: radeon_device pointer
5271 static void cik_mc_program(struct radeon_device *rdev)
5287 evergreen_mc_stop(rdev, &save);
5288 if (radeon_mc_wait_for_idle(rdev)) {
5289 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5295 rdev->mc.vram_start >> 12);
5297 rdev->mc.vram_end >> 12);
5299 rdev->vram_scratch.gpu_addr >> 12);
5300 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
5301 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
5304 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
5310 if (radeon_mc_wait_for_idle(rdev)) {
5311 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5313 evergreen_mc_resume(rdev, &save);
5316 rv515_vga_render_disable(rdev);
5322 * @rdev: radeon_device pointer
5328 static int cik_mc_init(struct radeon_device *rdev)
5334 rdev->mc.vram_is_ddr = true;
5372 rdev->mc.vram_width = numchan * chansize;
5374 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
5375 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
5377 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
5378 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
5379 rdev->mc.visible_vram_size = rdev->mc.aper_size;
5380 si_vram_gtt_location(rdev, &rdev->mc);
5381 radeon_update_bandwidth_info(rdev);
5395 * @rdev: radeon_device pointer
5399 void cik_pcie_gart_tlb_flush(struct radeon_device *rdev)
5411 * @rdev: radeon_device pointer
5419 static int cik_pcie_gart_enable(struct radeon_device *rdev)
5423 if (rdev->gart.robj == NULL) {
5424 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
5427 r = radeon_gart_table_vram_pin(rdev);
5450 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
5451 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
5452 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
5454 (u32)(rdev->dummy_page.addr >> 12));
5466 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1);
5470 rdev->vm_manager.saved_table_addr[i]);
5473 rdev->vm_manager.saved_table_addr[i]);
5478 (u32)(rdev->dummy_page.addr >> 12));
5495 if (rdev->family == CHIP_KAVERI) {
5503 mutex_lock(&rdev->srbm_mutex);
5505 cik_srbm_select(rdev, 0, 0, 0, i);
5518 cik_srbm_select(rdev, 0, 0, 0, 0);
5519 mutex_unlock(&rdev->srbm_mutex);
5521 cik_pcie_gart_tlb_flush(rdev);
5523 (unsigned)(rdev->mc.gtt_size >> 20),
5524 (unsigned long long)rdev->gart.table_addr);
5525 rdev->gart.ready = true;
5532 * @rdev: radeon_device pointer
5536 static void cik_pcie_gart_disable(struct radeon_device *rdev)
5546 rdev->vm_manager.saved_table_addr[i] = RREG32(reg);
5565 radeon_gart_table_vram_unpin(rdev);
5571 * @rdev: radeon_device pointer
5575 static void cik_pcie_gart_fini(struct radeon_device *rdev)
5577 cik_pcie_gart_disable(rdev);
5578 radeon_gart_table_vram_free(rdev);
5579 radeon_gart_fini(rdev);
5586 * @rdev: radeon_device pointer
5591 int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
5605 * @rdev: radeon_device pointer
5611 int cik_vm_init(struct radeon_device *rdev)
5618 rdev->vm_manager.nvm = 16;
5620 if (rdev->flags & RADEON_IS_IGP) {
5623 rdev->vm_manager.vram_base_offset = tmp;
5625 rdev->vm_manager.vram_base_offset = 0;
5633 * @rdev: radeon_device pointer
5637 void cik_vm_fini(struct radeon_device *rdev)
5644 * @rdev: radeon_device pointer
5651 static void cik_vm_decode_fault(struct radeon_device *rdev,
5660 if (rdev->family == CHIP_HAWAII)
5677 void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
5722 cik_hdp_flush_cp_ring_emit(rdev, ring->idx);
5757 static void cik_enable_gui_idle_interrupt(struct radeon_device *rdev,
5769 static void cik_enable_lbpw(struct radeon_device *rdev, bool enable)
5781 static void cik_wait_for_rlc_serdes(struct radeon_device *rdev)
5786 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) {
5787 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) {
5788 cik_select_se_sh(rdev, i, j);
5789 for (k = 0; k < rdev->usec_timeout; k++) {
5796 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5799 for (k = 0; k < rdev->usec_timeout; k++) {
5806 static void cik_update_rlc(struct radeon_device *rdev, u32 rlc)
5815 static u32 cik_halt_rlc(struct radeon_device *rdev)
5827 for (i = 0; i < rdev->usec_timeout; i++) {
5833 cik_wait_for_rlc_serdes(rdev);
5839 void cik_enter_rlc_safe_mode(struct radeon_device *rdev)
5847 for (i = 0; i < rdev->usec_timeout; i++) {
5853 for (i = 0; i < rdev->usec_timeout; i++) {
5860 void cik_exit_rlc_safe_mode(struct radeon_device *rdev)
5871 * @rdev: radeon_device pointer
5875 static void cik_rlc_stop(struct radeon_device *rdev)
5879 cik_enable_gui_idle_interrupt(rdev, false);
5881 cik_wait_for_rlc_serdes(rdev);
5887 * @rdev: radeon_device pointer
5891 static void cik_rlc_start(struct radeon_device *rdev)
5895 cik_enable_gui_idle_interrupt(rdev, true);
5903 * @rdev: radeon_device pointer
5909 static int cik_rlc_resume(struct radeon_device *rdev)
5913 if (!rdev->rlc_fw)
5916 cik_rlc_stop(rdev);
5922 si_rlc_reset(rdev);
5924 cik_init_pg(rdev);
5926 cik_init_cg(rdev);
5931 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5939 if (rdev->new_fw) {
5941 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data;
5943 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
5955 switch (rdev->family) {
5972 fw_data = (const __be32 *)rdev->rlc_fw->data;
5980 cik_enable_lbpw(rdev, false);
5982 if (rdev->family == CHIP_BONAIRE)
5985 cik_rlc_start(rdev);
5990 static void cik_enable_cgcg(struct radeon_device *rdev, bool enable)
5996 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) {
5997 cik_enable_gui_idle_interrupt(rdev, true);
5999 tmp = cik_halt_rlc(rdev);
6001 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6007 cik_update_rlc(rdev, tmp);
6011 cik_enable_gui_idle_interrupt(rdev, false);
6026 static void cik_enable_mgcg(struct radeon_device *rdev, bool enable)
6030 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) {
6031 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) {
6032 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) {
6046 tmp = cik_halt_rlc(rdev);
6048 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6054 cik_update_rlc(rdev, tmp);
6056 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS) {
6062 if ((rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) &&
6063 (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS_LS))
6094 tmp = cik_halt_rlc(rdev);
6096 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6102 cik_update_rlc(rdev, tmp);
6119 static void cik_enable_mc_ls(struct radeon_device *rdev,
6127 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS))
6136 static void cik_enable_mc_mgcg(struct radeon_device *rdev,
6144 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG))
6153 static void cik_enable_sdma_mgcg(struct radeon_device *rdev,
6158 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) {
6174 static void cik_enable_sdma_mgls(struct radeon_device *rdev,
6179 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_LS)) {
6202 static void cik_enable_uvd_mgcg(struct radeon_device *rdev,
6207 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) {
6228 static void cik_enable_bif_mgls(struct radeon_device *rdev,
6235 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS))
6246 static void cik_enable_hdp_mgcg(struct radeon_device *rdev,
6253 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG))
6262 static void cik_enable_hdp_ls(struct radeon_device *rdev,
6269 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS))
6278 void cik_update_cg(struct radeon_device *rdev,
6283 cik_enable_gui_idle_interrupt(rdev, false);
6286 cik_enable_mgcg(rdev, true);
6287 cik_enable_cgcg(rdev, true);
6289 cik_enable_cgcg(rdev, false);
6290 cik_enable_mgcg(rdev, false);
6292 cik_enable_gui_idle_interrupt(rdev, true);
6296 if (!(rdev->flags & RADEON_IS_IGP)) {
6297 cik_enable_mc_mgcg(rdev, enable);
6298 cik_enable_mc_ls(rdev, enable);
6303 cik_enable_sdma_mgcg(rdev, enable);
6304 cik_enable_sdma_mgls(rdev, enable);
6308 cik_enable_bif_mgls(rdev, enable);
6312 if (rdev->has_uvd)
6313 cik_enable_uvd_mgcg(rdev, enable);
6317 cik_enable_hdp_mgcg(rdev, enable);
6318 cik_enable_hdp_ls(rdev, enable);
6322 vce_v2_0_enable_mgcg(rdev, enable);
6326 static void cik_init_cg(struct radeon_device *rdev)
6329 cik_update_cg(rdev, RADEON_CG_BLOCK_GFX, true);
6331 if (rdev->has_uvd)
6332 si_init_uvd_internal_cg(rdev);
6334 cik_update_cg(rdev, (RADEON_CG_BLOCK_MC |
6341 static void cik_fini_cg(struct radeon_device *rdev)
6343 cik_update_cg(rdev, (RADEON_CG_BLOCK_MC |
6349 cik_update_cg(rdev, RADEON_CG_BLOCK_GFX, false);
6352 static void cik_enable_sck_slowdown_on_pu(struct radeon_device *rdev,
6358 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS))
6366 static void cik_enable_sck_slowdown_on_pd(struct radeon_device *rdev,
6372 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS))
6380 static void cik_enable_cp_pg(struct radeon_device *rdev, bool enable)
6385 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_CP))
6393 static void cik_enable_gds_pg(struct radeon_device *rdev, bool enable)
6398 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GDS))
6410 void cik_init_cp_pg_table(struct radeon_device *rdev)
6417 if (rdev->family == CHIP_KAVERI)
6420 if (rdev->rlc.cp_table_ptr == NULL)
6424 dst_ptr = rdev->rlc.cp_table_ptr;
6426 if (rdev->new_fw) {
6431 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data;
6433 (rdev->ce_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6437 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data;
6439 (rdev->pfp_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6443 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data;
6445 (rdev->me_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6449 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data;
6451 (rdev->mec_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6455 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data;
6457 (rdev->mec2_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6472 fw_data = (const __be32 *)rdev->ce_fw->data;
6475 fw_data = (const __be32 *)rdev->pfp_fw->data;
6478 fw_data = (const __be32 *)rdev->me_fw->data;
6481 fw_data = (const __be32 *)rdev->mec_fw->data;
6494 static void cik_enable_gfx_cgpg(struct radeon_device *rdev,
6499 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) {
6524 static u32 cik_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh)
6529 cik_select_se_sh(rdev, se, sh);
6532 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6539 for (i = 0; i < rdev->config.cik.max_cu_per_sh; i++) {
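
The loop at 6539 only builds a mask with max_cu_per_sh low bits set; the function then returns the inverted disable bits ANDed with that mask, i.e. the active-CU bitmap. Equivalently (sketch; tmp holds the merged disable bits read above):

    mask = (1u << rdev->config.cik.max_cu_per_sh) - 1;
    return (~tmp) & mask;   /* a set bit == an active CU */
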
6547 static void cik_init_ao_cu_mask(struct radeon_device *rdev)
6553 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) {
6554 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) {
6558 for (k = 0; k < rdev->config.cik.max_cu_per_sh; k++) {
6559 if (cik_get_cu_active_bitmap(rdev, i, j) & mask) {
6580 static void cik_enable_gfx_static_mgpg(struct radeon_device *rdev,
6586 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_SMG))
6594 static void cik_enable_gfx_dynamic_mgpg(struct radeon_device *rdev,
6600 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_DMG))
6611 static void cik_init_gfx_cgpg(struct radeon_device *rdev)
6616 if (rdev->rlc.cs_data) {
6618 WREG32(RLC_GPM_SCRATCH_DATA, upper_32_bits(rdev->rlc.clear_state_gpu_addr));
6619 WREG32(RLC_GPM_SCRATCH_DATA, lower_32_bits(rdev->rlc.clear_state_gpu_addr));
6620 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.clear_state_size);
6626 if (rdev->rlc.reg_list) {
6628 for (i = 0; i < rdev->rlc.reg_list_size; i++)
6629 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.reg_list[i]);
6637 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
6638 WREG32(RLC_CP_TABLE_RESTORE, rdev->rlc.cp_table_gpu_addr >> 8);
6660 static void cik_update_gfx_pg(struct radeon_device *rdev, bool enable)
6662 cik_enable_gfx_cgpg(rdev, enable);
6663 cik_enable_gfx_static_mgpg(rdev, enable);
6664 cik_enable_gfx_dynamic_mgpg(rdev, enable);
6667 u32 cik_get_csb_size(struct radeon_device *rdev)
6673 if (rdev->rlc.cs_data == NULL)
6681 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
6699 void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer)
6705 if (rdev->rlc.cs_data == NULL)
6717 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
6733 switch (rdev->family) {
6764 static void cik_init_pg(struct radeon_device *rdev)
6766 if (rdev->pg_flags) {
6767 cik_enable_sck_slowdown_on_pu(rdev, true);
6768 cik_enable_sck_slowdown_on_pd(rdev, true);
6769 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
6770 cik_init_gfx_cgpg(rdev);
6771 cik_enable_cp_pg(rdev, true);
6772 cik_enable_gds_pg(rdev, true);
6774 cik_init_ao_cu_mask(rdev);
6775 cik_update_gfx_pg(rdev, true);
6779 static void cik_fini_pg(struct radeon_device *rdev)
6781 if (rdev->pg_flags) {
6782 cik_update_gfx_pg(rdev, false);
6783 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
6784 cik_enable_cp_pg(rdev, false);
6785 cik_enable_gds_pg(rdev, false);
6808 * @rdev: radeon_device pointer
6812 static void cik_enable_interrupts(struct radeon_device *rdev)
6821 rdev->ih.enabled = true;
6827 * @rdev: radeon_device pointer
6831 static void cik_disable_interrupts(struct radeon_device *rdev)
6843 rdev->ih.enabled = false;
6844 rdev->ih.rptr = 0;
6850 * @rdev: radeon_device pointer
6854 static void cik_disable_interrupt_state(struct radeon_device *rdev)
6883 if (rdev->num_crtc >= 4) {
6887 if (rdev->num_crtc >= 6) {
6892 if (rdev->num_crtc >= 2) {
6896 if (rdev->num_crtc >= 4) {
6900 if (rdev->num_crtc >= 6) {
6927 * @rdev: radeon_device pointer
6935 static int cik_irq_init(struct radeon_device *rdev)
6942 ret = r600_ih_ring_alloc(rdev);
6947 cik_disable_interrupts(rdev);
6950 ret = cik_rlc_resume(rdev);
6952 r600_ih_ring_fini(rdev);
6958 WREG32(INTERRUPT_CNTL2, rdev->dummy_page.addr >> 8);
6968 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
6969 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
6975 if (rdev->wb.enabled)
6979 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
6980 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
6991 if (rdev->msi_enabled)
6996 cik_disable_interrupt_state(rdev);
6998 pci_set_master(rdev->pdev);
7001 cik_enable_interrupts(rdev);
7009 * @rdev: radeon_device pointer
7015 int cik_irq_set(struct radeon_device *rdev)
7025 if (!rdev->irq.installed) {
7030 if (!rdev->ih.enabled) {
7031 cik_disable_interrupts(rdev);
7033 cik_disable_interrupt_state(rdev);
7061 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
7065 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
7066 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
7108 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
7109 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
7152 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
7157 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
7162 if (rdev->irq.crtc_vblank_int[0] ||
7163 atomic_read(&rdev->irq.pflip[0])) {
7167 if (rdev->irq.crtc_vblank_int[1] ||
7168 atomic_read(&rdev->irq.pflip[1])) {
7172 if (rdev->irq.crtc_vblank_int[2] ||
7173 atomic_read(&rdev->irq.pflip[2])) {
7177 if (rdev->irq.crtc_vblank_int[3] ||
7178 atomic_read(&rdev->irq.pflip[3])) {
7182 if (rdev->irq.crtc_vblank_int[4] ||
7183 atomic_read(&rdev->irq.pflip[4])) {
7187 if (rdev->irq.crtc_vblank_int[5] ||
7188 atomic_read(&rdev->irq.pflip[5])) {
7192 if (rdev->irq.hpd[0]) {
7196 if (rdev->irq.hpd[1]) {
7200 if (rdev->irq.hpd[2]) {
7204 if (rdev->irq.hpd[3]) {
7208 if (rdev->irq.hpd[4]) {
7212 if (rdev->irq.hpd[5]) {
7235 if (rdev->num_crtc >= 4) {
7239 if (rdev->num_crtc >= 6) {
7244 if (rdev->num_crtc >= 2) {
7250 if (rdev->num_crtc >= 4) {
7256 if (rdev->num_crtc >= 6) {
7279 * @rdev: radeon_device pointer
7285 static inline void cik_irq_ack(struct radeon_device *rdev)
7289 rdev->irq.stat_regs.cik.disp_int = RREG32(DISP_INTERRUPT_STATUS);
7290 rdev->irq.stat_regs.cik.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
7291 rdev->irq.stat_regs.cik.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
7292 rdev->irq.stat_regs.cik.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
7293 rdev->irq.stat_regs.cik.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
7294 rdev->irq.stat_regs.cik.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
7295 rdev->irq.stat_regs.cik.disp_int_cont6 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE6);
7297 rdev->irq.stat_regs.cik.d1grph_int = RREG32(GRPH_INT_STATUS +
7299 rdev->irq.stat_regs.cik.d2grph_int = RREG32(GRPH_INT_STATUS +
7301 if (rdev->num_crtc >= 4) {
7302 rdev->irq.stat_regs.cik.d3grph_int = RREG32(GRPH_INT_STATUS +
7304 rdev->irq.stat_regs.cik.d4grph_int = RREG32(GRPH_INT_STATUS +
7307 if (rdev->num_crtc >= 6) {
7308 rdev->irq.stat_regs.cik.d5grph_int = RREG32(GRPH_INT_STATUS +
7310 rdev->irq.stat_regs.cik.d6grph_int = RREG32(GRPH_INT_STATUS +
7314 if (rdev->irq.stat_regs.cik.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
7317 if (rdev->irq.stat_regs.cik.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
7320 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT)
7322 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT)
7324 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
7326 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT)
7329 if (rdev->num_crtc >= 4) {
7330 if (rdev->irq.stat_regs.cik.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
7333 if (rdev->irq.stat_regs.cik.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
7336 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
7338 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
7340 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
7342 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
7346 if (rdev->num_crtc >= 6) {
7347 if (rdev->irq.stat_regs.cik.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
7350 if (rdev->irq.stat_regs.cik.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
7353 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
7355 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
7357 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
7359 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
7363 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT) {
7368 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT) {
7373 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT) {
7378 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT) {
7383 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT) {
7388 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT) {
7393 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT) {
7398 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
7403 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
7408 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
7413 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
7418 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
7428 * @rdev: radeon_device pointer
7432 static void cik_irq_disable(struct radeon_device *rdev)
7434 cik_disable_interrupts(rdev);
7437 cik_irq_ack(rdev);
7438 cik_disable_interrupt_state(rdev);
7444 * @rdev: radeon_device pointer
7449 static void cik_irq_suspend(struct radeon_device *rdev)
7451 cik_irq_disable(rdev);
7452 cik_rlc_stop(rdev);
7458 * @rdev: radeon_device pointer
7464 static void cik_irq_fini(struct radeon_device *rdev)
7466 cik_irq_suspend(rdev);
7467 r600_ih_ring_fini(rdev);
7473 * @rdev: radeon_device pointer
7481 static inline u32 cik_get_ih_wptr(struct radeon_device *rdev)
7485 if (rdev->wb.enabled)
7486 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
7496 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
7497 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
7498 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
7503 return (wptr & rdev->ih.ptr_mask);
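
cik_get_ih_wptr (7481-7503) handles the overflow case before masking: after an IH overflow the reported wptr points at the oldest entry, so the read pointer is bumped one 16-byte vector past it and the overflow flag is cleared. A condensed sketch, assuming RB_OVERFLOW/IH_WPTR_OVERFLOW_CLEAR bits in IH_RB_CNTL:

    if (wptr & RB_OVERFLOW) {
            wptr &= ~RB_OVERFLOW;
            /* wptr now points at the oldest entry; skip one vector so
             * parsing resumes on data that cannot be half-overwritten */
            rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
            tmp = RREG32(IH_RB_CNTL);
            tmp |= IH_WPTR_OVERFLOW_CLEAR;
            WREG32(IH_RB_CNTL, tmp);
    }
    return (wptr & rdev->ih.ptr_mask);
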
7531 * @rdev: radeon_device pointer
7538 int cik_irq_process(struct radeon_device *rdev)
7540 struct radeon_ring *cp1_ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
7541 struct radeon_ring *cp2_ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
7553 if (!rdev->ih.enabled || rdev->shutdown)
7556 wptr = cik_get_ih_wptr(rdev);
7560 if (atomic_xchg(&rdev->ih.lock, 1))
7563 rptr = rdev->ih.rptr;
7570 cik_irq_ack(rdev);
7576 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
7577 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
7578 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
7584 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT))
7587 if (rdev->irq.crtc_vblank_int[0]) {
7588 drm_handle_vblank(rdev->ddev, 0);
7589 rdev->pm.vblank_sync = true;
7590 wake_up(&rdev->irq.vblank_queue);
7592 if (atomic_read(&rdev->irq.pflip[0]))
7593 radeon_crtc_handle_vblank(rdev, 0);
7594 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
7599 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT))
7602 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VLINE_INTERRUPT;
7614 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
7617 if (rdev->irq.crtc_vblank_int[1]) {
7618 drm_handle_vblank(rdev->ddev, 1);
7619 rdev->pm.vblank_sync = true;
7620 wake_up(&rdev->irq.vblank_queue);
7622 if (atomic_read(&rdev->irq.pflip[1]))
7623 radeon_crtc_handle_vblank(rdev, 1);
7624 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
7629 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT))
7632 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
7644 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
7647 if (rdev->irq.crtc_vblank_int[2]) {
7648 drm_handle_vblank(rdev->ddev, 2);
7649 rdev->pm.vblank_sync = true;
7650 wake_up(&rdev->irq.vblank_queue);
7652 if (atomic_read(&rdev->irq.pflip[2]))
7653 radeon_crtc_handle_vblank(rdev, 2);
7654 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
7659 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
7662 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
7674 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
7677 if (rdev->irq.crtc_vblank_int[3]) {
7678 drm_handle_vblank(rdev->ddev, 3);
7679 rdev->pm.vblank_sync = true;
7680 wake_up(&rdev->irq.vblank_queue);
7682 if (atomic_read(&rdev->irq.pflip[3]))
7683 radeon_crtc_handle_vblank(rdev, 3);
7684 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
7689 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
7692 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
7704 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
7707 if (rdev->irq.crtc_vblank_int[4]) {
7708 drm_handle_vblank(rdev->ddev, 4);
7709 rdev->pm.vblank_sync = true;
7710 wake_up(&rdev->irq.vblank_queue);
7712 if (atomic_read(&rdev->irq.pflip[4]))
7713 radeon_crtc_handle_vblank(rdev, 4);
7714 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
7719 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
7722 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
7734 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
7737 if (rdev->irq.crtc_vblank_int[5]) {
7738 drm_handle_vblank(rdev->ddev, 5);
7739 rdev->pm.vblank_sync = true;
7740 wake_up(&rdev->irq.vblank_queue);
7742 if (atomic_read(&rdev->irq.pflip[5]))
7743 radeon_crtc_handle_vblank(rdev, 5);
7744 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
7749 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
7752 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
7769 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
7774 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT))
7777 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_INTERRUPT;
7783 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT))
7786 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_INTERRUPT;
7792 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT))
7795 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
7801 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT))
7804 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
7810 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT))
7813 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
7819 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT))
7822 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
7828 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT))
7831 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_RX_INTERRUPT;
7837 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT))
7840 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
7846 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
7849 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
7855 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
7858 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
7864 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
7867 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
7873 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
7876 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
7892 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
7903 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
7904 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
7906 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
7908 cik_vm_decode_fault(rdev, status, addr, mc_client);
7914 radeon_fence_process(rdev, TN_RING_TYPE_VCE1_INDEX);
7917 radeon_fence_process(rdev, TN_RING_TYPE_VCE2_INDEX);
7926 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
7936 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
7941 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
7943 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
7998 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
8011 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
8025 rdev->pm.dpm.thermal.high_to_low = false;
8030 rdev->pm.dpm.thermal.high_to_low = true;
8082 rptr &= rdev->ih.ptr_mask;
8086 schedule_work(&rdev->dp_work);
8088 schedule_delayed_work(&rdev->hotplug_work, 0);
8090 rdev->needs_reset = true;
8091 wake_up_all(&rdev->fence_queue);
8094 schedule_work(&rdev->pm.dpm.thermal.work);
8095 rdev->ih.rptr = rptr;
8096 atomic_set(&rdev->ih.lock, 0);
8099 wptr = cik_get_ih_wptr(rdev);
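
All of the dispatch above (7576-8011) sits inside one consumer loop over 16-byte IH vectors; the tail at 8082-8099 closes the race where new vectors land while the lock is held. A condensed sketch of the skeleton, with IH_RB_RPTR as the assumed read-pointer register:

    restart_ih:
            while (rptr != wptr) {
                    ring_index = rptr / 4;
                    src_id   = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
                    src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
                    ring_id  = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
                    /* ... dispatch on src_id ... */
                    rptr += 16;                     /* one vector consumed */
                    rptr &= rdev->ih.ptr_mask;      /* wrap at ring end */
            }
            rdev->ih.rptr = rptr;
            WREG32(IH_RB_RPTR, rptr);
            atomic_set(&rdev->ih.lock, 0);
            /* re-check: more vectors may have arrived while parsing */
            wptr = cik_get_ih_wptr(rdev);
            if (wptr != rptr)
                    goto restart_ih;
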
8109 static void cik_uvd_init(struct radeon_device *rdev)
8113 if (!rdev->has_uvd)
8116 r = radeon_uvd_init(rdev);
8118 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
8120 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
8125 rdev->has_uvd = false;
8128 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
8129 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
8132 static void cik_uvd_start(struct radeon_device *rdev)
8136 if (!rdev->has_uvd)
8139 r = radeon_uvd_resume(rdev);
8141 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
8144 r = uvd_v4_2_resume(rdev);
8146 dev_err(rdev->dev, "failed UVD 4.2 resume (%d).\n", r);
8149 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
8151 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
8157 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
8160 static void cik_uvd_resume(struct radeon_device *rdev)
8165 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
8168 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
8169 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
8171 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
8174 r = uvd_v1_0_init(rdev);
8176 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
8181 static void cik_vce_init(struct radeon_device *rdev)
8185 if (!rdev->has_vce)
8188 r = radeon_vce_init(rdev);
8190 dev_err(rdev->dev, "failed VCE init (%d).\n", r);
8192 * At this point rdev->vce.vcpu_bo is NULL which trickles down
8197 rdev->has_vce = false;
8200 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL;
8201 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096);
8202 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL;
8203 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096);
8206 static void cik_vce_start(struct radeon_device *rdev)
8210 if (!rdev->has_vce)
8213 r = radeon_vce_resume(rdev);
8215 dev_err(rdev->dev, "failed VCE resume (%d).\n", r);
8218 r = vce_v2_0_resume(rdev);
8220 dev_err(rdev->dev, "failed VCE 2.0 resume (%d).\n", r);
8223 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE1_INDEX);
8225 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r);
8228 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE2_INDEX);
8230 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r);
8236 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0;
8237 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0;
8240 static void cik_vce_resume(struct radeon_device *rdev)
8245 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size)
8248 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
8249 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
8251 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r);
8254 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
8255 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
8257 dev_err(rdev->dev, "failed initializing VCE2 ring (%d).\n", r);
8260 r = vce_v1_0_init(rdev);
8262 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r);
8270 * @rdev: radeon_device pointer
8276 static int cik_startup(struct radeon_device *rdev)
8283 cik_pcie_gen3_enable(rdev);
8285 cik_program_aspm(rdev);
8288 r = r600_vram_scratch_init(rdev);
8292 cik_mc_program(rdev);
8294 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) {
8295 r = ci_mc_load_microcode(rdev);
8302 r = cik_pcie_gart_enable(rdev);
8305 cik_gpu_init(rdev);
8308 if (rdev->flags & RADEON_IS_IGP) {
8309 if (rdev->family == CHIP_KAVERI) {
8310 rdev->rlc.reg_list = spectre_rlc_save_restore_register_list;
8311 rdev->rlc.reg_list_size =
8314 rdev->rlc.reg_list = kalindi_rlc_save_restore_register_list;
8315 rdev->rlc.reg_list_size =
8319 rdev->rlc.cs_data = ci_cs_data;
8320 rdev->rlc.cp_table_size = ALIGN(CP_ME_TABLE_SIZE * 5 * 4, 2048); /* CP JT */
8321 rdev->rlc.cp_table_size += 64 * 1024; /* GDS */
8322 r = sumo_rlc_init(rdev);
8329 r = radeon_wb_init(rdev);
8334 r = cik_mec_init(rdev);
8340 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
8342 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
8346 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
8348 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
8352 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
8354 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
8358 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
8360 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
8364 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
8366 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
8370 cik_uvd_start(rdev);
8371 cik_vce_start(rdev);
8374 if (!rdev->irq.installed) {
8375 r = radeon_irq_kms_init(rdev);
8380 r = cik_irq_init(rdev);
8383 radeon_irq_kms_fini(rdev);
8386 cik_irq_set(rdev);
8388 if (rdev->family == CHIP_HAWAII) {
8389 if (rdev->new_fw)
8397 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
8398 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
8405 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
8406 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
8416 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
8417 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
8427 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
8428 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
8433 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
8434 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
8439 r = cik_cp_resume(rdev);
8443 r = cik_sdma_resume(rdev);
8447 cik_uvd_resume(rdev);
8448 cik_vce_resume(rdev);
8450 r = radeon_ib_pool_init(rdev);
8452 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
8456 r = radeon_vm_manager_init(rdev);
8458 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
8462 r = radeon_audio_init(rdev);
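cik_startup brings the blocks up in strict dependency order (memory controller and microcode, then GART, rings and fences, UVD/VCE, interrupts, and finally the IB pool, VM manager, and audio), checking r after every step and returning at the first failure; unwinding is left to the caller. A condensed sketch of the early-return chain, step names hypothetical:

    struct dev;
    int mem_controller_init(struct dev *d);    /* hypothetical steps */
    int gart_enable(struct dev *d);
    int rings_init(struct dev *d);
    int ib_pool_init(struct dev *d);
    int vm_manager_init(struct dev *d);

    static int startup(struct dev *d)
    {
            int r;

            r = mem_controller_init(d);  /* MC before GART */
            if (r)
                    return r;
            r = gart_enable(d);          /* GART before any ring traffic */
            if (r)
                    return r;
            r = rings_init(d);           /* rings before the IB pool */
            if (r)
                    return r;
            r = ib_pool_init(d);         /* IBs before the VM manager */
            if (r)
                    return r;
            return vm_manager_init(d);   /* caller unwinds on failure */
    }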
8472 * @rdev: radeon_device pointer
8478 int cik_resume(struct radeon_device *rdev)
8483 atom_asic_init(rdev->mode_info.atom_context);
8486 cik_init_golden_registers(rdev);
8488 if (rdev->pm.pm_method == PM_METHOD_DPM)
8489 radeon_pm_resume(rdev);
8491 rdev->accel_working = true;
8492 r = cik_startup(rdev);
8495 rdev->accel_working = false;
8506 * @rdev: radeon_device pointer
8512 int cik_suspend(struct radeon_device *rdev)
8514 radeon_pm_suspend(rdev);
8515 radeon_audio_fini(rdev);
8516 radeon_vm_manager_fini(rdev);
8517 cik_cp_enable(rdev, false);
8518 cik_sdma_enable(rdev, false);
8519 if (rdev->has_uvd) {
8520 radeon_uvd_suspend(rdev);
8521 uvd_v1_0_fini(rdev);
8523 if (rdev->has_vce)
8524 radeon_vce_suspend(rdev);
8525 cik_fini_pg(rdev);
8526 cik_fini_cg(rdev);
8527 cik_irq_suspend(rdev);
8528 radeon_wb_disable(rdev);
8529 cik_pcie_gart_disable(rdev);
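cik_suspend is roughly the mirror image: power management and audio go quiet first, the engines stop accepting work, UVD/VCE and the PG/CG state are quiesced, interrupts are masked, and the GART is disabled last so nothing touches translated memory on the way down. Sketch, with hypothetical step names:

    struct dev;
    void pm_suspend(struct dev *d);        /* hypothetical teardown steps */
    void engines_halt(struct dev *d);
    void blocks_quiesce(struct dev *d);
    void irqs_mask(struct dev *d);
    void writeback_disable(struct dev *d);
    void gart_disable(struct dev *d);

    static int suspend(struct dev *d)
    {
            pm_suspend(d);        /* stop power-management activity first */
            engines_halt(d);      /* CP/SDMA stop accepting work */
            blocks_quiesce(d);    /* UVD/VCE, then PG/CG */
            irqs_mask(d);         /* no interrupts once rings are gone */
            writeback_disable(d);
            gart_disable(d);      /* translation goes down last */
            return 0;
    }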
8542 * @rdev: radeon_device pointer
8549 int cik_init(struct radeon_device *rdev)
8555 if (!radeon_get_bios(rdev)) {
8556 if (ASIC_IS_AVIVO(rdev))
8560 if (!rdev->is_atom_bios) {
8561 dev_err(rdev->dev, "Expecting atombios for CIK GPU\n");
8564 r = radeon_atombios_init(rdev);
8569 if (!radeon_card_posted(rdev)) {
8570 if (!rdev->bios) {
8571 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
8575 atom_asic_init(rdev->mode_info.atom_context);
8578 cik_init_golden_registers(rdev);
8580 cik_scratch_init(rdev);
8582 radeon_surface_init(rdev);
8584 radeon_get_clock_info(rdev->ddev);
8587 radeon_fence_driver_init(rdev);
8590 r = cik_mc_init(rdev);
8594 r = radeon_bo_init(rdev);
8598 if (rdev->flags & RADEON_IS_IGP) {
8599 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
8600 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw) {
8601 r = cik_init_microcode(rdev);
8608 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
8609 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw ||
8610 !rdev->mc_fw) {
8611 r = cik_init_microcode(rdev);
8620 radeon_pm_init(rdev);
8622 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
8624 r600_ring_init(rdev, ring, 1024 * 1024);
8626 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
8628 r600_ring_init(rdev, ring, 1024 * 1024);
8629 r = radeon_doorbell_get(rdev, &ring->doorbell_index);
8633 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
8635 r600_ring_init(rdev, ring, 1024 * 1024);
8636 r = radeon_doorbell_get(rdev, &ring->doorbell_index);
8640 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
8642 r600_ring_init(rdev, ring, 256 * 1024);
8644 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
8646 r600_ring_init(rdev, ring, 256 * 1024);
8648 cik_uvd_init(rdev);
8649 cik_vce_init(rdev);
8651 rdev->ih.ring_obj = NULL;
8652 r600_ih_ring_init(rdev, 64 * 1024);
8654 r = r600_pcie_gart_init(rdev);
8658 rdev->accel_working = true;
8659 r = cik_startup(rdev);
8661 dev_err(rdev->dev, "disabling GPU acceleration\n");
8662 cik_cp_fini(rdev);
8663 cik_sdma_fini(rdev);
8664 cik_irq_fini(rdev);
8665 sumo_rlc_fini(rdev);
8666 cik_mec_fini(rdev);
8667 radeon_wb_fini(rdev);
8668 radeon_ib_pool_fini(rdev);
8669 radeon_vm_manager_fini(rdev);
8670 radeon_irq_kms_fini(rdev);
8671 cik_pcie_gart_fini(rdev);
8672 rdev->accel_working = false;
8679 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
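Unlike cik_startup, cik_init owns cleanup: when startup fails it runs the *_fini chain in reverse dependency order, clears accel_working, and still returns success so the device remains usable for modesetting without acceleration. A sketch of that owner-cleans-up idiom, names hypothetical:

    #include <stdbool.h>

    struct dev { bool accel_working; };
    int  startup(struct dev *d);       /* hypothetical, per earlier sketch */
    void rings_fini(struct dev *d);
    void ib_pool_fini(struct dev *d);
    void gart_fini(struct dev *d);
    void log_err(struct dev *d, const char *msg);

    static int device_init(struct dev *d)
    {
            d->accel_working = true;
            if (startup(d)) {
                    log_err(d, "disabling GPU acceleration\n");
                    rings_fini(d);     /* unwind in reverse order */
                    ib_pool_fini(d);
                    gart_fini(d);
                    d->accel_working = false;
            }
            return 0;  /* modesetting still works without acceleration */
    }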
8690 * @rdev: radeon_device pointer
8696 void cik_fini(struct radeon_device *rdev)
8698 radeon_pm_fini(rdev);
8699 cik_cp_fini(rdev);
8700 cik_sdma_fini(rdev);
8701 cik_fini_pg(rdev);
8702 cik_fini_cg(rdev);
8703 cik_irq_fini(rdev);
8704 sumo_rlc_fini(rdev);
8705 cik_mec_fini(rdev);
8706 radeon_wb_fini(rdev);
8707 radeon_vm_manager_fini(rdev);
8708 radeon_ib_pool_fini(rdev);
8709 radeon_irq_kms_fini(rdev);
8710 uvd_v1_0_fini(rdev);
8711 radeon_uvd_fini(rdev);
8712 radeon_vce_fini(rdev);
8713 cik_pcie_gart_fini(rdev);
8714 r600_vram_scratch_fini(rdev);
8715 radeon_gem_fini(rdev);
8716 radeon_fence_driver_fini(rdev);
8717 radeon_bo_fini(rdev);
8718 radeon_atombios_fini(rdev);
8719 kfree(rdev->bios);
8720 rdev->bios = NULL;
8726 struct radeon_device *rdev = dev->dev_private;
8791 * @rdev: radeon_device pointer
8800 static u32 dce8_line_buffer_adjust(struct radeon_device *rdev,
8823 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4;
8827 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4;
8839 for (i = 0; i < rdev->usec_timeout; i++) {
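The for loop above is the driver's standard bounded poll: retry a register test up to rdev->usec_timeout iterations before giving up, rather than spinning forever on wedged hardware. A generic version of the idiom, with read_status() and udelay() standing in for the real primitives:

    #include <stdint.h>

    uint32_t read_status(void);      /* hypothetical register read */
    void udelay(unsigned int us);    /* hypothetical microsecond delay */

    static int poll_until_set(uint32_t mask, unsigned int usec_timeout)
    {
            unsigned int i;

            for (i = 0; i < usec_timeout; i++) {
                    if (read_status() & mask)
                            return 0;   /* condition reached in time */
                    udelay(1);
            }
            return -1;                  /* hardware never responded */
    }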
8865 * @rdev: radeon_device pointer
8871 static u32 cik_get_number_of_dram_channels(struct radeon_device *rdev)
9221 * @rdev: radeon_device pointer
9229 static void dce8_program_watermarks(struct radeon_device *rdev,
9248 if ((rdev->pm.pm_method == PM_METHOD_DPM) &&
9249 rdev->pm.dpm_enabled) {
9251 radeon_dpm_get_mclk(rdev, false) * 10;
9253 radeon_dpm_get_sclk(rdev, false) * 10;
9255 wm_high.yclk = rdev->pm.current_mclk * 10;
9256 wm_high.sclk = rdev->pm.current_sclk * 10;
9272 wm_high.dram_channels = cik_get_number_of_dram_channels(rdev);
9283 (rdev->disp_priority == 2)) {
9288 if ((rdev->pm.pm_method == PM_METHOD_DPM) &&
9289 rdev->pm.dpm_enabled) {
9291 radeon_dpm_get_mclk(rdev, true) * 10;
9293 radeon_dpm_get_sclk(rdev, true) * 10;
9295 wm_low.yclk = rdev->pm.current_mclk * 10;
9296 wm_low.sclk = rdev->pm.current_sclk * 10;
9312 wm_low.dram_channels = cik_get_number_of_dram_channels(rdev);
9323 (rdev->disp_priority == 2)) {
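Both watermark sets pick their clocks the same way: when DPM is the active power-management method, use the worst-case DPM memory/engine clock level for that set; otherwise fall back to the current fixed clocks. The * 10 presumably scales the PM code's 10 kHz units up to kHz. Sketch, names hypothetical:

    #include <stdbool.h>
    #include <stdint.h>

    struct dev {
            bool dpm_enabled;
            uint32_t current_mclk, current_sclk;  /* in 10 kHz units */
    };
    uint32_t dpm_get_mclk(const struct dev *d, bool low);  /* hypothetical */
    uint32_t dpm_get_sclk(const struct dev *d, bool low);

    static void pick_wm_clocks(const struct dev *d, bool low,
                               uint32_t *yclk, uint32_t *sclk)
    {
            if (d->dpm_enabled) {
                    /* worst-case DPM level for this watermark set */
                    *yclk = dpm_get_mclk(d, low) * 10;
                    *sclk = dpm_get_sclk(d, low) * 10;
            } else {
                    *yclk = d->current_mclk * 10;  /* fixed current clocks */
                    *sclk = d->current_sclk * 10;
            }
    }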
9360 * @rdev: radeon_device pointer
9365 void dce8_bandwidth_update(struct radeon_device *rdev)
9371 if (!rdev->mode_info.mode_config_initialized)
9374 radeon_update_display_priority(rdev);
9376 for (i = 0; i < rdev->num_crtc; i++) {
9377 if (rdev->mode_info.crtcs[i]->base.enabled)
9380 for (i = 0; i < rdev->num_crtc; i++) {
9381 mode = &rdev->mode_info.crtcs[i]->base.mode;
9382 lb_size = dce8_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode);
9383 dce8_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
9390 * @rdev: radeon_device pointer
9395 uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev)
9399 mutex_lock(&rdev->gpu_clock_mutex);
9403 mutex_unlock(&rdev->gpu_clock_mutex);
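cik_get_gpu_clock_counter serializes the whole latch-and-read sequence behind gpu_clock_mutex, since the 64-bit counter is exposed as two 32-bit halves and a concurrent latch would tear the snapshot. A userspace-flavoured model of the pattern, accessor names hypothetical:

    #include <stdint.h>
    #include <pthread.h>

    struct dev { pthread_mutex_t clock_mutex; };
    void latch_counter(struct dev *d);   /* hypothetical register accessors */
    uint32_t read_counter_lo(struct dev *d);
    uint32_t read_counter_hi(struct dev *d);

    static uint64_t read_clock_counter(struct dev *d)
    {
            uint64_t clock;

            pthread_mutex_lock(&d->clock_mutex);  /* one latch at a time */
            latch_counter(d);                     /* snapshot both halves */
            clock = (uint64_t)read_counter_lo(d) |
                    ((uint64_t)read_counter_hi(d) << 32);
            pthread_mutex_unlock(&d->clock_mutex);
            return clock;
    }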
9407 static int cik_set_uvd_clock(struct radeon_device *rdev, u32 clock,
9414 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_GPUCLK_INPUT_FLAG_DEFAULT_GPUCLK,
9435 int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
9439 r = cik_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
9443 r = cik_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
9447 int cik_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk)
9453 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_GPUCLK_INPUT_FLAG_DEFAULT_GPUCLK,
9482 static void cik_pcie_gen3_enable(struct radeon_device *rdev)
9484 struct pci_dev *root = rdev->pdev->bus->self;
9490 if (pci_is_root_bus(rdev->pdev->bus))
9496 if (rdev->flags & RADEON_IS_IGP)
9499 if (!(rdev->flags & RADEON_IS_PCIE))
9527 if (!pci_is_pcie(root) || !pci_is_pcie(rdev->pdev))
9538 pcie_capability_set_word(rdev->pdev, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD);
9556 pcie_capability_read_word(rdev->pdev,
9564 pcie_capability_read_word(rdev->pdev,
9570 pcie_capability_read_word(rdev->pdev,
9589 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL,
9606 pcie_capability_read_word(rdev->pdev,
9614 pcie_capability_write_word(rdev->pdev,
9630 pcie_capability_read_word(rdev->pdev, PCI_EXP_LNKCTL2, &tmp16);
9638 pcie_capability_write_word(rdev->pdev, PCI_EXP_LNKCTL2, tmp16);
9644 for (i = 0; i < rdev->usec_timeout; i++) {
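The gen3 path sticks to the standard pcie_capability_* accessors for both the root port and the device: read LNKCTL/LNKCTL2, set the target link speed, trigger retraining, then reuse the bounded usec_timeout poll to wait for training to finish. A minimal sketch of retargeting the speed with those real helpers, assuming the PCI_EXP_LNKCTL2_TLS* definitions from linux/pci_regs.h:

    #include <linux/pci.h>

    static void set_target_speed_gen3(struct pci_dev *pdev)
    {
            u16 lnkctl2;

            pcie_capability_read_word(pdev, PCI_EXP_LNKCTL2, &lnkctl2);
            lnkctl2 &= ~PCI_EXP_LNKCTL2_TLS;       /* clear target speed */
            lnkctl2 |= PCI_EXP_LNKCTL2_TLS_8_0GT;  /* request 8.0 GT/s */
            pcie_capability_write_word(pdev, PCI_EXP_LNKCTL2, lnkctl2);
            /* caller then triggers retraining and polls for completion */
    }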
9652 static void cik_program_aspm(struct radeon_device *rdev)
9662 if (rdev->flags & RADEON_IS_IGP)
9665 if (!(rdev->flags & RADEON_IS_PCIE))
9730 !pci_is_root_bus(rdev->pdev->bus)) {
9731 struct pci_dev *root = rdev->pdev->bus->self;