Lines matching defs:rdev in drivers/gpu/drm/radeon/cik.c
127 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
128 extern void r600_ih_ring_fini(struct radeon_device *rdev);
129 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
130 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
131 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
132 extern void sumo_rlc_fini(struct radeon_device *rdev);
133 extern int sumo_rlc_init(struct radeon_device *rdev);
134 extern void si_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
135 extern void si_rlc_reset(struct radeon_device *rdev);
136 extern void si_init_uvd_internal_cg(struct radeon_device *rdev);
137 static u32 cik_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
138 extern int cik_sdma_resume(struct radeon_device *rdev);
139 extern void cik_sdma_enable(struct radeon_device *rdev, bool enable);
140 extern void cik_sdma_fini(struct radeon_device *rdev);
141 extern void vce_v2_0_enable_mgcg(struct radeon_device *rdev, bool enable);
142 static void cik_rlc_stop(struct radeon_device *rdev);
143 static void cik_pcie_gen3_enable(struct radeon_device *rdev);
144 static void cik_program_aspm(struct radeon_device *rdev);
145 static void cik_init_pg(struct radeon_device *rdev);
146 static void cik_init_cg(struct radeon_device *rdev);
147 static void cik_fini_pg(struct radeon_device *rdev);
148 static void cik_fini_cg(struct radeon_device *rdev);
149 static void cik_enable_gui_idle_interrupt(struct radeon_device *rdev,
155 * @rdev: radeon_device pointer
162 int cik_get_allowed_info_register(struct radeon_device *rdev,
188 u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg)
193 spin_lock_irqsave(&rdev->didt_idx_lock, flags);
196 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
200 void cik_didt_wreg(struct radeon_device *rdev, u32 reg, u32 v)
204 spin_lock_irqsave(&rdev->didt_idx_lock, flags);
207 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
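The two DIDT accessors above (188-209) are the standard indexed-register idiom: take the index spinlock with interrupts off, program the index register, touch the data register, unlock. The filter drops most of the bodies; a minimal sketch of the read side, assuming the CG_DIDT_IND_INDEX/CG_DIDT_IND_DATA register names from cikd.h:

    u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg)
    {
        unsigned long flags;
        u32 r;

        /* index/data pairs must not interleave with other DIDT accesses */
        spin_lock_irqsave(&rdev->didt_idx_lock, flags);
        WREG32(CG_DIDT_IND_INDEX, (reg));
        r = RREG32(CG_DIDT_IND_DATA);
        spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
        return r;
    }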
211 int ci_get_temp(struct radeon_device *rdev)
228 int kv_get_temp(struct radeon_device *rdev)
246 u32 cik_pciep_rreg(struct radeon_device *rdev, u32 reg)
251 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
255 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
259 void cik_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
263 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
268 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
1628 static void cik_init_golden_registers(struct radeon_device *rdev)
1630 switch (rdev->family) {
1632 radeon_program_register_sequence(rdev,
1635 radeon_program_register_sequence(rdev,
1638 radeon_program_register_sequence(rdev,
1641 radeon_program_register_sequence(rdev,
1646 radeon_program_register_sequence(rdev,
1649 radeon_program_register_sequence(rdev,
1652 radeon_program_register_sequence(rdev,
1655 radeon_program_register_sequence(rdev,
1660 radeon_program_register_sequence(rdev,
1663 radeon_program_register_sequence(rdev,
1666 radeon_program_register_sequence(rdev,
1669 radeon_program_register_sequence(rdev,
1674 radeon_program_register_sequence(rdev,
1677 radeon_program_register_sequence(rdev,
1680 radeon_program_register_sequence(rdev,
1683 radeon_program_register_sequence(rdev,
1688 radeon_program_register_sequence(rdev,
1691 radeon_program_register_sequence(rdev,
1694 radeon_program_register_sequence(rdev,
1697 radeon_program_register_sequence(rdev,
1709 * @rdev: radeon_device pointer
1714 u32 cik_get_xclk(struct radeon_device *rdev)
1716 u32 reference_clock = rdev->clock.spll.reference_freq;
1718 if (rdev->flags & RADEON_IS_IGP) {
1731 * @rdev: radeon_device pointer
1737 u32 cik_mm_rdoorbell(struct radeon_device *rdev, u32 index)
1739 if (index < rdev->doorbell.num_doorbells) {
1740 return readl(rdev->doorbell.ptr + index);
1750 * @rdev: radeon_device pointer
1757 void cik_mm_wdoorbell(struct radeon_device *rdev, u32 index, u32 v)
1759 if (index < rdev->doorbell.num_doorbells) {
1760 writel(v, rdev->doorbell.ptr + index);
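Only the in-range branch of each doorbell helper matches the filter; the elided else branch logs an error and, on the read side, returns 0. Approximately:

    u32 cik_mm_rdoorbell(struct radeon_device *rdev, u32 index)
    {
        if (index < rdev->doorbell.num_doorbells) {
            return readl(rdev->doorbell.ptr + index);
        } else {
            DRM_ERROR("reading beyond doorbell aperture: 0x%08x!\n", index);
            return 0;
        }
    }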
1840 * @rdev: radeon_device pointer
1850 static void cik_srbm_select(struct radeon_device *rdev,
1864 * @rdev: radeon_device pointer
1869 int ci_mc_load_microcode(struct radeon_device *rdev)
1878 if (!rdev->mc_fw)
1881 if (rdev->new_fw) {
1883 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data;
1889 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes));
1892 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
1894 ucode_size = rdev->mc_fw->size / 4;
1896 switch (rdev->family) {
1908 fw_data = (const __be32 *)rdev->mc_fw->data;
1920 if (rdev->new_fw) {
1930 if ((rdev->pdev->device == 0x6649) && ((tmp & 0xff00) == 0x5600)) {
1939 if (rdev->new_fw)
1951 for (i = 0; i < rdev->usec_timeout; i++) {
1956 for (i = 0; i < rdev->usec_timeout; i++) {
1969 * @rdev: radeon_device pointer
1975 static int cik_init_microcode(struct radeon_device *rdev)
1990 switch (rdev->family) {
1993 if ((rdev->pdev->revision == 0x80) ||
1994 (rdev->pdev->revision == 0x81) ||
1995 (rdev->pdev->device == 0x665f))
2011 if (rdev->pdev->revision == 0x80)
2064 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2067 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2070 if (rdev->pfp_fw->size != pfp_req_size) {
2072 rdev->pfp_fw->size, fw_name);
2077 err = radeon_ucode_validate(rdev->pfp_fw);
2088 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2091 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2094 if (rdev->me_fw->size != me_req_size) {
2096 rdev->me_fw->size, fw_name);
2100 err = radeon_ucode_validate(rdev->me_fw);
2111 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
2114 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
2117 if (rdev->ce_fw->size != ce_req_size) {
2119 rdev->ce_fw->size, fw_name);
2123 err = radeon_ucode_validate(rdev->ce_fw);
2134 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev);
2137 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev);
2140 if (rdev->mec_fw->size != mec_req_size) {
2142 rdev->mec_fw->size, fw_name);
2146 err = radeon_ucode_validate(rdev->mec_fw);
2156 if (rdev->family == CHIP_KAVERI) {
2158 err = request_firmware(&rdev->mec2_fw, fw_name, rdev->dev);
2162 err = radeon_ucode_validate(rdev->mec2_fw);
2172 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2175 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2178 if (rdev->rlc_fw->size != rlc_req_size) {
2180 rdev->rlc_fw->size, fw_name);
2184 err = radeon_ucode_validate(rdev->rlc_fw);
2195 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev);
2198 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev);
2201 if (rdev->sdma_fw->size != sdma_req_size) {
2203 rdev->sdma_fw->size, fw_name);
2207 err = radeon_ucode_validate(rdev->sdma_fw);
2218 if (!(rdev->flags & RADEON_IS_IGP)) {
2220 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
2223 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
2226 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
2230 if ((rdev->mc_fw->size != mc_req_size) &&
2231 (rdev->mc_fw->size != mc2_req_size)) {
2233 rdev->mc_fw->size, fw_name);
2236 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size);
2238 err = radeon_ucode_validate(rdev->mc_fw);
2252 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2255 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2259 release_firmware(rdev->smc_fw);
2260 rdev->smc_fw = NULL;
2262 } else if (rdev->smc_fw->size != smc_req_size) {
2264 rdev->smc_fw->size, fw_name);
2268 err = radeon_ucode_validate(rdev->smc_fw);
2280 rdev->new_fw = false;
2285 rdev->new_fw = true;
2293 release_firmware(rdev->pfp_fw);
2294 rdev->pfp_fw = NULL;
2295 release_firmware(rdev->me_fw);
2296 rdev->me_fw = NULL;
2297 release_firmware(rdev->ce_fw);
2298 rdev->ce_fw = NULL;
2299 release_firmware(rdev->mec_fw);
2300 rdev->mec_fw = NULL;
2301 release_firmware(rdev->mec2_fw);
2302 rdev->mec2_fw = NULL;
2303 release_firmware(rdev->rlc_fw);
2304 rdev->rlc_fw = NULL;
2305 release_firmware(rdev->sdma_fw);
2306 rdev->sdma_fw = NULL;
2307 release_firmware(rdev->mc_fw);
2308 rdev->mc_fw = NULL;
2309 release_firmware(rdev->smc_fw);
2310 rdev->smc_fw = NULL;
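cik_init_microcode (1975-2316) repeats one pattern per blob (PFP, ME, CE, MEC, RLC, SDMA, MC, SMC): request the new-style firmware name, fall back to the legacy name, then either size-check the legacy blob or run radeon_ucode_validate() on the new one. A sketch of a single iteration, using the new_chip_name/chip_name/pfp_req_size locals the surrounding function already defines:

    snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", new_chip_name);
    err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
    if (err) {
        /* fall back to the old, all-caps firmware file name */
        snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
        err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
        if (err)
            goto out;
        /* legacy blobs carry no header, so only the size can be checked */
        if (rdev->pfp_fw->size != pfp_req_size) {
            pr_err("cik_cp: Bogus length %zu in firmware \"%s\"\n",
                   rdev->pfp_fw->size, fw_name);
            err = -EINVAL;
            goto out;
        }
    } else {
        err = radeon_ucode_validate(rdev->pfp_fw);
        if (err)
            goto out;
        new_fw++;    /* rdev->new_fw is set at the end from this count */
    }

On the error path (2293-2310) every firmware pointer is released and NULLed so a later retry starts clean.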
2321 * @rdev: radeon_device pointer
2329 static void cik_tiling_mode_table_init(struct radeon_device *rdev)
2331 u32 *tile = rdev->config.cik.tile_mode_array;
2332 u32 *macrotile = rdev->config.cik.macrotile_mode_array;
2334 ARRAY_SIZE(rdev->config.cik.tile_mode_array);
2336 ARRAY_SIZE(rdev->config.cik.macrotile_mode_array);
2339 u32 num_rbs = rdev->config.cik.max_backends_per_se *
2340 rdev->config.cik.max_shader_engines;
2342 switch (rdev->config.cik.mem_row_size_in_kb) {
2355 num_pipe_configs = rdev->config.cik.max_tile_pipes;
3027 * @rdev: radeon_device pointer
3035 static void cik_select_se_sh(struct radeon_device *rdev,
3073 * @rdev: radeon_device pointer
3081 static u32 cik_get_rb_disabled(struct radeon_device *rdev,
3104 * @rdev: radeon_device pointer
3111 static void cik_setup_rb(struct radeon_device *rdev,
3122 cik_select_se_sh(rdev, i, j);
3123 data = cik_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se);
3124 if (rdev->family == CHIP_HAWAII)
3130 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
3139 rdev->config.cik.backend_enable_mask = enabled_rbs;
3142 cik_select_se_sh(rdev, i, 0xffffffff);
3167 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
3173 * @rdev: radeon_device pointer
3178 static void cik_gpu_init(struct radeon_device *rdev)
3186 switch (rdev->family) {
3188 rdev->config.cik.max_shader_engines = 2;
3189 rdev->config.cik.max_tile_pipes = 4;
3190 rdev->config.cik.max_cu_per_sh = 7;
3191 rdev->config.cik.max_sh_per_se = 1;
3192 rdev->config.cik.max_backends_per_se = 2;
3193 rdev->config.cik.max_texture_channel_caches = 4;
3194 rdev->config.cik.max_gprs = 256;
3195 rdev->config.cik.max_gs_threads = 32;
3196 rdev->config.cik.max_hw_contexts = 8;
3198 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3199 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3200 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3201 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3205 rdev->config.cik.max_shader_engines = 4;
3206 rdev->config.cik.max_tile_pipes = 16;
3207 rdev->config.cik.max_cu_per_sh = 11;
3208 rdev->config.cik.max_sh_per_se = 1;
3209 rdev->config.cik.max_backends_per_se = 4;
3210 rdev->config.cik.max_texture_channel_caches = 16;
3211 rdev->config.cik.max_gprs = 256;
3212 rdev->config.cik.max_gs_threads = 32;
3213 rdev->config.cik.max_hw_contexts = 8;
3215 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3216 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3217 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3218 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3222 rdev->config.cik.max_shader_engines = 1;
3223 rdev->config.cik.max_tile_pipes = 4;
3224 rdev->config.cik.max_cu_per_sh = 8;
3225 rdev->config.cik.max_backends_per_se = 2;
3226 rdev->config.cik.max_sh_per_se = 1;
3227 rdev->config.cik.max_texture_channel_caches = 4;
3228 rdev->config.cik.max_gprs = 256;
3229 rdev->config.cik.max_gs_threads = 16;
3230 rdev->config.cik.max_hw_contexts = 8;
3232 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3233 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3234 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3235 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3241 rdev->config.cik.max_shader_engines = 1;
3242 rdev->config.cik.max_tile_pipes = 2;
3243 rdev->config.cik.max_cu_per_sh = 2;
3244 rdev->config.cik.max_sh_per_se = 1;
3245 rdev->config.cik.max_backends_per_se = 1;
3246 rdev->config.cik.max_texture_channel_caches = 2;
3247 rdev->config.cik.max_gprs = 256;
3248 rdev->config.cik.max_gs_threads = 16;
3249 rdev->config.cik.max_hw_contexts = 8;
3251 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20;
3252 rdev->config.cik.sc_prim_fifo_size_backend = 0x100;
3253 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30;
3254 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130;
3277 rdev->config.cik.num_tile_pipes = rdev->config.cik.max_tile_pipes;
3278 rdev->config.cik.mem_max_burst_length_bytes = 256;
3280 rdev->config.cik.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
3281 if (rdev->config.cik.mem_row_size_in_kb > 4)
3282 rdev->config.cik.mem_row_size_in_kb = 4;
3284 rdev->config.cik.shader_engine_tile_size = 32;
3285 rdev->config.cik.num_gpus = 1;
3286 rdev->config.cik.multi_gpu_tile_size = 64;
3290 switch (rdev->config.cik.mem_row_size_in_kb) {
3310 rdev->config.cik.tile_config = 0;
3311 switch (rdev->config.cik.num_tile_pipes) {
3313 rdev->config.cik.tile_config |= (0 << 0);
3316 rdev->config.cik.tile_config |= (1 << 0);
3319 rdev->config.cik.tile_config |= (2 << 0);
3324 rdev->config.cik.tile_config |= (3 << 0);
3327 rdev->config.cik.tile_config |=
3329 rdev->config.cik.tile_config |=
3331 rdev->config.cik.tile_config |=
3343 cik_tiling_mode_table_init(rdev);
3345 cik_setup_rb(rdev, rdev->config.cik.max_shader_engines,
3346 rdev->config.cik.max_sh_per_se,
3347 rdev->config.cik.max_backends_per_se);
3349 rdev->config.cik.active_cus = 0;
3350 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) {
3351 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) {
3352 rdev->config.cik.active_cus +=
3353 hweight32(cik_get_cu_active_bitmap(rdev, i, j));
3386 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_frontend) |
3387 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_backend) |
3388 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cik.sc_hiz_tile_fifo_size) |
3389 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cik.sc_earlyz_tile_fifo_size)));
3425 * @rdev: radeon_device pointer
3432 static void cik_scratch_init(struct radeon_device *rdev)
3436 rdev->scratch.num_reg = 7;
3437 rdev->scratch.reg_base = SCRATCH_REG0;
3438 for (i = 0; i < rdev->scratch.num_reg; i++) {
3439 rdev->scratch.free[i] = true;
3440 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
3447 * @rdev: radeon_device pointer
3455 int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
3462 r = radeon_scratch_get(rdev, &scratch);
3468 r = radeon_ring_lock(rdev, ring, 3);
3471 radeon_scratch_free(rdev, scratch);
3477 radeon_ring_unlock_commit(rdev, ring, false);
3479 for (i = 0; i < rdev->usec_timeout; i++) {
3485 if (i < rdev->usec_timeout) {
3492 radeon_scratch_free(rdev, scratch);
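Between the matched lines, cik_ring_test seeds the scratch register with 0xCAFEDEAD, emits a three-dword SET_UCONFIG_REG packet that rewrites it from the ring, and polls until 0xDEADBEEF appears. The elided core, roughly (packet encoding assumed from the CIK PM4 definitions in cikd.h):

    WREG32(scratch, 0xCAFEDEAD);
    radeon_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
    radeon_ring_write(ring, ((scratch - PACKET3_SET_UCONFIG_REG_START) >> 2));
    radeon_ring_write(ring, 0xDEADBEEF);
    radeon_ring_unlock_commit(rdev, ring, false);

    for (i = 0; i < rdev->usec_timeout; i++) {
        tmp = RREG32(scratch);
        if (tmp == 0xDEADBEEF)
            break;
        udelay(1);
    }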
3499 * @rdev: radeon_device pointer
3504 static void cik_hdp_flush_cp_ring_emit(struct radeon_device *rdev,
3507 struct radeon_ring *ring = &rdev->ring[ridx];
3544 * @rdev: radeon_device pointer
3550 void cik_fence_gfx_ring_emit(struct radeon_device *rdev,
3553 struct radeon_ring *ring = &rdev->ring[fence->ring];
3554 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3585 * @rdev: radeon_device pointer
3591 void cik_fence_compute_ring_emit(struct radeon_device *rdev,
3594 struct radeon_ring *ring = &rdev->ring[fence->ring];
3595 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3613 * @rdev: radeon_device pointer
3621 bool cik_semaphore_ring_emit(struct radeon_device *rdev,
3645 * @rdev: radeon_device pointer
3655 struct radeon_fence *cik_copy_cpdma(struct radeon_device *rdev,
3662 int ring_index = rdev->asic->copy.blit_ring_index;
3663 struct radeon_ring *ring = &rdev->ring[ring_index];
3672 r = radeon_ring_lock(rdev, ring, num_loops * 7 + 18);
3675 radeon_sync_free(rdev, &sync, NULL);
3679 radeon_sync_resv(rdev, &sync, resv, false);
3680 radeon_sync_rings(rdev, &sync, ring->idx);
3701 r = radeon_fence_emit(rdev, &fence, ring->idx);
3703 radeon_ring_unlock_undo(rdev, ring);
3704 radeon_sync_free(rdev, &sync, NULL);
3708 radeon_ring_unlock_commit(rdev, ring, false);
3709 radeon_sync_free(rdev, &sync, fence);
3720 * @rdev: radeon_device pointer
3729 void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3731 struct radeon_ring *ring = &rdev->ring[ib->ring];
3749 } else if (rdev->wb.enabled) {
3772 * @rdev: radeon_device pointer
3779 int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
3787 r = radeon_scratch_get(rdev, &scratch);
3793 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
3796 radeon_scratch_free(rdev, scratch);
3803 r = radeon_ib_schedule(rdev, &ib, NULL, false);
3805 radeon_scratch_free(rdev, scratch);
3806 radeon_ib_free(rdev, &ib);
3814 radeon_scratch_free(rdev, scratch);
3815 radeon_ib_free(rdev, &ib);
3819 radeon_scratch_free(rdev, scratch);
3820 radeon_ib_free(rdev, &ib);
3824 for (i = 0; i < rdev->usec_timeout; i++) {
3830 if (i < rdev->usec_timeout) {
3837 radeon_scratch_free(rdev, scratch);
3838 radeon_ib_free(rdev, &ib);
3868 * @rdev: radeon_device pointer
3873 static void cik_cp_gfx_enable(struct radeon_device *rdev, bool enable)
3878 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3879 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
3881 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3889 * @rdev: radeon_device pointer
3894 static int cik_cp_gfx_load_microcode(struct radeon_device *rdev)
3898 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw)
3901 cik_cp_gfx_enable(rdev, false);
3903 if (rdev->new_fw) {
3905 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data;
3907 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data;
3909 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data;
3919 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes));
3928 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes));
3937 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes));
3948 fw_data = (const __be32 *)rdev->pfp_fw->data;
3955 fw_data = (const __be32 *)rdev->ce_fw->data;
3962 fw_data = (const __be32 *)rdev->me_fw->data;
3975 * @rdev: radeon_device pointer
3981 static int cik_cp_gfx_start(struct radeon_device *rdev)
3983 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3987 WREG32(CP_MAX_CONTEXT, rdev->config.cik.max_hw_contexts - 1);
3991 cik_cp_gfx_enable(rdev, true);
3993 r = radeon_ring_lock(rdev, ring, cik_default_size + 17);
4028 radeon_ring_unlock_commit(rdev, ring, false);
4036 * @rdev: radeon_device pointer
4041 static void cik_cp_gfx_fini(struct radeon_device *rdev)
4043 cik_cp_gfx_enable(rdev, false);
4044 radeon_ring_fini(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
4050 * @rdev: radeon_device pointer
4056 static int cik_cp_gfx_resume(struct radeon_device *rdev)
4065 if (rdev->family != CHIP_HAWAII)
4074 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
4078 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
4092 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
4093 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
4098 if (!rdev->wb.enabled)
4109 cik_cp_gfx_start(rdev);
4110 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
4111 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
4113 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
4117 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
4118 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
4123 u32 cik_gfx_get_rptr(struct radeon_device *rdev,
4128 if (rdev->wb.enabled)
4129 rptr = rdev->wb.wb[ring->rptr_offs/4];
4136 u32 cik_gfx_get_wptr(struct radeon_device *rdev,
4142 void cik_gfx_set_wptr(struct radeon_device *rdev,
4149 u32 cik_compute_get_rptr(struct radeon_device *rdev,
4154 if (rdev->wb.enabled) {
4155 rptr = rdev->wb.wb[ring->rptr_offs/4];
4157 mutex_lock(&rdev->srbm_mutex);
4158 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
4160 cik_srbm_select(rdev, 0, 0, 0, 0);
4161 mutex_unlock(&rdev->srbm_mutex);
4167 u32 cik_compute_get_wptr(struct radeon_device *rdev,
4172 if (rdev->wb.enabled) {
4174 wptr = rdev->wb.wb[ring->wptr_offs/4];
4176 mutex_lock(&rdev->srbm_mutex);
4177 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
4179 cik_srbm_select(rdev, 0, 0, 0, 0);
4180 mutex_unlock(&rdev->srbm_mutex);
4186 void cik_compute_set_wptr(struct radeon_device *rdev,
4190 rdev->wb.wb[ring->wptr_offs/4] = ring->wptr;
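The compute set_wptr (matched at 4190) has one filtered companion line: after publishing the new write pointer through the writeback page it kicks the MEC by ringing the queue's doorbell. A sketch:

    void cik_compute_set_wptr(struct radeon_device *rdev,
                              struct radeon_ring *ring)
    {
        /* publish the write pointer, then notify the MEC via doorbell */
        rdev->wb.wb[ring->wptr_offs/4] = ring->wptr;
        WDOORBELL32(ring->doorbell_index, ring->wptr);
    }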
4194 static void cik_compute_stop(struct radeon_device *rdev,
4199 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
4207 for (j = 0; j < rdev->usec_timeout; j++) {
4216 cik_srbm_select(rdev, 0, 0, 0, 0);
4222 * @rdev: radeon_device pointer
4227 static void cik_cp_compute_enable(struct radeon_device *rdev, bool enable)
4236 mutex_lock(&rdev->srbm_mutex);
4237 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
4238 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
4239 mutex_unlock(&rdev->srbm_mutex);
4242 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
4243 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
4251 * @rdev: radeon_device pointer
4256 static int cik_cp_compute_load_microcode(struct radeon_device *rdev)
4260 if (!rdev->mec_fw)
4263 cik_cp_compute_enable(rdev, false);
4265 if (rdev->new_fw) {
4267 (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data;
4275 (rdev->mec_fw->data + le32_to_cpu(mec_hdr->header.ucode_array_offset_bytes));
4283 if (rdev->family == CHIP_KAVERI) {
4285 (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data;
4288 (rdev->mec2_fw->data +
4300 fw_data = (const __be32 *)rdev->mec_fw->data;
4306 if (rdev->family == CHIP_KAVERI) {
4308 fw_data = (const __be32 *)rdev->mec_fw->data;
4322 * @rdev: radeon_device pointer
4327 static int cik_cp_compute_start(struct radeon_device *rdev)
4329 cik_cp_compute_enable(rdev, true);
4337 * @rdev: radeon_device pointer
4342 static void cik_cp_compute_fini(struct radeon_device *rdev)
4346 cik_cp_compute_enable(rdev, false);
4354 if (rdev->ring[idx].mqd_obj) {
4355 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false);
4357 dev_warn(rdev->dev, "(%d) reserve MQD bo failed\n", r);
4359 radeon_bo_unpin(rdev->ring[idx].mqd_obj);
4360 radeon_bo_unreserve(rdev->ring[idx].mqd_obj);
4362 radeon_bo_unref(&rdev->ring[idx].mqd_obj);
4363 rdev->ring[idx].mqd_obj = NULL;
4368 static void cik_mec_fini(struct radeon_device *rdev)
4372 if (rdev->mec.hpd_eop_obj) {
4373 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false);
4375 dev_warn(rdev->dev, "(%d) reserve HPD EOP bo failed\n", r);
4376 radeon_bo_unpin(rdev->mec.hpd_eop_obj);
4377 radeon_bo_unreserve(rdev->mec.hpd_eop_obj);
4379 radeon_bo_unref(&rdev->mec.hpd_eop_obj);
4380 rdev->mec.hpd_eop_obj = NULL;
4386 static int cik_mec_init(struct radeon_device *rdev)
4395 if (rdev->family == CHIP_KAVERI)
4396 rdev->mec.num_mec = 2;
4398 rdev->mec.num_mec = 1;
4399 rdev->mec.num_pipe = 4;
4400 rdev->mec.num_queue = rdev->mec.num_mec * rdev->mec.num_pipe * 8;
4402 if (rdev->mec.hpd_eop_obj == NULL) {
4403 r = radeon_bo_create(rdev,
4404 rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2,
4407 &rdev->mec.hpd_eop_obj);
4409 dev_warn(rdev->dev, "(%d) create HPD EOP bo failed\n", r);
4414 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false);
4416 cik_mec_fini(rdev);
4419 r = radeon_bo_pin(rdev->mec.hpd_eop_obj, RADEON_GEM_DOMAIN_GTT,
4420 &rdev->mec.hpd_eop_gpu_addr);
4422 dev_warn(rdev->dev, "(%d) pin HPD EOP bo failed\n", r);
4423 cik_mec_fini(rdev);
4426 r = radeon_bo_kmap(rdev->mec.hpd_eop_obj, (void **)&hpd);
4428 dev_warn(rdev->dev, "(%d) map HPD EOP bo failed\n", r);
4429 cik_mec_fini(rdev);
4434 memset(hpd, 0, rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2);
4436 radeon_bo_kunmap(rdev->mec.hpd_eop_obj);
4437 radeon_bo_unreserve(rdev->mec.hpd_eop_obj);
4512 * @rdev: radeon_device pointer
4518 static int cik_cp_compute_resume(struct radeon_device *rdev)
4530 r = cik_cp_compute_start(rdev);
4540 mutex_lock(&rdev->srbm_mutex);
4542 for (i = 0; i < (rdev->mec.num_pipe * rdev->mec.num_mec); ++i) {
4546 cik_srbm_select(rdev, me, pipe, 0, 0);
4548 eop_gpu_addr = rdev->mec.hpd_eop_gpu_addr + (i * MEC_HPD_SIZE * 2);
4563 cik_srbm_select(rdev, 0, 0, 0, 0);
4564 mutex_unlock(&rdev->srbm_mutex);
4573 if (rdev->ring[idx].mqd_obj == NULL) {
4574 r = radeon_bo_create(rdev,
4578 NULL, &rdev->ring[idx].mqd_obj);
4580 dev_warn(rdev->dev, "(%d) create MQD bo failed\n", r);
4585 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false);
4587 cik_cp_compute_fini(rdev);
4590 r = radeon_bo_pin(rdev->ring[idx].mqd_obj, RADEON_GEM_DOMAIN_GTT,
4593 dev_warn(rdev->dev, "(%d) pin MQD bo failed\n", r);
4594 cik_cp_compute_fini(rdev);
4597 r = radeon_bo_kmap(rdev->ring[idx].mqd_obj, (void **)&buf);
4599 dev_warn(rdev->dev, "(%d) map MQD bo failed\n", r);
4600 cik_cp_compute_fini(rdev);
4614 mutex_lock(&rdev->srbm_mutex);
4615 cik_srbm_select(rdev, rdev->ring[idx].me,
4616 rdev->ring[idx].pipe,
4617 rdev->ring[idx].queue, 0);
4640 for (j = 0; j < rdev->usec_timeout; j++) {
4661 hqd_gpu_addr = rdev->ring[idx].gpu_addr >> 8;
4673 order_base_2(rdev->ring[idx].ring_size / 8);
4687 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP1_WPTR_OFFSET;
4689 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP2_WPTR_OFFSET;
4698 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET;
4700 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET;
4715 DOORBELL_OFFSET(rdev->ring[idx].doorbell_index);
4727 rdev->ring[idx].wptr = 0;
4728 mqd->queue_state.cp_hqd_pq_wptr = rdev->ring[idx].wptr;
4740 cik_srbm_select(rdev, 0, 0, 0, 0);
4741 mutex_unlock(&rdev->srbm_mutex);
4743 radeon_bo_kunmap(rdev->ring[idx].mqd_obj);
4744 radeon_bo_unreserve(rdev->ring[idx].mqd_obj);
4746 rdev->ring[idx].ready = true;
4747 r = radeon_ring_test(rdev, idx, &rdev->ring[idx]);
4749 rdev->ring[idx].ready = false;
4755 static void cik_cp_enable(struct radeon_device *rdev, bool enable)
4757 cik_cp_gfx_enable(rdev, enable);
4758 cik_cp_compute_enable(rdev, enable);
4761 static int cik_cp_load_microcode(struct radeon_device *rdev)
4765 r = cik_cp_gfx_load_microcode(rdev);
4768 r = cik_cp_compute_load_microcode(rdev);
4775 static void cik_cp_fini(struct radeon_device *rdev)
4777 cik_cp_gfx_fini(rdev);
4778 cik_cp_compute_fini(rdev);
4781 static int cik_cp_resume(struct radeon_device *rdev)
4785 cik_enable_gui_idle_interrupt(rdev, false);
4787 r = cik_cp_load_microcode(rdev);
4791 r = cik_cp_gfx_resume(rdev);
4794 r = cik_cp_compute_resume(rdev);
4798 cik_enable_gui_idle_interrupt(rdev, true);
4803 static void cik_print_gpu_status_regs(struct radeon_device *rdev)
4805 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
4807 dev_info(rdev->dev, " GRBM_STATUS2=0x%08X\n",
4809 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
4811 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
4813 dev_info(rdev->dev, " GRBM_STATUS_SE2=0x%08X\n",
4815 dev_info(rdev->dev, " GRBM_STATUS_SE3=0x%08X\n",
4817 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
4819 dev_info(rdev->dev, " SRBM_STATUS2=0x%08X\n",
4821 dev_info(rdev->dev, " SDMA0_STATUS_REG = 0x%08X\n",
4823 dev_info(rdev->dev, " SDMA1_STATUS_REG = 0x%08X\n",
4825 dev_info(rdev->dev, " CP_STAT = 0x%08x\n", RREG32(CP_STAT));
4826 dev_info(rdev->dev, " CP_STALLED_STAT1 = 0x%08x\n",
4828 dev_info(rdev->dev, " CP_STALLED_STAT2 = 0x%08x\n",
4830 dev_info(rdev->dev, " CP_STALLED_STAT3 = 0x%08x\n",
4832 dev_info(rdev->dev, " CP_CPF_BUSY_STAT = 0x%08x\n",
4834 dev_info(rdev->dev, " CP_CPF_STALLED_STAT1 = 0x%08x\n",
4836 dev_info(rdev->dev, " CP_CPF_STATUS = 0x%08x\n", RREG32(CP_CPF_STATUS));
4837 dev_info(rdev->dev, " CP_CPC_BUSY_STAT = 0x%08x\n", RREG32(CP_CPC_BUSY_STAT));
4838 dev_info(rdev->dev, " CP_CPC_STALLED_STAT1 = 0x%08x\n",
4840 dev_info(rdev->dev, " CP_CPC_STATUS = 0x%08x\n", RREG32(CP_CPC_STATUS));
4846 * @rdev: radeon_device pointer
4852 u32 cik_gpu_check_soft_reset(struct radeon_device *rdev)
4912 if (evergreen_is_display_hung(rdev))
4927 * @rdev: radeon_device pointer
4932 static void cik_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
4941 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
4943 cik_print_gpu_status_regs(rdev);
4944 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
4946 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4950 cik_fini_pg(rdev);
4951 cik_fini_cg(rdev);
4954 cik_rlc_stop(rdev);
4975 evergreen_mc_stop(rdev, &save);
4976 if (evergreen_mc_wait_for_idle(rdev)) {
4977 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5013 if (!(rdev->flags & RADEON_IS_IGP)) {
5021 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
5035 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
5049 evergreen_mc_resume(rdev, &save);
5052 cik_print_gpu_status_regs(rdev);
5061 static void kv_save_regs_for_reset(struct radeon_device *rdev,
5073 static void kv_restore_regs_for_reset(struct radeon_device *rdev,
5146 static void cik_gpu_pci_config_reset(struct radeon_device *rdev)
5152 dev_info(rdev->dev, "GPU pci config reset\n");
5157 cik_fini_pg(rdev);
5158 cik_fini_cg(rdev);
5177 cik_rlc_stop(rdev);
5182 evergreen_mc_stop(rdev, &save);
5183 if (evergreen_mc_wait_for_idle(rdev)) {
5184 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5187 if (rdev->flags & RADEON_IS_IGP)
5188 kv_save_regs_for_reset(rdev, &kv_save);
5191 pci_clear_master(rdev->pdev);
5193 radeon_pci_config_reset(rdev);
5198 for (i = 0; i < rdev->usec_timeout; i++) {
5205 if (rdev->flags & RADEON_IS_IGP)
5206 kv_restore_regs_for_reset(rdev, &kv_save);
5212 * @rdev: radeon_device pointer
5219 int cik_asic_reset(struct radeon_device *rdev, bool hard)
5224 cik_gpu_pci_config_reset(rdev);
5228 reset_mask = cik_gpu_check_soft_reset(rdev);
5231 r600_set_bios_scratch_engine_hung(rdev, true);
5234 cik_gpu_soft_reset(rdev, reset_mask);
5236 reset_mask = cik_gpu_check_soft_reset(rdev);
5240 cik_gpu_pci_config_reset(rdev);
5242 reset_mask = cik_gpu_check_soft_reset(rdev);
5245 r600_set_bios_scratch_engine_hung(rdev, false);
5253 * @rdev: radeon_device pointer
5259 bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
5261 u32 reset_mask = cik_gpu_check_soft_reset(rdev);
5266 radeon_ring_lockup_update(rdev, ring);
5269 return radeon_ring_test_lockup(rdev, ring);
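The filter hides the test between 5261 and 5266: when the soft-reset mask reports no GFX, compute, or CP trouble, the ring's lockup tracker is simply refreshed and false is returned; only a dirty mask falls through to the real lockup test. Approximately:

    bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
    {
        u32 reset_mask = cik_gpu_check_soft_reset(rdev);

        if (!(reset_mask & (RADEON_RESET_GFX |
                            RADEON_RESET_COMPUTE |
                            RADEON_RESET_CP))) {
            radeon_ring_lockup_update(rdev, ring);
            return false;
        }
        return radeon_ring_test_lockup(rdev, ring);
    }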
5276 * @rdev: radeon_device pointer
5281 static void cik_mc_program(struct radeon_device *rdev)
5297 evergreen_mc_stop(rdev, &save);
5298 if (radeon_mc_wait_for_idle(rdev)) {
5299 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5305 rdev->mc.vram_start >> 12);
5307 rdev->mc.vram_end >> 12);
5309 rdev->vram_scratch.gpu_addr >> 12);
5310 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
5311 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
5314 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
5320 if (radeon_mc_wait_for_idle(rdev)) {
5321 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
5323 evergreen_mc_resume(rdev, &save);
5326 rv515_vga_render_disable(rdev);
5332 * @rdev: radeon_device pointer
5338 static int cik_mc_init(struct radeon_device *rdev)
5344 rdev->mc.vram_is_ddr = true;
5382 rdev->mc.vram_width = numchan * chansize;
5384 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
5385 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
5387 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
5388 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
5389 rdev->mc.visible_vram_size = rdev->mc.aper_size;
5390 si_vram_gtt_location(rdev, &rdev->mc);
5391 radeon_update_bandwidth_info(rdev);
5405 * @rdev: radeon_device pointer
5409 void cik_pcie_gart_tlb_flush(struct radeon_device *rdev)
5421 * @rdev: radeon_device pointer
5429 static int cik_pcie_gart_enable(struct radeon_device *rdev)
5433 if (rdev->gart.robj == NULL) {
5434 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
5437 r = radeon_gart_table_vram_pin(rdev);
5460 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
5461 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
5462 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
5464 (u32)(rdev->dummy_page.addr >> 12));
5476 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1);
5480 rdev->vm_manager.saved_table_addr[i]);
5483 rdev->vm_manager.saved_table_addr[i]);
5488 (u32)(rdev->dummy_page.addr >> 12));
5505 if (rdev->family == CHIP_KAVERI) {
5513 mutex_lock(&rdev->srbm_mutex);
5515 cik_srbm_select(rdev, 0, 0, 0, i);
5528 cik_srbm_select(rdev, 0, 0, 0, 0);
5529 mutex_unlock(&rdev->srbm_mutex);
5531 cik_pcie_gart_tlb_flush(rdev);
5533 (unsigned)(rdev->mc.gtt_size >> 20),
5534 (unsigned long long)rdev->gart.table_addr);
5535 rdev->gart.ready = true;
5542 * @rdev: radeon_device pointer
5546 static void cik_pcie_gart_disable(struct radeon_device *rdev)
5556 rdev->vm_manager.saved_table_addr[i] = RREG32(reg);
5575 radeon_gart_table_vram_unpin(rdev);
5581 * @rdev: radeon_device pointer
5585 static void cik_pcie_gart_fini(struct radeon_device *rdev)
5587 cik_pcie_gart_disable(rdev);
5588 radeon_gart_table_vram_free(rdev);
5589 radeon_gart_fini(rdev);
5596 * @rdev: radeon_device pointer
5601 int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
5615 * @rdev: radeon_device pointer
5621 int cik_vm_init(struct radeon_device *rdev)
5628 rdev->vm_manager.nvm = 16;
5630 if (rdev->flags & RADEON_IS_IGP) {
5633 rdev->vm_manager.vram_base_offset = tmp;
5635 rdev->vm_manager.vram_base_offset = 0;
5643 * @rdev: radeon_device pointer
5647 void cik_vm_fini(struct radeon_device *rdev)
5654 * @rdev: radeon_device pointer
5660 static void cik_vm_decode_fault(struct radeon_device *rdev,
5669 if (rdev->family == CHIP_HAWAII)
5683 * @rdev: radeon_device pointer
5688 void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
5733 cik_hdp_flush_cp_ring_emit(rdev, ring->idx);
5768 static void cik_enable_gui_idle_interrupt(struct radeon_device *rdev,
5780 static void cik_enable_lbpw(struct radeon_device *rdev, bool enable)
5792 static void cik_wait_for_rlc_serdes(struct radeon_device *rdev)
5797 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) {
5798 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) {
5799 cik_select_se_sh(rdev, i, j);
5800 for (k = 0; k < rdev->usec_timeout; k++) {
5807 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5810 for (k = 0; k < rdev->usec_timeout; k++) {
5817 static void cik_update_rlc(struct radeon_device *rdev, u32 rlc)
5826 static u32 cik_halt_rlc(struct radeon_device *rdev)
5838 for (i = 0; i < rdev->usec_timeout; i++) {
5844 cik_wait_for_rlc_serdes(rdev);
5850 void cik_enter_rlc_safe_mode(struct radeon_device *rdev)
5858 for (i = 0; i < rdev->usec_timeout; i++) {
5864 for (i = 0; i < rdev->usec_timeout; i++) {
5871 void cik_exit_rlc_safe_mode(struct radeon_device *rdev)
5882 * @rdev: radeon_device pointer
5886 static void cik_rlc_stop(struct radeon_device *rdev)
5890 cik_enable_gui_idle_interrupt(rdev, false);
5892 cik_wait_for_rlc_serdes(rdev);
5898 * @rdev: radeon_device pointer
5902 static void cik_rlc_start(struct radeon_device *rdev)
5906 cik_enable_gui_idle_interrupt(rdev, true);
5914 * @rdev: radeon_device pointer
5920 static int cik_rlc_resume(struct radeon_device *rdev)
5924 if (!rdev->rlc_fw)
5927 cik_rlc_stop(rdev);
5933 si_rlc_reset(rdev);
5935 cik_init_pg(rdev);
5937 cik_init_cg(rdev);
5942 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5950 if (rdev->new_fw) {
5952 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data;
5954 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
5966 switch (rdev->family) {
5983 fw_data = (const __be32 *)rdev->rlc_fw->data;
5991 cik_enable_lbpw(rdev, false);
5993 if (rdev->family == CHIP_BONAIRE)
5996 cik_rlc_start(rdev);
6001 static void cik_enable_cgcg(struct radeon_device *rdev, bool enable)
6007 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) {
6008 cik_enable_gui_idle_interrupt(rdev, true);
6010 tmp = cik_halt_rlc(rdev);
6012 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6018 cik_update_rlc(rdev, tmp);
6022 cik_enable_gui_idle_interrupt(rdev, false);
6037 static void cik_enable_mgcg(struct radeon_device *rdev, bool enable)
6041 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) {
6042 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) {
6043 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) {
6057 tmp = cik_halt_rlc(rdev);
6059 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6065 cik_update_rlc(rdev, tmp);
6067 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS) {
6073 if ((rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) &&
6074 (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS_LS))
6105 tmp = cik_halt_rlc(rdev);
6107 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6113 cik_update_rlc(rdev, tmp);
6130 static void cik_enable_mc_ls(struct radeon_device *rdev,
6138 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS))
6147 static void cik_enable_mc_mgcg(struct radeon_device *rdev,
6155 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG))
6164 static void cik_enable_sdma_mgcg(struct radeon_device *rdev,
6169 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) {
6185 static void cik_enable_sdma_mgls(struct radeon_device *rdev,
6190 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_LS)) {
6213 static void cik_enable_uvd_mgcg(struct radeon_device *rdev,
6218 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) {
6239 static void cik_enable_bif_mgls(struct radeon_device *rdev,
6246 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS))
6257 static void cik_enable_hdp_mgcg(struct radeon_device *rdev,
6264 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG))
6273 static void cik_enable_hdp_ls(struct radeon_device *rdev,
6280 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS))
6289 void cik_update_cg(struct radeon_device *rdev,
6294 cik_enable_gui_idle_interrupt(rdev, false);
6297 cik_enable_mgcg(rdev, true);
6298 cik_enable_cgcg(rdev, true);
6300 cik_enable_cgcg(rdev, false);
6301 cik_enable_mgcg(rdev, false);
6303 cik_enable_gui_idle_interrupt(rdev, true);
6307 if (!(rdev->flags & RADEON_IS_IGP)) {
6308 cik_enable_mc_mgcg(rdev, enable);
6309 cik_enable_mc_ls(rdev, enable);
6314 cik_enable_sdma_mgcg(rdev, enable);
6315 cik_enable_sdma_mgls(rdev, enable);
6319 cik_enable_bif_mgls(rdev, enable);
6323 if (rdev->has_uvd)
6324 cik_enable_uvd_mgcg(rdev, enable);
6328 cik_enable_hdp_mgcg(rdev, enable);
6329 cik_enable_hdp_ls(rdev, enable);
6333 vce_v2_0_enable_mgcg(rdev, enable);
6337 static void cik_init_cg(struct radeon_device *rdev)
6340 cik_update_cg(rdev, RADEON_CG_BLOCK_GFX, true);
6342 if (rdev->has_uvd)
6343 si_init_uvd_internal_cg(rdev);
6345 cik_update_cg(rdev, (RADEON_CG_BLOCK_MC |
6352 static void cik_fini_cg(struct radeon_device *rdev)
6354 cik_update_cg(rdev, (RADEON_CG_BLOCK_MC |
6360 cik_update_cg(rdev, RADEON_CG_BLOCK_GFX, false);
6363 static void cik_enable_sck_slowdown_on_pu(struct radeon_device *rdev,
6369 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS))
6377 static void cik_enable_sck_slowdown_on_pd(struct radeon_device *rdev,
6383 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS))
6391 static void cik_enable_cp_pg(struct radeon_device *rdev, bool enable)
6396 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_CP))
6404 static void cik_enable_gds_pg(struct radeon_device *rdev, bool enable)
6409 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GDS))
6421 void cik_init_cp_pg_table(struct radeon_device *rdev)
6428 if (rdev->family == CHIP_KAVERI)
6431 if (rdev->rlc.cp_table_ptr == NULL)
6435 dst_ptr = rdev->rlc.cp_table_ptr;
6437 if (rdev->new_fw) {
6442 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data;
6444 (rdev->ce_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6448 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data;
6450 (rdev->pfp_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6454 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data;
6456 (rdev->me_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6460 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data;
6462 (rdev->mec_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6466 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data;
6468 (rdev->mec2_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
6483 fw_data = (const __be32 *)rdev->ce_fw->data;
6486 fw_data = (const __be32 *)rdev->pfp_fw->data;
6489 fw_data = (const __be32 *)rdev->me_fw->data;
6492 fw_data = (const __be32 *)rdev->mec_fw->data;
6505 static void cik_enable_gfx_cgpg(struct radeon_device *rdev,
6510 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) {
6535 static u32 cik_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh)
6540 cik_select_se_sh(rdev, se, sh);
6543 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
6550 for (i = 0; i < rdev->config.cik.max_cu_per_sh; i++) {
6558 static void cik_init_ao_cu_mask(struct radeon_device *rdev)
6564 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) {
6565 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) {
6569 for (k = 0; k < rdev->config.cik.max_cu_per_sh; k++) {
6570 if (cik_get_cu_active_bitmap(rdev, i, j) & mask) {
6591 static void cik_enable_gfx_static_mgpg(struct radeon_device *rdev,
6597 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_SMG))
6605 static void cik_enable_gfx_dynamic_mgpg(struct radeon_device *rdev,
6611 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_DMG))
6622 static void cik_init_gfx_cgpg(struct radeon_device *rdev)
6627 if (rdev->rlc.cs_data) {
6629 WREG32(RLC_GPM_SCRATCH_DATA, upper_32_bits(rdev->rlc.clear_state_gpu_addr));
6630 WREG32(RLC_GPM_SCRATCH_DATA, lower_32_bits(rdev->rlc.clear_state_gpu_addr));
6631 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.clear_state_size);
6637 if (rdev->rlc.reg_list) {
6639 for (i = 0; i < rdev->rlc.reg_list_size; i++)
6640 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.reg_list[i]);
6648 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
6649 WREG32(RLC_CP_TABLE_RESTORE, rdev->rlc.cp_table_gpu_addr >> 8);
6671 static void cik_update_gfx_pg(struct radeon_device *rdev, bool enable)
6673 cik_enable_gfx_cgpg(rdev, enable);
6674 cik_enable_gfx_static_mgpg(rdev, enable);
6675 cik_enable_gfx_dynamic_mgpg(rdev, enable);
6678 u32 cik_get_csb_size(struct radeon_device *rdev)
6684 if (rdev->rlc.cs_data == NULL)
6692 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
6710 void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer)
6716 if (rdev->rlc.cs_data == NULL)
6728 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
6744 switch (rdev->family) {
6775 static void cik_init_pg(struct radeon_device *rdev)
6777 if (rdev->pg_flags) {
6778 cik_enable_sck_slowdown_on_pu(rdev, true);
6779 cik_enable_sck_slowdown_on_pd(rdev, true);
6780 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
6781 cik_init_gfx_cgpg(rdev);
6782 cik_enable_cp_pg(rdev, true);
6783 cik_enable_gds_pg(rdev, true);
6785 cik_init_ao_cu_mask(rdev);
6786 cik_update_gfx_pg(rdev, true);
6790 static void cik_fini_pg(struct radeon_device *rdev)
6792 if (rdev->pg_flags) {
6793 cik_update_gfx_pg(rdev, false);
6794 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
6795 cik_enable_cp_pg(rdev, false);
6796 cik_enable_gds_pg(rdev, false);
6819 * @rdev: radeon_device pointer
6823 static void cik_enable_interrupts(struct radeon_device *rdev)
6832 rdev->ih.enabled = true;
6838 * @rdev: radeon_device pointer
6842 static void cik_disable_interrupts(struct radeon_device *rdev)
6854 rdev->ih.enabled = false;
6855 rdev->ih.rptr = 0;
6861 * @rdev: radeon_device pointer
6865 static void cik_disable_interrupt_state(struct radeon_device *rdev)
6894 if (rdev->num_crtc >= 4) {
6898 if (rdev->num_crtc >= 6) {
6903 if (rdev->num_crtc >= 2) {
6907 if (rdev->num_crtc >= 4) {
6911 if (rdev->num_crtc >= 6) {
6938 * @rdev: radeon_device pointer
6946 static int cik_irq_init(struct radeon_device *rdev)
6953 ret = r600_ih_ring_alloc(rdev);
6958 cik_disable_interrupts(rdev);
6961 ret = cik_rlc_resume(rdev);
6963 r600_ih_ring_fini(rdev);
6969 WREG32(INTERRUPT_CNTL2, rdev->dummy_page.addr >> 8);
6979 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
6980 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
6986 if (rdev->wb.enabled)
6990 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
6991 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
7002 if (rdev->msi_enabled)
7007 cik_disable_interrupt_state(rdev);
7009 pci_set_master(rdev->pdev);
7012 cik_enable_interrupts(rdev);
7020 * @rdev: radeon_device pointer
7026 int cik_irq_set(struct radeon_device *rdev)
7036 if (!rdev->irq.installed) {
7041 if (!rdev->ih.enabled) {
7042 cik_disable_interrupts(rdev);
7044 cik_disable_interrupt_state(rdev);
7072 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
7076 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
7077 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
7119 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
7120 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
7163 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
7168 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
7173 if (rdev->irq.crtc_vblank_int[0] ||
7174 atomic_read(&rdev->irq.pflip[0])) {
7178 if (rdev->irq.crtc_vblank_int[1] ||
7179 atomic_read(&rdev->irq.pflip[1])) {
7183 if (rdev->irq.crtc_vblank_int[2] ||
7184 atomic_read(&rdev->irq.pflip[2])) {
7188 if (rdev->irq.crtc_vblank_int[3] ||
7189 atomic_read(&rdev->irq.pflip[3])) {
7193 if (rdev->irq.crtc_vblank_int[4] ||
7194 atomic_read(&rdev->irq.pflip[4])) {
7198 if (rdev->irq.crtc_vblank_int[5] ||
7199 atomic_read(&rdev->irq.pflip[5])) {
7203 if (rdev->irq.hpd[0]) {
7207 if (rdev->irq.hpd[1]) {
7211 if (rdev->irq.hpd[2]) {
7215 if (rdev->irq.hpd[3]) {
7219 if (rdev->irq.hpd[4]) {
7223 if (rdev->irq.hpd[5]) {
7246 if (rdev->num_crtc >= 4) {
7250 if (rdev->num_crtc >= 6) {
7255 if (rdev->num_crtc >= 2) {
7261 if (rdev->num_crtc >= 4) {
7267 if (rdev->num_crtc >= 6) {
7290 * @rdev: radeon_device pointer
7296 static inline void cik_irq_ack(struct radeon_device *rdev)
7300 rdev->irq.stat_regs.cik.disp_int = RREG32(DISP_INTERRUPT_STATUS);
7301 rdev->irq.stat_regs.cik.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
7302 rdev->irq.stat_regs.cik.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
7303 rdev->irq.stat_regs.cik.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
7304 rdev->irq.stat_regs.cik.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
7305 rdev->irq.stat_regs.cik.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
7306 rdev->irq.stat_regs.cik.disp_int_cont6 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE6);
7308 rdev->irq.stat_regs.cik.d1grph_int = RREG32(GRPH_INT_STATUS +
7310 rdev->irq.stat_regs.cik.d2grph_int = RREG32(GRPH_INT_STATUS +
7312 if (rdev->num_crtc >= 4) {
7313 rdev->irq.stat_regs.cik.d3grph_int = RREG32(GRPH_INT_STATUS +
7315 rdev->irq.stat_regs.cik.d4grph_int = RREG32(GRPH_INT_STATUS +
7318 if (rdev->num_crtc >= 6) {
7319 rdev->irq.stat_regs.cik.d5grph_int = RREG32(GRPH_INT_STATUS +
7321 rdev->irq.stat_regs.cik.d6grph_int = RREG32(GRPH_INT_STATUS +
7325 if (rdev->irq.stat_regs.cik.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
7328 if (rdev->irq.stat_regs.cik.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
7331 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT)
7333 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT)
7335 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
7337 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT)
7340 if (rdev->num_crtc >= 4) {
7341 if (rdev->irq.stat_regs.cik.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
7344 if (rdev->irq.stat_regs.cik.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
7347 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
7349 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
7351 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
7353 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
7357 if (rdev->num_crtc >= 6) {
7358 if (rdev->irq.stat_regs.cik.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
7361 if (rdev->irq.stat_regs.cik.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
7364 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
7366 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
7368 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
7370 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
7374 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT) {
7379 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT) {
7384 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT) {
7389 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT) {
7394 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT) {
7399 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT) {
7404 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT) {
7409 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
7414 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
7419 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
7424 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
7429 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
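Every HPD test in cik_irq_ack (7374-7433) is followed by the same filtered three-line acknowledge: read the pad's DC_HPDx_INT_CONTROL register, set the ack bit, write it back. For the first pad:

    if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT) {
        tmp = RREG32(DC_HPD1_INT_CONTROL);
        tmp |= DC_HPDx_INT_ACK;
        WREG32(DC_HPD1_INT_CONTROL, tmp);
    }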
7439 * @rdev: radeon_device pointer
7443 static void cik_irq_disable(struct radeon_device *rdev)
7445 cik_disable_interrupts(rdev);
7448 cik_irq_ack(rdev);
7449 cik_disable_interrupt_state(rdev);
7455 * @rdev: radeon_device pointer
7460 static void cik_irq_suspend(struct radeon_device *rdev)
7462 cik_irq_disable(rdev);
7463 cik_rlc_stop(rdev);
7469 * @rdev: radeon_device pointer
7475 static void cik_irq_fini(struct radeon_device *rdev)
7477 cik_irq_suspend(rdev);
7478 r600_ih_ring_fini(rdev);
7484 * @rdev: radeon_device pointer
7492 static inline u32 cik_get_ih_wptr(struct radeon_device *rdev)
7496 if (rdev->wb.enabled)
7497 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
7507 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
7508 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
7509 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
7514 return (wptr & rdev->ih.ptr_mask);
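The overflow branch around 7507-7509 is only partially matched; the full handling also clears the hardware overflow flag and skips past the oldest 16-byte entry the writer may have clobbered. A sketch, assuming the RB_OVERFLOW/IH_WPTR_OVERFLOW_CLEAR bit names shared with the r600 IH code:

    if (wptr & RB_OVERFLOW) {
        wptr &= ~RB_OVERFLOW;
        /* the writer may have clobbered the oldest entry, drop it */
        dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
                 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
        rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
        tmp = RREG32(IH_RB_CNTL);
        tmp |= IH_WPTR_OVERFLOW_CLEAR;
        WREG32(IH_RB_CNTL, tmp);
    }
    return (wptr & rdev->ih.ptr_mask);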
7542 * @rdev: radeon_device pointer
7549 int cik_irq_process(struct radeon_device *rdev)
7551 struct radeon_ring *cp1_ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
7552 struct radeon_ring *cp2_ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
7564 if (!rdev->ih.enabled || rdev->shutdown)
7567 wptr = cik_get_ih_wptr(rdev);
7571 if (atomic_xchg(&rdev->ih.lock, 1))
7574 rptr = rdev->ih.rptr;
7581 cik_irq_ack(rdev);
7587 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
7588 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
7589 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
7595 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT))
7598 if (rdev->irq.crtc_vblank_int[0]) {
7599 drm_handle_vblank(rdev->ddev, 0);
7600 rdev->pm.vblank_sync = true;
7601 wake_up(&rdev->irq.vblank_queue);
7603 if (atomic_read(&rdev->irq.pflip[0]))
7604 radeon_crtc_handle_vblank(rdev, 0);
7605 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
7610 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT))
7613 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VLINE_INTERRUPT;
7625 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
7628 if (rdev->irq.crtc_vblank_int[1]) {
7629 drm_handle_vblank(rdev->ddev, 1);
7630 rdev->pm.vblank_sync = true;
7631 wake_up(&rdev->irq.vblank_queue);
7633 if (atomic_read(&rdev->irq.pflip[1]))
7634 radeon_crtc_handle_vblank(rdev, 1);
7635 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
7640 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT))
7643 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
7655 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
7658 if (rdev->irq.crtc_vblank_int[2]) {
7659 drm_handle_vblank(rdev->ddev, 2);
7660 rdev->pm.vblank_sync = true;
7661 wake_up(&rdev->irq.vblank_queue);
7663 if (atomic_read(&rdev->irq.pflip[2]))
7664 radeon_crtc_handle_vblank(rdev, 2);
7665 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
7670 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
7673 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
7685 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
7688 if (rdev->irq.crtc_vblank_int[3]) {
7689 drm_handle_vblank(rdev->ddev, 3);
7690 rdev->pm.vblank_sync = true;
7691 wake_up(&rdev->irq.vblank_queue);
7693 if (atomic_read(&rdev->irq.pflip[3]))
7694 radeon_crtc_handle_vblank(rdev, 3);
7695 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
7700 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
7703 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
7715 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
7718 if (rdev->irq.crtc_vblank_int[4]) {
7719 drm_handle_vblank(rdev->ddev, 4);
7720 rdev->pm.vblank_sync = true;
7721 wake_up(&rdev->irq.vblank_queue);
7723 if (atomic_read(&rdev->irq.pflip[4]))
7724 radeon_crtc_handle_vblank(rdev, 4);
7725 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
7730 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
7733 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
7745 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
7748 if (rdev->irq.crtc_vblank_int[5]) {
7749 drm_handle_vblank(rdev->ddev, 5);
7750 rdev->pm.vblank_sync = true;
7751 wake_up(&rdev->irq.vblank_queue);
7753 if (atomic_read(&rdev->irq.pflip[5]))
7754 radeon_crtc_handle_vblank(rdev, 5);
7755 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
7760 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
7763 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
7780 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
7785 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT))
7788 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_INTERRUPT;
7794 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT))
7797 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_INTERRUPT;
7803 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT))
7806 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
7812 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT))
7815 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
7821 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT))
7824 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
7830 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT))
7833 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
7839 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT))
7842 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_RX_INTERRUPT;
7848 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT))
7851 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
7857 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
7860 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
7866 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
7869 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
7875 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
7878 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
7884 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
7887 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
7903 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
7914 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
7915 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
7917 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
7919 cik_vm_decode_fault(rdev, status, addr, mc_client);
7925 radeon_fence_process(rdev, TN_RING_TYPE_VCE1_INDEX);
7928 radeon_fence_process(rdev, TN_RING_TYPE_VCE2_INDEX);
7937 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
7947 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
7952 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
7954 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
8013 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
8026 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
8040 rdev->pm.dpm.thermal.high_to_low = false;
8045 rdev->pm.dpm.thermal.high_to_low = true;
8097 rptr &= rdev->ih.ptr_mask;
8101 schedule_work(&rdev->dp_work);
8103 schedule_delayed_work(&rdev->hotplug_work, 0);
8105 rdev->needs_reset = true;
8106 wake_up_all(&rdev->fence_queue);
8109 schedule_work(&rdev->pm.dpm.thermal.work);
8110 rdev->ih.rptr = rptr;
8112 atomic_set(&rdev->ih.lock, 0);
8115 wptr = cik_get_ih_wptr(rdev);
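The tail of the IH handler above is a single-consumer ring walk: advance the read pointer modulo a power-of-two ring through ptr_mask, schedule the deferred work, drop ih.lock, then re-read the write pointer so entries posted during processing are not lost. A self-contained sketch of the wraparound arithmetic (illustrative names, plain C):

	#include <stdint.h>
	#include <stdio.h>

	#define RING_BYTES 64u			/* must be a power of two */
	#define PTR_MASK   (RING_BYTES - 1u)	/* plays the role of rdev->ih.ptr_mask */

	int main(void)
	{
		uint32_t rptr = 48, wptr = 16;	/* wptr has already wrapped */

		while (rptr != wptr) {
			printf("consume 16-byte IH entry at offset %u\n", rptr);
			rptr = (rptr + 16u) & PTR_MASK;	/* same masking as rptr &= ptr_mask */
		}
		return 0;
	}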
8125 static void cik_uvd_init(struct radeon_device *rdev)
8129 if (!rdev->has_uvd)
8132 r = radeon_uvd_init(rdev);
8134 dev_err(rdev->dev, "failed UVD init (%d).\n", r);
8136 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
8141 rdev->has_uvd = false;
8144 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
8145 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
8148 static void cik_uvd_start(struct radeon_device *rdev)
8152 if (!rdev->has_uvd)
8155 r = radeon_uvd_resume(rdev);
8157 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
8160 r = uvd_v4_2_resume(rdev);
8162 dev_err(rdev->dev, "failed UVD 4.2 resume (%d).\n", r);
8165 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
8167 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
8173 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
8176 static void cik_uvd_resume(struct radeon_device *rdev)
8181 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
8184 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
8185 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
8187 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
8190 r = uvd_v1_0_init(rdev);
8192 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
8197 static void cik_vce_init(struct radeon_device *rdev)
8201 if (!rdev->has_vce)
8204 r = radeon_vce_init(rdev);
8206 dev_err(rdev->dev, "failed VCE init (%d).\n", r);
8208 * At this point rdev->vce.vcpu_bo is NULL which trickles down
8213 rdev->has_vce = false;
8216 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL;
8217 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096);
8218 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL;
8219 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096);
8222 static void cik_vce_start(struct radeon_device *rdev)
8226 if (!rdev->has_vce)
8229 r = radeon_vce_resume(rdev);
8231 dev_err(rdev->dev, "failed VCE resume (%d).\n", r);
8234 r = vce_v2_0_resume(rdev);
8236 dev_err(rdev->dev, "failed VCE 2.0 resume (%d).\n", r);
8239 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE1_INDEX);
8241 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r);
8244 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE2_INDEX);
8246 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r);
8252 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0;
8253 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0;
8256 static void cik_vce_resume(struct radeon_device *rdev)
8261 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size)
8264 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
8265 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
8267 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r);
8270 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
8271 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
8273 dev_err(rdev->dev, "failed initializing VCE2 ring (%d).\n", r);
8276 r = vce_v1_0_init(rdev);
8278 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r);
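cik_uvd_* and cik_vce_* are the same three-phase bring-up: *_init() does software setup and clears rdev->has_uvd / rdev->has_vce on failure, *_start() resumes firmware and the fence driver and zeroes ring_size on failure, and *_resume() touches hardware only when both earlier phases succeeded, so a broken encoder block degrades the driver instead of failing it. A sketch of the guard logic, using the UVD ring (the wrapper function is hypothetical; radeon_ring_init() and its arguments are the driver's):

	static void engine_resume_guarded(struct radeon_device *rdev)
	{
		struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
		int r;

		/* has_uvd is cleared by cik_uvd_init() on failure;
		 * ring_size is zeroed by cik_uvd_start() on failure */
		if (!rdev->has_uvd || !ring->ring_size)
			return;

		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     PACKET0(UVD_NO_OP, 0));
		if (r)
			dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
	}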
8286 * @rdev: radeon_device pointer
8292 static int cik_startup(struct radeon_device *rdev)
8299 cik_pcie_gen3_enable(rdev);
8301 cik_program_aspm(rdev);
8304 r = r600_vram_scratch_init(rdev);
8308 cik_mc_program(rdev);
8310 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) {
8311 r = ci_mc_load_microcode(rdev);
8318 r = cik_pcie_gart_enable(rdev);
8321 cik_gpu_init(rdev);
8324 if (rdev->flags & RADEON_IS_IGP) {
8325 if (rdev->family == CHIP_KAVERI) {
8326 rdev->rlc.reg_list = spectre_rlc_save_restore_register_list;
8327 rdev->rlc.reg_list_size =
8330 rdev->rlc.reg_list = kalindi_rlc_save_restore_register_list;
8331 rdev->rlc.reg_list_size =
8335 rdev->rlc.cs_data = ci_cs_data;
8336 rdev->rlc.cp_table_size = ALIGN(CP_ME_TABLE_SIZE * 5 * 4, 2048); /* CP JT */
8337 rdev->rlc.cp_table_size += 64 * 1024; /* GDS */
8338 r = sumo_rlc_init(rdev);
8345 r = radeon_wb_init(rdev);
8350 r = cik_mec_init(rdev);
8356 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
8358 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
8362 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
8364 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
8368 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
8370 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
8374 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
8376 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
8380 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
8382 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
8386 cik_uvd_start(rdev);
8387 cik_vce_start(rdev);
8390 if (!rdev->irq.installed) {
8391 r = radeon_irq_kms_init(rdev);
8396 r = cik_irq_init(rdev);
8399 radeon_irq_kms_fini(rdev);
8402 cik_irq_set(rdev);
8404 if (rdev->family == CHIP_HAWAII) {
8405 if (rdev->new_fw)
8413 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
8414 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
8421 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
8422 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
8432 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
8433 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
8443 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
8444 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
8449 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
8450 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
8455 r = cik_cp_resume(rdev);
8459 r = cik_sdma_resume(rdev);
8463 cik_uvd_resume(rdev);
8464 cik_vce_resume(rdev);
8466 r = radeon_ib_pool_init(rdev);
8468 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
8472 r = radeon_vm_manager_init(rdev);
8474 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
8478 r = radeon_audio_init(rdev);
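cik_startup() brings each ring up with the same pairing: start the fence driver for the ring, then initialize the hardware ring with its writeback rptr slot and nop packet; any failure returns immediately, and cleanup is centralized in cik_init()'s error path (lines 8680-8690 below). A hedged sketch for the GFX ring (the helper is hypothetical; the calls and constants match the listing):

	static int start_gfx_ring(struct radeon_device *rdev)
	{
		struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
		int r;

		r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
		if (r) {
			dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
			return r;
		}
		/* PACKET3_NOP filler is what the GFX ring uses; assumed here */
		return radeon_ring_init(rdev, ring, ring->ring_size,
					RADEON_WB_CP_RPTR_OFFSET,
					PACKET3(PACKET3_NOP, 0x3FFF));
	}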
8488 * @rdev: radeon_device pointer
8494 int cik_resume(struct radeon_device *rdev)
8499 atom_asic_init(rdev->mode_info.atom_context);
8502 cik_init_golden_registers(rdev);
8504 if (rdev->pm.pm_method == PM_METHOD_DPM)
8505 radeon_pm_resume(rdev);
8507 rdev->accel_working = true;
8508 r = cik_startup(rdev);
8511 rdev->accel_working = false;
8522 * @rdev: radeon_device pointer
8528 int cik_suspend(struct radeon_device *rdev)
8530 radeon_pm_suspend(rdev);
8531 radeon_audio_fini(rdev);
8532 radeon_vm_manager_fini(rdev);
8533 cik_cp_enable(rdev, false);
8534 cik_sdma_enable(rdev, false);
8535 if (rdev->has_uvd) {
8536 uvd_v1_0_fini(rdev);
8537 radeon_uvd_suspend(rdev);
8539 if (rdev->has_vce)
8540 radeon_vce_suspend(rdev);
8541 cik_fini_pg(rdev);
8542 cik_fini_cg(rdev);
8543 cik_irq_suspend(rdev);
8544 radeon_wb_disable(rdev);
8545 cik_pcie_gart_disable(rdev);
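Note the ordering in cik_suspend(): command consumers (CP, SDMA, UVD, VCE) are halted first, then gating and interrupts are wound down, and only then are writeback and the GART disabled, the reverse of the bring-up order, so no engine is still issuing memory traffic through a torn-down GART. cik_fini() below follows the same reverse-order convention.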
8558 * @rdev: radeon_device pointer
8565 int cik_init(struct radeon_device *rdev)
8571 if (!radeon_get_bios(rdev)) {
8572 if (ASIC_IS_AVIVO(rdev))
8576 if (!rdev->is_atom_bios) {
8577 dev_err(rdev->dev, "Expecting atombios for CIK GPU\n");
8580 r = radeon_atombios_init(rdev);
8585 if (!radeon_card_posted(rdev)) {
8586 if (!rdev->bios) {
8587 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
8591 atom_asic_init(rdev->mode_info.atom_context);
8594 cik_init_golden_registers(rdev);
8596 cik_scratch_init(rdev);
8598 radeon_surface_init(rdev);
8600 radeon_get_clock_info(rdev->ddev);
8603 r = radeon_fence_driver_init(rdev);
8608 r = cik_mc_init(rdev);
8612 r = radeon_bo_init(rdev);
8616 if (rdev->flags & RADEON_IS_IGP) {
8617 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
8618 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw) {
8619 r = cik_init_microcode(rdev);
8626 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
8627 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw ||
8628 !rdev->mc_fw) {
8629 r = cik_init_microcode(rdev);
8638 radeon_pm_init(rdev);
8640 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
8642 r600_ring_init(rdev, ring, 1024 * 1024);
8644 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
8646 r600_ring_init(rdev, ring, 1024 * 1024);
8647 r = radeon_doorbell_get(rdev, &ring->doorbell_index);
8651 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
8653 r600_ring_init(rdev, ring, 1024 * 1024);
8654 r = radeon_doorbell_get(rdev, &ring->doorbell_index);
8658 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
8660 r600_ring_init(rdev, ring, 256 * 1024);
8662 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
8664 r600_ring_init(rdev, ring, 256 * 1024);
8666 cik_uvd_init(rdev);
8667 cik_vce_init(rdev);
8669 rdev->ih.ring_obj = NULL;
8670 r600_ih_ring_init(rdev, 64 * 1024);
8672 r = r600_pcie_gart_init(rdev);
8676 rdev->accel_working = true;
8677 r = cik_startup(rdev);
8679 dev_err(rdev->dev, "disabling GPU acceleration\n");
8680 cik_cp_fini(rdev);
8681 cik_sdma_fini(rdev);
8682 cik_irq_fini(rdev);
8683 sumo_rlc_fini(rdev);
8684 cik_mec_fini(rdev);
8685 radeon_wb_fini(rdev);
8686 radeon_ib_pool_fini(rdev);
8687 radeon_vm_manager_fini(rdev);
8688 radeon_irq_kms_fini(rdev);
8689 cik_pcie_gart_fini(rdev);
8690 rdev->accel_working = false;
8697 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
8708 * @rdev: radeon_device pointer
8714 void cik_fini(struct radeon_device *rdev)
8716 radeon_pm_fini(rdev);
8717 cik_cp_fini(rdev);
8718 cik_sdma_fini(rdev);
8719 cik_fini_pg(rdev);
8720 cik_fini_cg(rdev);
8721 cik_irq_fini(rdev);
8722 sumo_rlc_fini(rdev);
8723 cik_mec_fini(rdev);
8724 radeon_wb_fini(rdev);
8725 radeon_vm_manager_fini(rdev);
8726 radeon_ib_pool_fini(rdev);
8727 radeon_irq_kms_fini(rdev);
8728 uvd_v1_0_fini(rdev);
8729 radeon_uvd_fini(rdev);
8730 radeon_vce_fini(rdev);
8731 cik_pcie_gart_fini(rdev);
8732 r600_vram_scratch_fini(rdev);
8733 radeon_gem_fini(rdev);
8734 radeon_fence_driver_fini(rdev);
8735 radeon_bo_fini(rdev);
8736 radeon_atombios_fini(rdev);
8737 kfree(rdev->bios);
8738 rdev->bios = NULL;
8744 struct radeon_device *rdev = dev->dev_private;
8809 * @rdev: radeon_device pointer
8818 static u32 dce8_line_buffer_adjust(struct radeon_device *rdev,
8841 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4;
8845 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4;
8857 for (i = 0; i < rdev->usec_timeout; i++) {
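The loop at line 8857 is the driver's standard bounded poll: spin for at most rdev->usec_timeout iterations of udelay(1) waiting for a status bit. A hedged reconstruction of the body (the register and bit names are what DCE8 uses to my reading; treat them as an assumption):

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
		    DMIF_BUFFERS_ALLOCATED_COMPLETED)
			break;
		udelay(1);
	}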
8883 * @rdev: radeon_device pointer
8889 static u32 cik_get_number_of_dram_channels(struct radeon_device *rdev)
9239 * @rdev: radeon_device pointer
9247 static void dce8_program_watermarks(struct radeon_device *rdev,
9266 if ((rdev->pm.pm_method == PM_METHOD_DPM) &&
9267 rdev->pm.dpm_enabled) {
9269 radeon_dpm_get_mclk(rdev, false) * 10;
9271 radeon_dpm_get_sclk(rdev, false) * 10;
9273 wm_high.yclk = rdev->pm.current_mclk * 10;
9274 wm_high.sclk = rdev->pm.current_sclk * 10;
9290 wm_high.dram_channels = cik_get_number_of_dram_channels(rdev);
9301 (rdev->disp_priority == 2)) {
9306 if ((rdev->pm.pm_method == PM_METHOD_DPM) &&
9307 rdev->pm.dpm_enabled) {
9309 radeon_dpm_get_mclk(rdev, true) * 10;
9311 radeon_dpm_get_sclk(rdev, true) * 10;
9313 wm_low.yclk = rdev->pm.current_mclk * 10;
9314 wm_low.sclk = rdev->pm.current_sclk * 10;
9330 wm_low.dram_channels = cik_get_number_of_dram_channels(rdev);
9341 (rdev->disp_priority == 2)) {
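The watermark setup brackets the operating range: with DPM active, wm_high takes the highest DPM clocks (second argument false) and wm_low the lowest (true); without DPM both use the current fixed clocks. The * 10 appears to convert the driver's 10 kHz clock bookkeeping into the kHz the watermark math expects (an inference, not stated in the listing). Condensed from the lines above:

	if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
		wm_high.yclk = radeon_dpm_get_mclk(rdev, false) * 10;	/* max mclk */
		wm_low.yclk  = radeon_dpm_get_mclk(rdev, true) * 10;	/* min mclk */
	} else {
		wm_high.yclk = rdev->pm.current_mclk * 10;
		wm_low.yclk  = rdev->pm.current_mclk * 10;
	}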
9378 * @rdev: radeon_device pointer
9383 void dce8_bandwidth_update(struct radeon_device *rdev)
9389 if (!rdev->mode_info.mode_config_initialized)
9392 radeon_update_display_priority(rdev);
9394 for (i = 0; i < rdev->num_crtc; i++) {
9395 if (rdev->mode_info.crtcs[i]->base.enabled)
9398 for (i = 0; i < rdev->num_crtc; i++) {
9399 mode = &rdev->mode_info.crtcs[i]->base.mode;
9400 lb_size = dce8_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode);
9401 dce8_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
9408 * @rdev: radeon_device pointer
9413 uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev)
9417 mutex_lock(&rdev->gpu_clock_mutex);
9421 mutex_unlock(&rdev->gpu_clock_mutex);
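cik_get_gpu_clock_counter() is a split 64-bit counter read: latch, then read the LSB and MSB halves, all under gpu_clock_mutex so two callers cannot interleave latch and read. The middle lines do not mention rdev and are elided from this listing; a hedged reconstruction (register names per cikd.h as I recall them):

	mutex_lock(&rdev->gpu_clock_mutex);
	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);		/* latch the counter */
	clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&rdev->gpu_clock_mutex);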
9425 static int cik_set_uvd_clock(struct radeon_device *rdev, u32 clock,
9432 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_GPUCLK_INPUT_FLAG_DEFAULT_GPUCLK,
9453 int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
9457 r = cik_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
9461 r = cik_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
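cik_set_uvd_clocks() funnels both UVD clocks through one helper that differs only in the control/status register pair it programs; the helper first asks the ATOM tables for dividers (line 9432) and then programs and polls the given registers. Condensed from the lines above:

	r = cik_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		return r;
	return cik_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);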
9465 int cik_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk)
9471 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_GPUCLK_INPUT_FLAG_DEFAULT_GPUCLK,
9500 static void cik_pcie_gen3_enable(struct radeon_device *rdev)
9502 struct pci_dev *root = rdev->pdev->bus->self;
9508 if (pci_is_root_bus(rdev->pdev->bus))
9514 if (rdev->flags & RADEON_IS_IGP)
9517 if (!(rdev->flags & RADEON_IS_PCIE))
9545 if (!pci_is_pcie(root) || !pci_is_pcie(rdev->pdev))
9556 pcie_capability_set_word(rdev->pdev, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD);
9574 pcie_capability_read_word(rdev->pdev,
9582 pcie_capability_read_word(rdev->pdev,
9588 pcie_capability_read_word(rdev->pdev,
9607 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL,
9624 pcie_capability_read_word(rdev->pdev,
9632 pcie_capability_write_word(rdev->pdev,
9648 pcie_capability_read_word(rdev->pdev, PCI_EXP_LNKCTL2, &tmp16);
9656 pcie_capability_write_word(rdev->pdev, PCI_EXP_LNKCTL2, tmp16);
9662 for (i = 0; i < rdev->usec_timeout; i++) {
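The gen3 bring-up touches PCIe config space only through the pcie_capability_* accessors, never raw config reads, and bails out early for IGPs, non-PCIE parts, and root-bus devices. A hedged sketch of the Link Control 2 target-link-speed update around line 9648 (the PCI_EXP_LNKCTL2_TLS* names are the current pci_regs.h spellings; older driver revisions open-coded the masks):

	u16 tmp16;

	pcie_capability_read_word(rdev->pdev, PCI_EXP_LNKCTL2, &tmp16);
	tmp16 &= ~PCI_EXP_LNKCTL2_TLS;
	tmp16 |= PCI_EXP_LNKCTL2_TLS_8_0GT;	/* request 8.0 GT/s (gen3) */
	pcie_capability_write_word(rdev->pdev, PCI_EXP_LNKCTL2, tmp16);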
9670 static void cik_program_aspm(struct radeon_device *rdev)
9680 if (rdev->flags & RADEON_IS_IGP)
9683 if (!(rdev->flags & RADEON_IS_PCIE))
9748 !pci_is_root_bus(rdev->pdev->bus)) {
9749 struct pci_dev *root = rdev->pdev->bus->self;