Lines matching defs:rdev

47 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg)
52 spin_lock_irqsave(&rdev->smc_idx_lock, flags);
55 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
59 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v)
63 spin_lock_irqsave(&rdev->smc_idx_lock, flags);
66 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
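
The two fragments above (file lines 47-66) are the TN SMC indexed-register accessors: an index/data register pair guarded by smc_idx_lock so a concurrent reader or writer cannot interleave the index write with the data access. A minimal sketch of the read side, assuming the TN_SMC_IND_INDEX_0/TN_SMC_IND_DATA_0 register names and the usual WREG32/RREG32 MMIO helpers:

u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	/* Serialize the index write and the data read. */
	spin_lock_irqsave(&rdev->smc_idx_lock, flags);
	WREG32(TN_SMC_IND_INDEX_0, reg);	/* select SMC address */
	r = RREG32(TN_SMC_IND_DATA_0);		/* read the value back */
	spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
	return r;
}

The write side (59-66) is symmetric: write the index, then the value, under the same lock.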
443 static void ni_init_golden_registers(struct radeon_device *rdev)
445 switch (rdev->family) {
447 radeon_program_register_sequence(rdev,
450 radeon_program_register_sequence(rdev,
455 if ((rdev->pdev->device == 0x9900) ||
456 (rdev->pdev->device == 0x9901) ||
457 (rdev->pdev->device == 0x9903) ||
458 (rdev->pdev->device == 0x9904) ||
459 (rdev->pdev->device == 0x9905) ||
460 (rdev->pdev->device == 0x9906) ||
461 (rdev->pdev->device == 0x9907) ||
462 (rdev->pdev->device == 0x9908) ||
463 (rdev->pdev->device == 0x9909) ||
464 (rdev->pdev->device == 0x990A) ||
465 (rdev->pdev->device == 0x990B) ||
466 (rdev->pdev->device == 0x990C) ||
467 (rdev->pdev->device == 0x990D) ||
468 (rdev->pdev->device == 0x990E) ||
469 (rdev->pdev->device == 0x990F) ||
470 (rdev->pdev->device == 0x9910) ||
471 (rdev->pdev->device == 0x9913) ||
472 (rdev->pdev->device == 0x9917) ||
473 (rdev->pdev->device == 0x9918)) {
474 radeon_program_register_sequence(rdev,
477 radeon_program_register_sequence(rdev,
481 radeon_program_register_sequence(rdev,
484 radeon_program_register_sequence(rdev,
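
ni_init_golden_registers (443-486) selects per-family golden-register tables and, for the Aruba parts, narrows the choice further by PCI device ID before handing each table to radeon_program_register_sequence. The tables are flat arrays of 3-tuples {offset, and_mask, or_mask}; a sketch of how such a sequence is applied, reconstructed from memory of the generic radeon helper (treat the 0xffffffff special case as an assumption to verify):

static void program_register_sequence(struct radeon_device *rdev,
				      const u32 *registers,
				      const u32 array_size)
{
	u32 tmp, reg, and_mask, or_mask;
	int i;

	for (i = 0; i < array_size; i += 3) {
		reg = registers[i + 0];
		and_mask = registers[i + 1];
		or_mask = registers[i + 2];

		if (and_mask == 0xffffffff) {
			/* full-word replace */
			tmp = or_mask;
		} else {
			/* read-modify-write: clear masked bits, then OR */
			tmp = RREG32(reg);
			tmp &= ~and_mask;
			tmp |= or_mask;
		}
		WREG32(reg, tmp);
	}
}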
624 int ni_mc_load_microcode(struct radeon_device *rdev)
631 if (!rdev->mc_fw)
634 switch (rdev->family) {
677 fw_data = (const __be32 *)rdev->mc_fw->data;
687 for (i = 0; i < rdev->usec_timeout; i++) {
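
ni_mc_load_microcode (624-699) bails out early when no MC firmware is attached, copies the big-endian image into the memory-controller sequencer, then polls for completion bounded by rdev->usec_timeout. A sketch of the copy-and-poll shape; MC_SEQ_SUP_PGM, MC_SEQ_TRAIN_WAKEUP_CNTL, and TRAIN_DONE_D0 are my best recollection of the ni.c names and should be checked against the source:

	if (!rdev->mc_fw)
		return -EINVAL;		/* nothing to load */

	/* MC firmware is stored big-endian; convert word by word */
	fw_data = (const __be32 *)rdev->mc_fw->data;
	for (i = 0; i < ucode_size; i++)
		WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));

	/* poll for the MC to signal done, bounded by usec_timeout */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
			break;
		udelay(1);
	}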
700 int ni_init_microcode(struct radeon_device *rdev)
711 switch (rdev->family) {
763 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
766 if (rdev->pfp_fw->size != pfp_req_size) {
768 rdev->pfp_fw->size, fw_name);
774 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
777 if (rdev->me_fw->size != me_req_size) {
779 rdev->me_fw->size, fw_name);
784 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
787 if (rdev->rlc_fw->size != rlc_req_size) {
789 rdev->rlc_fw->size, fw_name);
794 if (!(rdev->flags & RADEON_IS_IGP)) {
796 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
799 if (rdev->mc_fw->size != mc_req_size) {
801 rdev->mc_fw->size, fw_name);
806 if ((rdev->family >= CHIP_BARTS) && (rdev->family <= CHIP_CAYMAN)) {
808 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
811 release_firmware(rdev->smc_fw);
812 rdev->smc_fw = NULL;
814 } else if (rdev->smc_fw->size != smc_req_size) {
816 rdev->smc_fw->size, fw_name);
826 release_firmware(rdev->pfp_fw);
827 rdev->pfp_fw = NULL;
828 release_firmware(rdev->me_fw);
829 rdev->me_fw = NULL;
830 release_firmware(rdev->rlc_fw);
831 rdev->rlc_fw = NULL;
832 release_firmware(rdev->mc_fw);
833 rdev->mc_fw = NULL;
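
ni_init_microcode (700-839) follows the standard request_firmware pattern for each blob (PFP, ME, RLC, plus MC on discrete parts and SMC on BARTS through CAYMAN): request, validate the size against the per-family expectation, and on failure release everything acquired so far and NULL the pointers so a later retry starts clean. A condensed sketch of one blob plus the shared error path (fw_name formatting and the error string are illustrative):

	snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
	err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
	if (err)
		goto out;
	if (rdev->pfp_fw->size != pfp_req_size) {
		pr_err("ni_cp: Bogus length %zu in firmware \"%s\"\n",
		       rdev->pfp_fw->size, fw_name);
		err = -EINVAL;
		goto out;
	}
	/* ... same request/size-check for me, rlc, mc, smc ... */
out:
	if (err) {
		/* release in bulk; release_firmware(NULL) is a no-op */
		release_firmware(rdev->pfp_fw);
		rdev->pfp_fw = NULL;
		release_firmware(rdev->me_fw);
		rdev->me_fw = NULL;
		release_firmware(rdev->rlc_fw);
		rdev->rlc_fw = NULL;
		release_firmware(rdev->mc_fw);
		rdev->mc_fw = NULL;
	}
	return err;

Note the asymmetry at 806-816: a missing or mis-sized SMC image is non-fatal; the driver releases smc_fw, NULLs it, and continues without DPM rather than failing probe.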
841 * @rdev: radeon_device pointer
848 int cayman_get_allowed_info_register(struct radeon_device *rdev,
867 int tn_get_temp(struct radeon_device *rdev)
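
tn_get_temp (867) converts a raw SMC thermal reading into millidegrees Celsius for the hwmon interface. Reconstructed from memory; the register name, 11-bit mask, and the /8 - 49 calibration are assumptions to verify against the source:

int tn_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32_SMC(TN_CURRENT_GNB_TEMP) & 0x7ff;
	int actual_temp = (temp / 8) - 49;	/* raw units -> deg C */

	return actual_temp * 1000;		/* hwmon wants millidegrees */
}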
878 static void cayman_gpu_init(struct radeon_device *rdev)
891 switch (rdev->family) {
893 rdev->config.cayman.max_shader_engines = 2;
894 rdev->config.cayman.max_pipes_per_simd = 4;
895 rdev->config.cayman.max_tile_pipes = 8;
896 rdev->config.cayman.max_simds_per_se = 12;
897 rdev->config.cayman.max_backends_per_se = 4;
898 rdev->config.cayman.max_texture_channel_caches = 8;
899 rdev->config.cayman.max_gprs = 256;
900 rdev->config.cayman.max_threads = 256;
901 rdev->config.cayman.max_gs_threads = 32;
902 rdev->config.cayman.max_stack_entries = 512;
903 rdev->config.cayman.sx_num_of_sets = 8;
904 rdev->config.cayman.sx_max_export_size = 256;
905 rdev->config.cayman.sx_max_export_pos_size = 64;
906 rdev->config.cayman.sx_max_export_smx_size = 192;
907 rdev->config.cayman.max_hw_contexts = 8;
908 rdev->config.cayman.sq_num_cf_insts = 2;
910 rdev->config.cayman.sc_prim_fifo_size = 0x100;
911 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30;
912 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130;
917 rdev->config.cayman.max_shader_engines = 1;
918 rdev->config.cayman.max_pipes_per_simd = 4;
919 rdev->config.cayman.max_tile_pipes = 2;
920 if ((rdev->pdev->device == 0x9900) ||
921 (rdev->pdev->device == 0x9901) ||
922 (rdev->pdev->device == 0x9905) ||
923 (rdev->pdev->device == 0x9906) ||
924 (rdev->pdev->device == 0x9907) ||
925 (rdev->pdev->device == 0x9908) ||
926 (rdev->pdev->device == 0x9909) ||
927 (rdev->pdev->device == 0x990B) ||
928 (rdev->pdev->device == 0x990C) ||
929 (rdev->pdev->device == 0x990F) ||
930 (rdev->pdev->device == 0x9910) ||
931 (rdev->pdev->device == 0x9917) ||
932 (rdev->pdev->device == 0x9999) ||
933 (rdev->pdev->device == 0x999C)) {
934 rdev->config.cayman.max_simds_per_se = 6;
935 rdev->config.cayman.max_backends_per_se = 2;
936 rdev->config.cayman.max_hw_contexts = 8;
937 rdev->config.cayman.sx_max_export_size = 256;
938 rdev->config.cayman.sx_max_export_pos_size = 64;
939 rdev->config.cayman.sx_max_export_smx_size = 192;
940 } else if ((rdev->pdev->device == 0x9903) ||
941 (rdev->pdev->device == 0x9904) ||
942 (rdev->pdev->device == 0x990A) ||
943 (rdev->pdev->device == 0x990D) ||
944 (rdev->pdev->device == 0x990E) ||
945 (rdev->pdev->device == 0x9913) ||
946 (rdev->pdev->device == 0x9918) ||
947 (rdev->pdev->device == 0x999D)) {
948 rdev->config.cayman.max_simds_per_se = 4;
949 rdev->config.cayman.max_backends_per_se = 2;
950 rdev->config.cayman.max_hw_contexts = 8;
951 rdev->config.cayman.sx_max_export_size = 256;
952 rdev->config.cayman.sx_max_export_pos_size = 64;
953 rdev->config.cayman.sx_max_export_smx_size = 192;
954 } else if ((rdev->pdev->device == 0x9919) ||
955 (rdev->pdev->device == 0x9990) ||
956 (rdev->pdev->device == 0x9991) ||
957 (rdev->pdev->device == 0x9994) ||
958 (rdev->pdev->device == 0x9995) ||
959 (rdev->pdev->device == 0x9996) ||
960 (rdev->pdev->device == 0x999A) ||
961 (rdev->pdev->device == 0x99A0)) {
962 rdev->config.cayman.max_simds_per_se = 3;
963 rdev->config.cayman.max_backends_per_se = 1;
964 rdev->config.cayman.max_hw_contexts = 4;
965 rdev->config.cayman.sx_max_export_size = 128;
966 rdev->config.cayman.sx_max_export_pos_size = 32;
967 rdev->config.cayman.sx_max_export_smx_size = 96;
969 rdev->config.cayman.max_simds_per_se = 2;
970 rdev->config.cayman.max_backends_per_se = 1;
971 rdev->config.cayman.max_hw_contexts = 4;
972 rdev->config.cayman.sx_max_export_size = 128;
973 rdev->config.cayman.sx_max_export_pos_size = 32;
974 rdev->config.cayman.sx_max_export_smx_size = 96;
976 rdev->config.cayman.max_texture_channel_caches = 2;
977 rdev->config.cayman.max_gprs = 256;
978 rdev->config.cayman.max_threads = 256;
979 rdev->config.cayman.max_gs_threads = 32;
980 rdev->config.cayman.max_stack_entries = 512;
981 rdev->config.cayman.sx_num_of_sets = 8;
982 rdev->config.cayman.sq_num_cf_insts = 2;
984 rdev->config.cayman.sc_prim_fifo_size = 0x40;
985 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30;
986 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130;
1004 evergreen_fix_pci_max_read_req_size(rdev);
1010 rdev->config.cayman.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
1011 if (rdev->config.cayman.mem_row_size_in_kb > 4)
1012 rdev->config.cayman.mem_row_size_in_kb = 4;
1014 rdev->config.cayman.shader_engine_tile_size = 32;
1015 rdev->config.cayman.num_gpus = 1;
1016 rdev->config.cayman.multi_gpu_tile_size = 64;
1019 rdev->config.cayman.num_tile_pipes = (1 << tmp);
1021 rdev->config.cayman.mem_max_burst_length_bytes = (tmp + 1) * 256;
1023 rdev->config.cayman.num_shader_engines = tmp + 1;
1025 rdev->config.cayman.num_gpus = tmp + 1;
1027 rdev->config.cayman.multi_gpu_tile_size = 1 << tmp;
1029 rdev->config.cayman.mem_row_size_in_kb = 1 << tmp;
1039 rdev->config.cayman.tile_config = 0;
1040 switch (rdev->config.cayman.num_tile_pipes) {
1043 rdev->config.cayman.tile_config |= (0 << 0);
1046 rdev->config.cayman.tile_config |= (1 << 0);
1049 rdev->config.cayman.tile_config |= (2 << 0);
1052 rdev->config.cayman.tile_config |= (3 << 0);
1057 if (rdev->flags & RADEON_IS_IGP)
1058 rdev->config.cayman.tile_config |= 1 << 4;
1062 rdev->config.cayman.tile_config |= 0 << 4;
1065 rdev->config.cayman.tile_config |= 1 << 4;
1069 rdev->config.cayman.tile_config |= 2 << 4;
1073 rdev->config.cayman.tile_config |=
1075 rdev->config.cayman.tile_config |=
1079 for (i = (rdev->config.cayman.max_shader_engines - 1); i >= 0; i--) {
1091 for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines); i++)
1095 for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines); i++)
1099 for (i = 0; i < rdev->config.cayman.max_shader_engines; i++) {
1105 simd_disable_bitmap |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
1109 rdev->config.cayman.active_simds = hweight32(~tmp);
1116 if (ASIC_IS_DCE6(rdev))
1125 if ((rdev->config.cayman.max_backends_per_se == 1) &&
1126 (rdev->flags & RADEON_IS_IGP)) {
1136 tmp = r6xx_remap_render_backend(rdev, tmp,
1137 rdev->config.cayman.max_backends_per_se *
1138 rdev->config.cayman.max_shader_engines,
1141 rdev->config.cayman.backend_map = tmp;
1145 for (i = 0; i < rdev->config.cayman.max_texture_channel_caches; i++)
1167 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.cayman.sx_num_of_sets);
1183 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.cayman.sx_max_export_size / 4) - 1) |
1184 POSITION_BUFFER_SIZE((rdev->config.cayman.sx_max_export_pos_size / 4) - 1) |
1185 SMX_BUFFER_SIZE((rdev->config.cayman.sx_max_export_smx_size / 4) - 1)));
1187 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.cayman.sc_prim_fifo_size) |
1188 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_hiz_tile_fifo_size) |
1189 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_earlyz_tile_fifo_size)));
1196 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.cayman.sq_num_cf_insts) |
1239 if (rdev->family == CHIP_ARUBA) {
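
cayman_gpu_init (878-1250) fills rdev->config.cayman from a per-family switch, refines the Aruba (IGP) limits by PCI device ID, then packs the topology into the tile_config word that userspace queries: bits [1:0] encode num_tile_pipes as a log2 code (1039-1052), with bank and row-size fields above it (1057-1076). The active-SIMD count at 1099-1109 comes from reading each shader engine's harvest fuses, masking off bit positions beyond max_simds_per_se, and popcounting the survivors. A simplified per-SE sketch (the source packs both SE bitmaps into one word; read_se_simd_disable_bits is a hypothetical helper standing in for the GRBM_GFX_INDEX select plus fuse-register read):

	u32 active = 0;
	for (i = 0; i < rdev->config.cayman.max_shader_engines; i++) {
		/* harvest fuses: a set bit = fused-off SIMD (helper is hypothetical) */
		u32 disabled = read_se_simd_disable_bits(rdev, i);

		/* bits at or above max_simds_per_se don't exist; treat as disabled */
		disabled |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
		active += hweight32(~disabled);	/* popcount of live SIMDs */
	}
	rdev->config.cayman.active_simds = active;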
1252 void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev)
1261 static int cayman_pcie_gart_enable(struct radeon_device *rdev)
1265 if (rdev->gart.robj == NULL) {
1266 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
1269 r = radeon_gart_table_vram_pin(rdev);
1292 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
1293 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
1294 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
1296 (u32)(rdev->dummy_page.addr >> 12));
1313 rdev->vm_manager.max_pfn - 1);
1315 rdev->vm_manager.saved_table_addr[i]);
1320 (u32)(rdev->dummy_page.addr >> 12));
1337 cayman_pcie_gart_tlb_flush(rdev);
1339 (unsigned)(rdev->mc.gtt_size >> 20),
1340 (unsigned long long)rdev->gart.table_addr);
1341 rdev->gart.ready = true;
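
cayman_pcie_gart_enable (1261-1343) refuses to run without a pinned VRAM table object, then programs VM context 0 with the GTT range and table base. All addresses are written as 4 KiB page-frame numbers, hence the >> 12, and faults are steered to the dummy page so a bad translation resolves harmlessly. A sketch of the address programming, using the register names visible in the fragments above:

	/* addresses are programmed as 4 KiB page-frame numbers */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);

	/* unmapped accesses hit the dummy page instead of random memory */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));

After a TLB flush the driver logs the table size ((gtt_size >> 20) MiB) and location and marks rdev->gart.ready.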
1345 static void cayman_pcie_gart_disable(struct radeon_device *rdev)
1350 rdev->vm_manager.saved_table_addr[i] = RREG32(
1369 radeon_gart_table_vram_unpin(rdev);
1372 static void cayman_pcie_gart_fini(struct radeon_device *rdev)
1374 cayman_pcie_gart_disable(rdev);
1375 radeon_gart_table_vram_free(rdev);
1376 radeon_gart_fini(rdev);
1379 void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
1389 void cayman_fence_ring_emit(struct radeon_device *rdev,
1392 struct radeon_ring *ring = &rdev->ring[fence->ring];
1393 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
1412 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
1414 struct radeon_ring *ring = &rdev->ring[ib->ring];
1448 static void cayman_cp_enable(struct radeon_device *rdev, bool enable)
1453 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
1454 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
1457 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
1461 u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
1466 if (rdev->wb.enabled)
1467 rptr = rdev->wb.wb[ring->rptr_offs/4];
1480 u32 cayman_gfx_get_wptr(struct radeon_device *rdev,
1495 void cayman_gfx_set_wptr(struct radeon_device *rdev,
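
The ring-pointer accessors (1461-1509) prefer the writeback buffer when it is enabled: the CP DMAs its read pointer into system memory, so the driver fetches it with a cheap memory read instead of an MMIO register read. A sketch of the read-pointer side; CP_RB0_RPTR as the MMIO fallback is an assumption, and the real function picks a per-ring register for CP1/CP2:

u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
			struct radeon_ring *ring)
{
	u32 rptr;

	if (rdev->wb.enabled)
		/* writeback slot; rptr_offs is in bytes, wb[] in u32s */
		rptr = rdev->wb.wb[ring->rptr_offs / 4];
	else
		rptr = RREG32(CP_RB0_RPTR);	/* MMIO fallback */

	return rptr;
}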
1510 static int cayman_cp_load_microcode(struct radeon_device *rdev)
1515 if (!rdev->me_fw || !rdev->pfp_fw)
1518 cayman_cp_enable(rdev, false);
1520 fw_data = (const __be32 *)rdev->pfp_fw->data;
1526 fw_data = (const __be32 *)rdev->me_fw->data;
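
cayman_cp_load_microcode (1510-1536) refuses to run without both ME and PFP images, halts the CP, then streams each big-endian blob through an auto-incrementing ADDR/DATA register pair. A sketch of the PFP half; CP_PFP_UCODE_ADDR/DATA and the size constant are the conventional r600-family names, assumed here:

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	cayman_cp_enable(rdev, false);		/* halt the CP before reload */

	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);		/* rewind the write pointer */
	for (i = 0; i < CAYMAN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);
	/* ... same dance for rdev->me_fw into the CP_ME ucode registers ... */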
1537 static int cayman_cp_start(struct radeon_device *rdev)
1539 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1542 r = radeon_ring_lock(rdev, ring, 7);
1550 radeon_ring_write(ring, rdev->config.cayman.max_hw_contexts - 1);
1554 radeon_ring_unlock_commit(rdev, ring, false);
1556 cayman_cp_enable(rdev, true);
1558 r = radeon_ring_lock(rdev, ring, cayman_default_size + 19);
1596 radeon_ring_unlock_commit(rdev, ring, false);
1603 static void cayman_cp_fini(struct radeon_device *rdev)
1605 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1606 cayman_cp_enable(rdev, false);
1607 radeon_ring_fini(rdev, ring);
1608 radeon_scratch_free(rdev, ring->rptr_save_reg);
1611 static int cayman_cp_resume(struct radeon_device *rdev)
1672 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
1680 ring = &rdev->ring[ridx[i]];
1689 addr = rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET;
1696 ring = &rdev->ring[ridx[i]];
1702 ring = &rdev->ring[ridx[i]];
1714 cayman_cp_start(rdev);
1715 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
1716 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
1717 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
1719 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
1721 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
1722 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
1723 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
1727 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
1728 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
1733 u32 cayman_gpu_check_soft_reset(struct radeon_device *rdev)
1794 if (evergreen_is_display_hung(rdev))
1811 static void cayman_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
1820 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
1822 evergreen_print_gpu_status_regs(rdev);
1823 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_ADDR 0x%08X\n",
1825 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_STATUS 0x%08X\n",
1827 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
1829 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
1851 evergreen_mc_stop(rdev, &save);
1852 if (evergreen_mc_wait_for_idle(rdev)) {
1853 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1901 if (!(rdev->flags & RADEON_IS_IGP)) {
1909 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
1923 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
1937 evergreen_mc_resume(rdev, &save);
1940 evergreen_print_gpu_status_regs(rdev);
1943 int cayman_asic_reset(struct radeon_device *rdev, bool hard)
1948 evergreen_gpu_pci_config_reset(rdev);
1952 reset_mask = cayman_gpu_check_soft_reset(rdev);
1955 r600_set_bios_scratch_engine_hung(rdev, true);
1957 cayman_gpu_soft_reset(rdev, reset_mask);
1959 reset_mask = cayman_gpu_check_soft_reset(rdev);
1962 evergreen_gpu_pci_config_reset(rdev);
1964 r600_set_bios_scratch_engine_hung(rdev, false);
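
cayman_asic_reset (1943-1969) ties the reset pieces together: a hard reset goes straight to the PCI-config reset, otherwise the driver computes which blocks look hung, soft-resets only those, re-checks, and escalates to the PCI-config reset if something is still stuck. Reconstructed flow; the exact ordering of the BIOS-scratch bookkeeping is from memory:

int cayman_asic_reset(struct radeon_device *rdev, bool hard)
{
	u32 reset_mask;

	if (hard) {
		evergreen_gpu_pci_config_reset(rdev);
		return 0;
	}

	reset_mask = cayman_gpu_check_soft_reset(rdev);
	if (reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, true);

	cayman_gpu_soft_reset(rdev, reset_mask);

	/* escalate if the soft reset did not clear the hang */
	reset_mask = cayman_gpu_check_soft_reset(rdev);
	if (reset_mask)
		evergreen_gpu_pci_config_reset(rdev);

	r600_set_bios_scratch_engine_hung(rdev, false);
	return 0;
}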
1972 * @rdev: radeon_device pointer
1978 bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
1980 u32 reset_mask = cayman_gpu_check_soft_reset(rdev);
1985 radeon_ring_lockup_update(rdev, ring);
1988 return radeon_ring_test_lockup(rdev, ring);
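
cayman_gfx_is_lockup (1978-1989) reuses the same hang mask: if no GFX-related reset bit is raised, the ring is merely idle or slow, so the lockup timer is refreshed and false is returned; only when the mask implicates the graphics engine does it fall through to the generic per-ring lockup test. Sketch, with the precise mask bits as assumptions:

bool cayman_gfx_is_lockup(struct radeon_device *rdev,
			  struct radeon_ring *ring)
{
	u32 reset_mask = cayman_gpu_check_soft_reset(rdev);

	if (!(reset_mask & (RADEON_RESET_GFX |
			    RADEON_RESET_COMPUTE |
			    RADEON_RESET_CP))) {
		radeon_ring_lockup_update(rdev, ring);
		return false;	/* engine healthy; just refresh the timer */
	}
	return radeon_ring_test_lockup(rdev, ring);
}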
1991 static void cayman_uvd_init(struct radeon_device *rdev)
1995 if (!rdev->has_uvd)
1998 r = radeon_uvd_init(rdev);
2000 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
2002 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
2007 rdev->has_uvd = false;
2010 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
2011 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
2014 static void cayman_uvd_start(struct radeon_device *rdev)
2018 if (!rdev->has_uvd)
2021 r = uvd_v2_2_resume(rdev);
2023 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
2026 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
2028 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
2034 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
2037 static void cayman_uvd_resume(struct radeon_device *rdev)
2042 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
2045 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
2046 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
2048 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
2051 r = uvd_v1_0_init(rdev);
2053 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
2058 static void cayman_vce_init(struct radeon_device *rdev)
2063 if (!rdev->has_vce)
2066 r = radeon_vce_init(rdev);
2068 dev_err(rdev->dev, "failed VCE (%d) init.\n", r);
2070 * At this point rdev->vce.vcpu_bo is NULL which trickles down
2075 rdev->has_vce = false;
2078 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL;
2079 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096);
2080 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL;
2081 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096);
2084 static void cayman_vce_start(struct radeon_device *rdev)
2088 if (!rdev->has_vce)
2091 r = radeon_vce_resume(rdev);
2093 dev_err(rdev->dev, "failed VCE resume (%d).\n", r);
2096 r = vce_v1_0_resume(rdev);
2098 dev_err(rdev->dev, "failed VCE resume (%d).\n", r);
2101 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE1_INDEX);
2103 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r);
2106 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE2_INDEX);
2108 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r);
2114 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0;
2115 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0;
2118 static void cayman_vce_resume(struct radeon_device *rdev)
2123 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size)
2126 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
2127 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0);
2129 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r);
2132 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
2133 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0);
2135 dev_err(rdev->dev, "failed initializing VCE2 ring (%d).\n", r);
2138 r = vce_v1_0_init(rdev);
2140 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r);
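
The UVD/VCE helpers (1991-2143) split media-block bring-up into three phases: init (object allocation at driver load), start (firmware resume and fence rings, before IRQ setup), and resume (ring init and hardware kick, after IRQs). Crucially, every failure is absorbed locally: the helper logs, zeroes ring_size or clears has_uvd/has_vce, and returns void, so a broken media block degrades features instead of failing the whole driver. The init step, assembled directly from the fragments at 1991-2011:

static void cayman_uvd_init(struct radeon_device *rdev)
{
	int r;

	if (!rdev->has_uvd)
		return;

	r = radeon_uvd_init(rdev);
	if (r) {
		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
		/* at this point rdev->uvd.vcpu_bo is NULL, which trickles
		 * down to the later start/resume phases; just disable UVD */
		rdev->has_uvd = false;
		return;
	}
	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
}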
2145 static int cayman_startup(struct radeon_device *rdev)
2147 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2151 evergreen_pcie_gen2_enable(rdev);
2153 evergreen_program_aspm(rdev);
2156 r = r600_vram_scratch_init(rdev);
2160 evergreen_mc_program(rdev);
2162 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) {
2163 r = ni_mc_load_microcode(rdev);
2170 r = cayman_pcie_gart_enable(rdev);
2173 cayman_gpu_init(rdev);
2176 if (rdev->flags & RADEON_IS_IGP) {
2177 rdev->rlc.reg_list = tn_rlc_save_restore_register_list;
2178 rdev->rlc.reg_list_size =
2180 rdev->rlc.cs_data = cayman_cs_data;
2181 r = sumo_rlc_init(rdev);
2189 r = radeon_wb_init(rdev);
2193 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
2195 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
2199 cayman_uvd_start(rdev);
2200 cayman_vce_start(rdev);
2202 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
2204 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
2208 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
2210 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
2214 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
2216 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
2220 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
2222 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
2227 if (!rdev->irq.installed) {
2228 r = radeon_irq_kms_init(rdev);
2233 r = r600_irq_init(rdev);
2236 radeon_irq_kms_fini(rdev);
2239 evergreen_irq_set(rdev);
2241 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
2246 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
2247 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
2252 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
2253 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
2258 r = cayman_cp_load_microcode(rdev);
2261 r = cayman_cp_resume(rdev);
2265 r = cayman_dma_resume(rdev);
2269 cayman_uvd_resume(rdev);
2270 cayman_vce_resume(rdev);
2272 r = radeon_ib_pool_init(rdev);
2274 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
2278 r = radeon_vm_manager_init(rdev);
2280 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
2284 r = radeon_audio_init(rdev);
2291 int cayman_resume(struct radeon_device *rdev)
2300 atom_asic_init(rdev->mode_info.atom_context);
2303 ni_init_golden_registers(rdev);
2305 if (rdev->pm.pm_method == PM_METHOD_DPM)
2306 radeon_pm_resume(rdev);
2308 rdev->accel_working = true;
2309 r = cayman_startup(rdev);
2312 rdev->accel_working = false;
2318 int cayman_suspend(struct radeon_device *rdev)
2320 radeon_pm_suspend(rdev);
2321 radeon_audio_fini(rdev);
2322 radeon_vm_manager_fini(rdev);
2323 cayman_cp_enable(rdev, false);
2324 cayman_dma_stop(rdev);
2325 if (rdev->has_uvd) {
2326 radeon_uvd_suspend(rdev);
2327 uvd_v1_0_fini(rdev);
2329 evergreen_irq_suspend(rdev);
2330 radeon_wb_disable(rdev);
2331 cayman_pcie_gart_disable(rdev);
2341 int cayman_init(struct radeon_device *rdev)
2343 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2347 if (!radeon_get_bios(rdev)) {
2348 if (ASIC_IS_AVIVO(rdev))
2352 if (!rdev->is_atom_bios) {
2353 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n");
2356 r = radeon_atombios_init(rdev);
2361 if (!radeon_card_posted(rdev)) {
2362 if (!rdev->bios) {
2363 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2367 atom_asic_init(rdev->mode_info.atom_context);
2370 ni_init_golden_registers(rdev);
2372 r600_scratch_init(rdev);
2374 radeon_surface_init(rdev);
2376 radeon_get_clock_info(rdev->ddev);
2378 radeon_fence_driver_init(rdev);
2380 r = evergreen_mc_init(rdev);
2384 r = radeon_bo_init(rdev);
2388 if (rdev->flags & RADEON_IS_IGP) {
2389 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
2390 r = ni_init_microcode(rdev);
2397 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
2398 r = ni_init_microcode(rdev);
2407 radeon_pm_init(rdev);
2410 r600_ring_init(rdev, ring, 1024 * 1024);
2412 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
2414 r600_ring_init(rdev, ring, 64 * 1024);
2416 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
2418 r600_ring_init(rdev, ring, 64 * 1024);
2420 cayman_uvd_init(rdev);
2421 cayman_vce_init(rdev);
2423 rdev->ih.ring_obj = NULL;
2424 r600_ih_ring_init(rdev, 64 * 1024);
2426 r = r600_pcie_gart_init(rdev);
2430 rdev->accel_working = true;
2431 r = cayman_startup(rdev);
2433 dev_err(rdev->dev, "disabling GPU acceleration\n");
2434 cayman_cp_fini(rdev);
2435 cayman_dma_fini(rdev);
2436 r600_irq_fini(rdev);
2437 if (rdev->flags & RADEON_IS_IGP)
2438 sumo_rlc_fini(rdev);
2439 radeon_wb_fini(rdev);
2440 radeon_ib_pool_fini(rdev);
2441 radeon_vm_manager_fini(rdev);
2442 radeon_irq_kms_fini(rdev);
2443 cayman_pcie_gart_fini(rdev);
2444 rdev->accel_working = false;
2454 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
2462 void cayman_fini(struct radeon_device *rdev)
2464 radeon_pm_fini(rdev);
2465 cayman_cp_fini(rdev);
2466 cayman_dma_fini(rdev);
2467 r600_irq_fini(rdev);
2468 if (rdev->flags & RADEON_IS_IGP)
2469 sumo_rlc_fini(rdev);
2470 radeon_wb_fini(rdev);
2471 radeon_vm_manager_fini(rdev);
2472 radeon_ib_pool_fini(rdev);
2473 radeon_irq_kms_fini(rdev);
2474 uvd_v1_0_fini(rdev);
2475 radeon_uvd_fini(rdev);
2476 if (rdev->has_vce)
2477 radeon_vce_fini(rdev);
2478 cayman_pcie_gart_fini(rdev);
2479 r600_vram_scratch_fini(rdev);
2480 radeon_gem_fini(rdev);
2481 radeon_fence_driver_fini(rdev);
2482 radeon_bo_fini(rdev);
2483 radeon_atombios_fini(rdev);
2484 kfree(rdev->bios);
2485 rdev->bios = NULL;
2491 int cayman_vm_init(struct radeon_device *rdev)
2494 rdev->vm_manager.nvm = 8;
2496 if (rdev->flags & RADEON_IS_IGP) {
2499 rdev->vm_manager.vram_base_offset = tmp;
2501 rdev->vm_manager.vram_base_offset = 0;
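
cayman_vm_init (2491-2503) sizes the VM manager at eight hardware VM contexts and derives the VRAM base offset: zero on discrete boards, while on IGPs the framebuffer lives in a carve-out whose offset is read from a fuse register. A sketch; FUS_MC_VM_FB_OFFSET and the << 22 scaling are recalled from the NI register headers and should be verified:

int cayman_vm_init(struct radeon_device *rdev)
{
	/* eight hardware VM contexts */
	rdev->vm_manager.nvm = 8;

	if (rdev->flags & RADEON_IS_IGP) {
		/* IGP: VRAM carve-out offset (register and granularity assumed) */
		u64 tmp = RREG32(FUS_MC_VM_FB_OFFSET);
		tmp <<= 22;
		rdev->vm_manager.vram_base_offset = tmp;
	} else {
		rdev->vm_manager.vram_base_offset = 0;
	}
	return 0;
}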
2505 void cayman_vm_fini(struct radeon_device *rdev)
2512 * @rdev: radeon_device pointer
2518 void cayman_vm_decode_fault(struct radeon_device *rdev,
2676 void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
2705 int tn_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk)
2710 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,