Lines Matching defs:rdev (the struct radeon_device *rdev handle as it appears throughout the RV770 support code, rv770.c)
46 static void rv770_gpu_init(struct radeon_device *rdev);
47 void rv770_fini(struct radeon_device *rdev);
48 static void rv770_pcie_gen2_enable(struct radeon_device *rdev);
49 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
51 int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
57 if (rdev->family == CHIP_RV740)
58 return evergreen_set_uvd_clocks(rdev, vclk, dclk);
71 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000,
91 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
122 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
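rv770_set_uvd_clocks() leans on two helpers: radeon_uvd_calc_upll_dividers() to pick feedback/post dividers inside the 50000..160000 VCO window passed above, and radeon_uvd_send_upll_ctlreq() to latch the result through CG_UPLL_FUNC_CNTL. As a hedged, standalone illustration of the kind of divider search such a helper performs (generic PLL arithmetic with invented limits, units and values, not the driver's actual algorithm):

#include <stdio.h>

/* Brute-force divider search of the general kind a UPLL helper performs:
 * keep the VCO (ref * fb) inside [vco_min, vco_max] and minimise the error
 * of (ref * fb / pd) against the requested clock.  Everything here is
 * invented for the example. */
static unsigned pick_dividers(unsigned ref, unsigned target,
                              unsigned vco_min, unsigned vco_max,
                              unsigned *best_fb, unsigned *best_pd)
{
    unsigned fb, pd, best_err = ~0u;

    for (pd = 1; pd <= 127; pd++) {
        for (fb = 1; fb <= 1023; fb++) {
            unsigned vco = ref * fb;
            unsigned out, err;

            if (vco < vco_min || vco > vco_max)
                continue;
            out = vco / pd;
            err = out > target ? out - target : target - out;
            if (err < best_err) {
                best_err = err;
                *best_fb = fb;
                *best_pd = pd;
            }
        }
    }
    return best_err;
}

int main(void)
{
    unsigned fb = 0, pd = 0;
    unsigned err = pick_dividers(2700, 54000, 50000, 160000, &fb, &pd);

    printf("fb=%u pd=%u err=%u\n", fb, pd, err);
    return 0;
}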
720 static void rv770_init_golden_registers(struct radeon_device *rdev)
722 switch (rdev->family) {
724 radeon_program_register_sequence(rdev,
727 radeon_program_register_sequence(rdev,
730 if (rdev->pdev->device == 0x994e)
731 radeon_program_register_sequence(rdev,
735 radeon_program_register_sequence(rdev,
738 radeon_program_register_sequence(rdev,
743 radeon_program_register_sequence(rdev,
746 radeon_program_register_sequence(rdev,
749 radeon_program_register_sequence(rdev,
752 radeon_program_register_sequence(rdev,
757 radeon_program_register_sequence(rdev,
760 radeon_program_register_sequence(rdev,
763 radeon_program_register_sequence(rdev,
766 radeon_program_register_sequence(rdev,
771 radeon_program_register_sequence(rdev,
774 radeon_program_register_sequence(rdev,
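The golden-register setup above is just repeated calls to radeon_program_register_sequence() with per-family (and, for device 0x994e, per-device) tables. As far as I recall the tables are (reg, and_mask, or_mask) triplets applied as masked read-modify-writes; below is a user-space sketch of that idea with a simulated register file (treat the exact semantics as an assumption, not a quote of the helper):

#include <stdint.h>
#include <stdio.h>

static uint32_t regs[16];                         /* fake register file */

static uint32_t rreg32(uint32_t reg)              { return regs[reg]; }
static void     wreg32(uint32_t reg, uint32_t v)  { regs[reg] = v; }

static void program_register_sequence(const uint32_t *table, unsigned count)
{
    unsigned i;

    for (i = 0; i + 2 < count; i += 3) {
        uint32_t reg = table[i], and_mask = table[i + 1], or_mask = table[i + 2];
        uint32_t tmp;

        if (and_mask == 0xffffffff) {             /* full overwrite */
            tmp = or_mask;
        } else {                                  /* masked update */
            tmp = rreg32(reg);
            tmp &= ~and_mask;
            tmp |= or_mask;
        }
        wreg32(reg, tmp);
    }
}

int main(void)
{
    static const uint32_t golden[] = {
        /* reg, and_mask,    or_mask   (values invented for the example) */
        3,      0xffffffff,  0x12345678,
        5,      0x000000ff,  0x00000042,
    };

    regs[5] = 0xdeadbeef;
    program_register_sequence(golden, sizeof(golden) / sizeof(golden[0]));
    printf("reg3=0x%08x reg5=0x%08x\n", regs[3], regs[5]);  /* 0x12345678, 0xdeadbe42 */
    return 0;
}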
789 * @rdev: radeon_device pointer
794 u32 rv770_get_xclk(struct radeon_device *rdev)
796 u32 reference_clock = rdev->clock.spll.reference_freq;
808 void rv770_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base, bool async)
810 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
834 for (i = 0; i < rdev->usec_timeout; i++) {
846 bool rv770_page_flip_pending(struct radeon_device *rdev, int crtc_id)
848 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
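The waits keyed off rdev->usec_timeout (line 834 above, and again at 1150 further down) follow a simple bounded-poll idiom. A hedged user-space sketch of the shape, with illustrative names in place of the driver's udelay()/RREG32 machinery:

#include <stdbool.h>
#include <stdio.h>
#include <unistd.h>

static bool wait_for(bool (*done)(void *), void *ctx, unsigned usec_timeout)
{
    unsigned i;

    for (i = 0; i < usec_timeout; i++) {
        if (done(ctx))
            return true;
        usleep(1);                /* the driver busy-waits with udelay(1) */
    }
    return false;                 /* caller typically warns about the timeout */
}

/* toy predicate: reports "done" after it has been polled a few times */
static bool polled_enough(void *ctx)
{
    unsigned *polls = ctx;
    return ++(*polls) >= 5;
}

int main(void)
{
    unsigned polls = 0;

    printf("status: %s\n", wait_for(polled_enough, &polls, 100)
           ? "ready" : "timed out");
    return 0;
}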
856 int rv770_get_temp(struct radeon_device *rdev)
875 void rv770_pm_misc(struct radeon_device *rdev)
877 int req_ps_idx = rdev->pm.requested_power_state_index;
878 int req_cm_idx = rdev->pm.requested_clock_mode_index;
879 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
886 if (voltage->voltage != rdev->pm.current_vddc) {
887 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
888 rdev->pm.current_vddc = voltage->voltage;
897 static int rv770_pcie_gart_enable(struct radeon_device *rdev)
902 if (rdev->gart.robj == NULL) {
903 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
906 r = radeon_gart_table_vram_pin(rdev);
923 if (rdev->family == CHIP_RV740)
929 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
930 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
931 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
935 (u32)(rdev->dummy_page.addr >> 12));
939 r600_pcie_gart_tlb_flush(rdev);
941 (unsigned)(rdev->mc.gtt_size >> 20),
942 (unsigned long long)rdev->gart.table_addr);
943 rdev->gart.ready = true;
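The GART aperture registers written at lines 929-935 take 4 KiB page-frame numbers, which is why gtt_start, gtt_end, the page-table address, and the dummy page address are all shifted right by 12. A minimal standalone illustration of that conversion; wreg32() here just prints, the addresses are examples, and the last register name is a placeholder because only the value expression appears in the match list:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the driver's WREG32(); here we only print what would be written. */
static void wreg32(const char *name, uint32_t val)
{
    printf("%-42s <- 0x%08x\n", name, val);
}

int main(void)
{
    /* example addresses, not real ones */
    uint64_t gtt_start  = 0x40000000ULL;
    uint64_t gtt_end    = 0x7fffffffULL;
    uint64_t table_addr = 0x00100000ULL;
    uint64_t dummy_page = 0x00200000ULL;

    /* the hardware takes 4 KiB page-frame numbers, hence the >> 12 */
    wreg32("VM_CONTEXT0_PAGE_TABLE_START_ADDR", (uint32_t)(gtt_start >> 12));
    wreg32("VM_CONTEXT0_PAGE_TABLE_END_ADDR",   (uint32_t)(gtt_end >> 12));
    wreg32("VM_CONTEXT0_PAGE_TABLE_BASE_ADDR",  (uint32_t)(table_addr >> 12));
    wreg32("protection fault default (placeholder)", (uint32_t)(dummy_page >> 12));
    return 0;
}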
947 static void rv770_pcie_gart_disable(struct radeon_device *rdev)
970 radeon_gart_table_vram_unpin(rdev);
973 static void rv770_pcie_gart_fini(struct radeon_device *rdev)
975 radeon_gart_fini(rdev);
976 rv770_pcie_gart_disable(rdev);
977 radeon_gart_table_vram_free(rdev);
981 static void rv770_agp_enable(struct radeon_device *rdev)
1008 static void rv770_mc_program(struct radeon_device *rdev)
1027 rv515_mc_stop(rdev, &save);
1028 if (r600_mc_wait_for_idle(rdev)) {
1029 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1034 if (rdev->flags & RADEON_IS_AGP) {
1035 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
1038 rdev->mc.vram_start >> 12);
1040 rdev->mc.gtt_end >> 12);
1044 rdev->mc.gtt_start >> 12);
1046 rdev->mc.vram_end >> 12);
1050 rdev->mc.vram_start >> 12);
1052 rdev->mc.vram_end >> 12);
1054 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
1055 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
1056 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
1058 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
1061 if (rdev->flags & RADEON_IS_AGP) {
1062 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
1063 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
1064 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
1070 if (r600_mc_wait_for_idle(rdev)) {
1071 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1073 rv515_mc_resume(rdev, &save);
1076 rv515_vga_render_disable(rdev);
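Lines 1055-1056 pack the framebuffer window into a single register value: vram_end >> 24 in the high halfword and vram_start >> 24 in the low halfword, i.e. 16 MiB granularity. A tiny standalone check of that packing (addresses are examples; the destination register itself is not in the match list):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Mirrors the packing at lines 1055-1056: start/end of VRAM in 16 MiB units,
 * in the low and high halfwords of the framebuffer-location register. */
static uint32_t pack_fb_location(uint64_t vram_start, uint64_t vram_end)
{
    uint32_t tmp = ((uint32_t)(vram_end >> 24) & 0xFFFF) << 16;

    tmp |= (uint32_t)(vram_start >> 24) & 0xFFFF;
    return tmp;
}

int main(void)
{
    /* 0 .. 1 GiB - 1 => start field 0x0000, end field 0x003f */
    uint32_t v = pack_fb_location(0, (1ULL << 30) - 1);

    assert(v == 0x003f0000u);
    printf("FB location word: 0x%08x\n", v);
    return 0;
}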
1083 void r700_cp_stop(struct radeon_device *rdev)
1085 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
1086 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
1089 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
1092 static int rv770_cp_load_microcode(struct radeon_device *rdev)
1097 if (!rdev->me_fw || !rdev->pfp_fw)
1100 r700_cp_stop(rdev);
1113 fw_data = (const __be32 *)rdev->pfp_fw->data;
1119 fw_data = (const __be32 *)rdev->me_fw->data;
1130 void r700_cp_fini(struct radeon_device *rdev)
1132 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1133 r700_cp_stop(rdev);
1134 radeon_ring_fini(rdev, ring);
1135 radeon_scratch_free(rdev, ring->rptr_save_reg);
1138 void rv770_set_clk_bypass_mode(struct radeon_device *rdev)
1142 if (rdev->flags & RADEON_IS_IGP)
1150 for (i = 0; i < rdev->usec_timeout; i++) {
1160 if ((rdev->family == CHIP_RV710) || (rdev->family == CHIP_RV730))
1170 static void rv770_gpu_init(struct radeon_device *rdev)
1194 rdev->config.rv770.tiling_group_size = 256;
1195 switch (rdev->family) {
1197 rdev->config.rv770.max_pipes = 4;
1198 rdev->config.rv770.max_tile_pipes = 8;
1199 rdev->config.rv770.max_simds = 10;
1200 rdev->config.rv770.max_backends = 4;
1201 rdev->config.rv770.max_gprs = 256;
1202 rdev->config.rv770.max_threads = 248;
1203 rdev->config.rv770.max_stack_entries = 512;
1204 rdev->config.rv770.max_hw_contexts = 8;
1205 rdev->config.rv770.max_gs_threads = 16 * 2;
1206 rdev->config.rv770.sx_max_export_size = 128;
1207 rdev->config.rv770.sx_max_export_pos_size = 16;
1208 rdev->config.rv770.sx_max_export_smx_size = 112;
1209 rdev->config.rv770.sq_num_cf_insts = 2;
1211 rdev->config.rv770.sx_num_of_sets = 7;
1212 rdev->config.rv770.sc_prim_fifo_size = 0xF9;
1213 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30;
1214 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130;
1217 rdev->config.rv770.max_pipes = 2;
1218 rdev->config.rv770.max_tile_pipes = 4;
1219 rdev->config.rv770.max_simds = 8;
1220 rdev->config.rv770.max_backends = 2;
1221 rdev->config.rv770.max_gprs = 128;
1222 rdev->config.rv770.max_threads = 248;
1223 rdev->config.rv770.max_stack_entries = 256;
1224 rdev->config.rv770.max_hw_contexts = 8;
1225 rdev->config.rv770.max_gs_threads = 16 * 2;
1226 rdev->config.rv770.sx_max_export_size = 256;
1227 rdev->config.rv770.sx_max_export_pos_size = 32;
1228 rdev->config.rv770.sx_max_export_smx_size = 224;
1229 rdev->config.rv770.sq_num_cf_insts = 2;
1231 rdev->config.rv770.sx_num_of_sets = 7;
1232 rdev->config.rv770.sc_prim_fifo_size = 0xf9;
1233 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30;
1234 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130;
1235 if (rdev->config.rv770.sx_max_export_pos_size > 16) {
1236 rdev->config.rv770.sx_max_export_pos_size -= 16;
1237 rdev->config.rv770.sx_max_export_smx_size += 16;
1241 rdev->config.rv770.max_pipes = 2;
1242 rdev->config.rv770.max_tile_pipes = 2;
1243 rdev->config.rv770.max_simds = 2;
1244 rdev->config.rv770.max_backends = 1;
1245 rdev->config.rv770.max_gprs = 256;
1246 rdev->config.rv770.max_threads = 192;
1247 rdev->config.rv770.max_stack_entries = 256;
1248 rdev->config.rv770.max_hw_contexts = 4;
1249 rdev->config.rv770.max_gs_threads = 8 * 2;
1250 rdev->config.rv770.sx_max_export_size = 128;
1251 rdev->config.rv770.sx_max_export_pos_size = 16;
1252 rdev->config.rv770.sx_max_export_smx_size = 112;
1253 rdev->config.rv770.sq_num_cf_insts = 1;
1255 rdev->config.rv770.sx_num_of_sets = 7;
1256 rdev->config.rv770.sc_prim_fifo_size = 0x40;
1257 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30;
1258 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130;
1261 rdev->config.rv770.max_pipes = 4;
1262 rdev->config.rv770.max_tile_pipes = 4;
1263 rdev->config.rv770.max_simds = 8;
1264 rdev->config.rv770.max_backends = 4;
1265 rdev->config.rv770.max_gprs = 256;
1266 rdev->config.rv770.max_threads = 248;
1267 rdev->config.rv770.max_stack_entries = 512;
1268 rdev->config.rv770.max_hw_contexts = 8;
1269 rdev->config.rv770.max_gs_threads = 16 * 2;
1270 rdev->config.rv770.sx_max_export_size = 256;
1271 rdev->config.rv770.sx_max_export_pos_size = 32;
1272 rdev->config.rv770.sx_max_export_smx_size = 224;
1273 rdev->config.rv770.sq_num_cf_insts = 2;
1275 rdev->config.rv770.sx_num_of_sets = 7;
1276 rdev->config.rv770.sc_prim_fifo_size = 0x100;
1277 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30;
1278 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130;
1280 if (rdev->config.rv770.sx_max_export_pos_size > 16) {
1281 rdev->config.rv770.sx_max_export_pos_size -= 16;
1282 rdev->config.rv770.sx_max_export_smx_size += 16;
1320 tmp = rdev->config.rv770.max_simds -
1322 rdev->config.rv770.active_simds = tmp;
1324 switch (rdev->config.rv770.max_tile_pipes) {
1339 rdev->config.rv770.tiling_npipes = rdev->config.rv770.max_tile_pipes;
1343 for (i = 0; i < rdev->config.rv770.max_backends; i++)
1347 for (i = 0; i < rdev->config.rv770.max_backends; i++)
1351 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.rv770.max_backends,
1354 rdev->config.rv770.backend_map = tmp;
1356 if (rdev->family == CHIP_RV770)
1364 rdev->config.rv770.tiling_nbanks = 4 << ((gb_tiling_config >> 4) & 0x3);
1377 rdev->config.rv770.tile_config = gb_tiling_config;
1384 if (rdev->family == CHIP_RV730) {
1415 smx_dc_ctl0 |= CACHE_DEPTH((rdev->config.rv770.sx_num_of_sets * 64) - 1);
1418 if (rdev->family != CHIP_RV740)
1424 if (rdev->family != CHIP_RV770)
1429 switch (rdev->family) {
1442 if (rdev->family != CHIP_RV770) {
1448 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.rv770.sx_max_export_size / 4) - 1) |
1449 POSITION_BUFFER_SIZE((rdev->config.rv770.sx_max_export_pos_size / 4) - 1) |
1450 SMX_BUFFER_SIZE((rdev->config.rv770.sx_max_export_smx_size / 4) - 1)));
1452 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.rv770.sc_prim_fifo_size) |
1453 SC_HIZ_TILE_FIFO_SIZE(rdev->config.rv770.sc_hiz_tile_fifo_size) |
1454 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.rv770.sc_earlyz_tile_fifo_fize)));
1464 sq_ms_fifo_sizes = (CACHE_FIFO_SIZE(16 * rdev->config.rv770.sq_num_cf_insts) |
1467 switch (rdev->family) {
1495 if (rdev->family == CHIP_RV710)
1501 WREG32(SQ_GPR_RESOURCE_MGMT_1, (NUM_PS_GPRS((rdev->config.rv770.max_gprs * 24)/64) |
1502 NUM_VS_GPRS((rdev->config.rv770.max_gprs * 24)/64) |
1503 NUM_CLAUSE_TEMP_GPRS(((rdev->config.rv770.max_gprs * 24)/64)/2)));
1505 WREG32(SQ_GPR_RESOURCE_MGMT_2, (NUM_GS_GPRS((rdev->config.rv770.max_gprs * 7)/64) |
1506 NUM_ES_GPRS((rdev->config.rv770.max_gprs * 7)/64)));
1508 sq_thread_resource_mgmt = (NUM_PS_THREADS((rdev->config.rv770.max_threads * 4)/8) |
1509 NUM_VS_THREADS((rdev->config.rv770.max_threads * 2)/8) |
1510 NUM_ES_THREADS((rdev->config.rv770.max_threads * 1)/8));
1511 if (((rdev->config.rv770.max_threads * 1) / 8) > rdev->config.rv770.max_gs_threads)
1512 sq_thread_resource_mgmt |= NUM_GS_THREADS(rdev->config.rv770.max_gs_threads);
1514 sq_thread_resource_mgmt |= NUM_GS_THREADS((rdev->config.rv770.max_gs_threads * 1)/8);
1517 WREG32(SQ_STACK_RESOURCE_MGMT_1, (NUM_PS_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4) |
1518 NUM_VS_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4)));
1520 WREG32(SQ_STACK_RESOURCE_MGMT_2, (NUM_GS_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4) |
1521 NUM_ES_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4)));
1523 sq_dyn_gpr_size_simd_ab_0 = (SIMDA_RING0((rdev->config.rv770.max_gprs * 38)/64) |
1524 SIMDA_RING1((rdev->config.rv770.max_gprs * 38)/64) |
1525 SIMDB_RING0((rdev->config.rv770.max_gprs * 38)/64) |
1526 SIMDB_RING1((rdev->config.rv770.max_gprs * 38)/64));
1540 if (rdev->family == CHIP_RV710)
1547 switch (rdev->family) {
1560 num_gs_verts_per_thread = rdev->config.rv770.max_pipes * 16;
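The SQ resource setup near the end of rv770_gpu_init() splits the per-family budgets by fixed ratios: 24/64 of the GPRs each to PS and VS (half of that again for clause temporaries), 7/64 each to GS and ES, thread eighths in a 4:2:1:1 shape, and a quarter of the stack entries per stage. A quick standalone reproduction of that integer arithmetic for the CHIP_RV770 numbers in the table above (256 GPRs, 248 threads, 512 stack entries):

#include <stdio.h>

int main(void)
{
    /* CHIP_RV770 values from the table above */
    unsigned max_gprs = 256, max_threads = 248, max_stack_entries = 512;

    unsigned ps_gprs     = max_gprs * 24 / 64;        /* 96 */
    unsigned vs_gprs     = max_gprs * 24 / 64;        /* 96 */
    unsigned clause_tmps = (max_gprs * 24 / 64) / 2;  /* 48 */
    unsigned gs_gprs     = max_gprs * 7 / 64;         /* 28 */
    unsigned es_gprs     = max_gprs * 7 / 64;         /* 28 */

    unsigned ps_threads  = max_threads * 4 / 8;       /* 124 */
    unsigned vs_threads  = max_threads * 2 / 8;       /* 62 */
    unsigned es_threads  = max_threads * 1 / 8;       /* 31 */

    unsigned stack_each  = max_stack_entries * 1 / 4; /* 128 per stage */

    printf("GPRs:    PS=%u VS=%u clause-temp=%u GS=%u ES=%u\n",
           ps_gprs, vs_gprs, clause_tmps, gs_gprs, es_gprs);
    printf("Threads: PS=%u VS=%u ES=%u\n", ps_threads, vs_threads, es_threads);
    printf("Stack:   %u entries per stage\n", stack_each);
    return 0;
}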
1606 void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
1612 dev_warn(rdev->dev, "limiting VRAM\n");
1616 if (rdev->flags & RADEON_IS_AGP) {
1621 dev_warn(rdev->dev, "limiting VRAM\n");
1628 dev_warn(rdev->dev, "limiting VRAM\n");
1635 dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
1639 radeon_vram_location(rdev, &rdev->mc, 0);
1640 rdev->mc.gtt_base_align = 0;
1641 radeon_gtt_location(rdev, mc);
1645 static int rv770_mc_init(struct radeon_device *rdev)
1651 rdev->mc.vram_is_ddr = true;
1676 rdev->mc.vram_width = numchan * chansize;
1678 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
1679 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
1681 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
1682 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
1683 rdev->mc.visible_vram_size = rdev->mc.aper_size;
1684 r700_vram_gtt_location(rdev, &rdev->mc);
1685 radeon_update_bandwidth_info(rdev);
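rv770_mc_init() computes the memory bus width as channel count times channel size (line 1676) and keeps the BIOS-reported VRAM size (CONFIG_MEMSIZE) separate from the CPU-visible aperture taken from PCI BAR 0. A trivial check of the width arithmetic with example values (the real channel size comes from the RAM config register and is 32 or 64 bits, as far as I recall):

#include <stdio.h>

int main(void)
{
    /* mirrors rdev->mc.vram_width = numchan * chansize at line 1676;
     * e.g. four 64-bit channels give a 256-bit memory interface */
    unsigned numchan = 4, chansize = 64;

    printf("vram_width = %u bits\n", numchan * chansize);
    return 0;
}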
1690 static void rv770_uvd_init(struct radeon_device *rdev)
1694 if (!rdev->has_uvd)
1697 r = radeon_uvd_init(rdev);
1699 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
1701 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
1706 rdev->has_uvd = false;
1709 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
1710 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
1713 static void rv770_uvd_start(struct radeon_device *rdev)
1717 if (!rdev->has_uvd)
1720 r = uvd_v2_2_resume(rdev);
1722 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
1725 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
1727 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
1733 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
1736 static void rv770_uvd_resume(struct radeon_device *rdev)
1741 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
1744 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
1745 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
1747 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
1750 r = uvd_v1_0_init(rdev);
1752 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
1757 static int rv770_startup(struct radeon_device *rdev)
1763 rv770_pcie_gen2_enable(rdev);
1766 r = r600_vram_scratch_init(rdev);
1770 rv770_mc_program(rdev);
1772 if (rdev->flags & RADEON_IS_AGP) {
1773 rv770_agp_enable(rdev);
1775 r = rv770_pcie_gart_enable(rdev);
1780 rv770_gpu_init(rdev);
1783 r = radeon_wb_init(rdev);
1787 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
1789 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
1793 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
1795 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
1799 rv770_uvd_start(rdev);
1802 if (!rdev->irq.installed) {
1803 r = radeon_irq_kms_init(rdev);
1808 r = r600_irq_init(rdev);
1811 radeon_irq_kms_fini(rdev);
1814 r600_irq_set(rdev);
1816 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1817 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
1822 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
1823 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
1828 r = rv770_cp_load_microcode(rdev);
1831 r = r600_cp_resume(rdev);
1835 r = r600_dma_resume(rdev);
1839 rv770_uvd_resume(rdev);
1841 r = radeon_ib_pool_init(rdev);
1843 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
1847 r = radeon_audio_init(rdev);
1856 int rv770_resume(struct radeon_device *rdev)
1865 atom_asic_init(rdev->mode_info.atom_context);
1868 rv770_init_golden_registers(rdev);
1870 if (rdev->pm.pm_method == PM_METHOD_DPM)
1871 radeon_pm_resume(rdev);
1873 rdev->accel_working = true;
1874 r = rv770_startup(rdev);
1877 rdev->accel_working = false;
1885 int rv770_suspend(struct radeon_device *rdev)
1887 radeon_pm_suspend(rdev);
1888 radeon_audio_fini(rdev);
1889 if (rdev->has_uvd) {
1890 uvd_v1_0_fini(rdev);
1891 radeon_uvd_suspend(rdev);
1893 r700_cp_stop(rdev);
1894 r600_dma_stop(rdev);
1895 r600_irq_suspend(rdev);
1896 radeon_wb_disable(rdev);
1897 rv770_pcie_gart_disable(rdev);
1908 int rv770_init(struct radeon_device *rdev)
1913 if (!radeon_get_bios(rdev)) {
1914 if (ASIC_IS_AVIVO(rdev))
1918 if (!rdev->is_atom_bios) {
1919 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
1922 r = radeon_atombios_init(rdev);
1926 if (!radeon_card_posted(rdev)) {
1927 if (!rdev->bios) {
1928 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
1932 atom_asic_init(rdev->mode_info.atom_context);
1935 rv770_init_golden_registers(rdev);
1937 r600_scratch_init(rdev);
1939 radeon_surface_init(rdev);
1941 radeon_get_clock_info(rdev->ddev);
1943 r = radeon_fence_driver_init(rdev);
1947 if (rdev->flags & RADEON_IS_AGP) {
1948 r = radeon_agp_init(rdev);
1950 radeon_agp_disable(rdev);
1952 r = rv770_mc_init(rdev);
1956 r = radeon_bo_init(rdev);
1960 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
1961 r = r600_init_microcode(rdev);
1969 radeon_pm_init(rdev);
1971 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
1972 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
1974 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
1975 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
1977 rv770_uvd_init(rdev);
1979 rdev->ih.ring_obj = NULL;
1980 r600_ih_ring_init(rdev, 64 * 1024);
1982 r = r600_pcie_gart_init(rdev);
1986 rdev->accel_working = true;
1987 r = rv770_startup(rdev);
1989 dev_err(rdev->dev, "disabling GPU acceleration\n");
1990 r700_cp_fini(rdev);
1991 r600_dma_fini(rdev);
1992 r600_irq_fini(rdev);
1993 radeon_wb_fini(rdev);
1994 radeon_ib_pool_fini(rdev);
1995 radeon_irq_kms_fini(rdev);
1996 rv770_pcie_gart_fini(rdev);
1997 rdev->accel_working = false;
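rv770_startup() chains the bring-up steps (PCIe gen2, VRAM scratch, MC programming, GART or AGP, GPU init, writeback, fence rings, IRQ, ring init, CP/DMA microcode and resume, UVD, IB pool, audio) and returns on the first failure; rv770_init() then unwinds everything and clears accel_working (lines 1989-1997) rather than failing the whole probe. A generic sketch of that init-with-rollback shape, using hypothetical step names:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-ins for the real bring-up steps. */
static int step_mc(void)    { return 0; }
static int step_gart(void)  { return 0; }
static int step_rings(void) { return -22; }   /* pretend ring init fails */

static void teardown(void)  { puts("tearing down in reverse order"); }

int main(void)
{
    bool accel_working = true;
    int r;

    if ((r = step_mc()) || (r = step_gart()) || (r = step_rings())) {
        /* mirrors the rv770_init() failure path: log, unwind, and keep
         * the device usable for modesetting with acceleration disabled */
        fprintf(stderr, "disabling GPU acceleration (%d)\n", r);
        teardown();
        accel_working = false;
    }
    printf("accel_working = %s\n", accel_working ? "true" : "false");
    return 0;
}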
2003 void rv770_fini(struct radeon_device *rdev)
2005 radeon_pm_fini(rdev);
2006 r700_cp_fini(rdev);
2007 r600_dma_fini(rdev);
2008 r600_irq_fini(rdev);
2009 radeon_wb_fini(rdev);
2010 radeon_ib_pool_fini(rdev);
2011 radeon_irq_kms_fini(rdev);
2012 uvd_v1_0_fini(rdev);
2013 radeon_uvd_fini(rdev);
2014 rv770_pcie_gart_fini(rdev);
2015 r600_vram_scratch_fini(rdev);
2016 radeon_gem_fini(rdev);
2017 radeon_fence_driver_fini(rdev);
2018 radeon_agp_fini(rdev);
2019 radeon_bo_fini(rdev);
2020 radeon_atombios_fini(rdev);
2021 kfree(rdev->bios);
2022 rdev->bios = NULL;
2025 static void rv770_pcie_gen2_enable(struct radeon_device *rdev)
2033 if (rdev->flags & RADEON_IS_IGP)
2036 if (!(rdev->flags & RADEON_IS_PCIE))
2040 if (ASIC_IS_X2(rdev))
2043 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
2044 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
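rv770_pcie_gen2_enable() is all guard clauses before it touches the link: bail on IGP parts, on non-PCIe boards, on X2 dual-GPU boards, and unless the upstream bus advertises 5.0 or 8.0 GT/s. A hedged sketch of that gatekeeping with placeholder types and enum names (the real code checks rdev->flags, ASIC_IS_X2(), and rdev->pdev->bus->max_bus_speed):

#include <stdbool.h>
#include <stdio.h>

/* Placeholder types for the example only. */
enum bus_speed { SPEED_2_5GT, SPEED_5_0GT, SPEED_8_0GT };

struct board {
    bool is_igp;
    bool is_pcie;
    bool is_x2;
    enum bus_speed max_bus_speed;
};

static bool gen2_possible(const struct board *b)
{
    if (b->is_igp)                        /* integrated parts: nothing to train */
        return false;
    if (!b->is_pcie)                      /* AGP/PCI boards */
        return false;
    if (b->is_x2)                         /* dual-GPU boards are skipped */
        return false;
    /* the upstream port must run at 5.0 GT/s or better */
    return b->max_bus_speed == SPEED_5_0GT || b->max_bus_speed == SPEED_8_0GT;
}

int main(void)
{
    struct board rv770 = { false, true, false, SPEED_5_0GT };

    printf("gen2 %s\n", gen2_possible(&rv770) ? "possible" : "skipped");
    return 0;
}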