Lines Matching defs:rdev in drivers/gpu/drm/radeon/cik_sdma.c
36 u32 cik_gpu_check_soft_reset(struct radeon_device *rdev);
58 * @rdev: radeon_device pointer
63 uint32_t cik_sdma_get_rptr(struct radeon_device *rdev,
68 if (rdev->wb.enabled) {
69 rptr = rdev->wb.wb[ring->rptr_offs/4];
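A sketch of the function these hits land in, reconstructed from the fragments above plus register names from cikd.h (anything not on a listed line is a best-effort reconstruction, not a verbatim quote): when write-back is enabled the read pointer comes from the CPU-visible write-back page, otherwise from the engine's MMIO register.

uint32_t cik_sdma_get_rptr(struct radeon_device *rdev,
                           struct radeon_ring *ring)
{
        u32 rptr, reg;

        if (rdev->wb.enabled) {
                /* fast path: the engine mirrors rptr into the wb page */
                rptr = rdev->wb.wb[ring->rptr_offs/4];
        } else {
                /* slow path: read it back over MMIO */
                if (ring->idx == R600_RING_TYPE_DMA_INDEX)
                        reg = SDMA0_GFX_RB_RPTR + SDMA0_REGISTER_OFFSET;
                else
                        reg = SDMA0_GFX_RB_RPTR + SDMA1_REGISTER_OFFSET;
                rptr = RREG32(reg);
        }

        return (rptr & 0x3fffc) >> 2;
}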
85 * @rdev: radeon_device pointer
90 uint32_t cik_sdma_get_wptr(struct radeon_device *rdev,
106 * @rdev: radeon_device pointer
111 void cik_sdma_set_wptr(struct radeon_device *rdev,
128 * @rdev: radeon_device pointer
133 void cik_sdma_ring_ib_execute(struct radeon_device *rdev,
136 struct radeon_ring *ring = &rdev->ring[ib->ring];
139 if (rdev->wb.enabled) {
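The wb.enabled branch on line 139 implements the "next_rptr" trick: before the indirect-buffer packet, the ring emits an SDMA linear write recording where wptr will point after the IB, so lockup detection can see forward progress. A hedged sketch (packet macros per cikd.h; the alignment details are from memory and may differ):

        if (rdev->wb.enabled) {
                u32 next_rptr = ring->wptr + 5;

                /* the following IB packet must start on an 8-DW boundary */
                while ((next_rptr & 7) != 4)
                        next_rptr++;
                next_rptr += 4;
                radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_WRITE,
                                                    SDMA_WRITE_SUB_OPCODE_LINEAR, 0));
                radeon_ring_write(ring, lower_32_bits(ring->next_rptr_gpu_addr) & 0xfffffffc);
                radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr));
                radeon_ring_write(ring, 1); /* number of DWs that follow */
                radeon_ring_write(ring, next_rptr);
        }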
164 * @rdev: radeon_device pointer
169 static void cik_sdma_hdp_flush_ring_emit(struct radeon_device *rdev,
172 struct radeon_ring *ring = &rdev->ring[ridx];
193 * @rdev: radeon_device pointer
200 void cik_sdma_fence_ring_emit(struct radeon_device *rdev,
203 struct radeon_ring *ring = &rdev->ring[fence->ring];
204 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
214 cik_sdma_hdp_flush_ring_emit(rdev, fence->ring);
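Lines 203-214 outline the whole fence path: write the sequence number to the per-ring fence address, raise a trap interrupt, then flush HDP so the CPU sees the write. A sketch of the emitted packets (opcode names per cikd.h, treated as reconstruction):

        u64 addr = rdev->fence_drv[fence->ring].gpu_addr;

        /* write the fence sequence number */
        radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_FENCE, 0, 0));
        radeon_ring_write(ring, lower_32_bits(addr));
        radeon_ring_write(ring, upper_32_bits(addr));
        radeon_ring_write(ring, fence->seq);
        /* raise a trap interrupt so sleeping waiters are woken */
        radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_TRAP, 0, 0));
        radeon_ring_write(ring, SDMA_TRAP_INTERRUPT_CONTEXT);
        /* make the write visible to the CPU (line 214 above) */
        cik_sdma_hdp_flush_ring_emit(rdev, fence->ring);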
220 * @rdev: radeon_device pointer
228 bool cik_sdma_semaphore_ring_emit(struct radeon_device *rdev,
246 * @rdev: radeon_device pointer
250 static void cik_sdma_gfx_stop(struct radeon_device *rdev)
255 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
256 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
257 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
269 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
270 rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false;
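Lines 255-257 pair with lines 433-435 in cik_sdma_gfx_resume(): while the engines are stopped, buffer moves fall back to CPU copies, which can only reach the CPU-visible BAR aperture, so TTM's active VRAM size is clamped on stop and widened back on resume. The guard only fires when an SDMA ring is actually the default copy ring:

        /* stop side clamps; the resume side restores rdev->mc.real_vram_size */
        if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
            (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
                radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);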
287 * @rdev: radeon_device pointer
291 static void cik_sdma_rlc_stop(struct radeon_device *rdev)
299 * @rdev: radeon_device pointer
304 static void cik_sdma_ctx_switch_enable(struct radeon_device *rdev, bool enable)
326 * @rdev: radeon_device pointer
331 void cik_sdma_enable(struct radeon_device *rdev, bool enable)
337 cik_sdma_gfx_stop(rdev);
338 cik_sdma_rlc_stop(rdev);
354 cik_sdma_ctx_switch_enable(rdev, enable);
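cik_sdma_enable() is the single halt/unhalt switch: on disable it first quiesces the gfx and rlc queues (lines 337-338), then toggles the HALT bit in each engine's ME_CNTL register, and finally mirrors the state into the context-switch control (line 354). A sketch under those assumptions (SDMA0_ME_CNTL and SDMA_HALT names per cikd.h):

        u32 me_cntl, reg_offset;
        int i;

        if (!enable) {
                cik_sdma_gfx_stop(rdev);
                cik_sdma_rlc_stop(rdev);
        }

        for (i = 0; i < 2; i++) {
                reg_offset = (i == 0) ? SDMA0_REGISTER_OFFSET
                                      : SDMA1_REGISTER_OFFSET;
                me_cntl = RREG32(SDMA0_ME_CNTL + reg_offset);
                if (enable)
                        me_cntl &= ~SDMA_HALT;
                else
                        me_cntl |= SDMA_HALT;
                WREG32(SDMA0_ME_CNTL + reg_offset, me_cntl);
        }

        cik_sdma_ctx_switch_enable(rdev, enable);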
360 * @rdev: radeon_device pointer
365 static int cik_sdma_gfx_resume(struct radeon_device *rdev)
375 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
379 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
401 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);
403 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
405 if (rdev->wb.enabled)
426 r = radeon_ring_test(rdev, ring->idx, ring);
433 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
434 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
435 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
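Lines 401-405 are the write-back plumbing inside cik_sdma_gfx_resume(): the engine is told where to mirror its read pointer, and the mirror is only switched on when write-back actually works. A sketch of that fragment with the surrounding register writes filled in (register names per cikd.h, reconstruction):

        /* tell the engine where to mirror its rptr */
        WREG32(SDMA0_GFX_RB_RPTR_ADDR_HI + reg_offset,
               upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);
        WREG32(SDMA0_GFX_RB_RPTR_ADDR_LO + reg_offset,
               ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));

        if (rdev->wb.enabled)
                rb_cntl |= SDMA_RPTR_WRITEBACK_ENABLE;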
443 * @rdev: radeon_device pointer
448 static int cik_sdma_rlc_resume(struct radeon_device *rdev)
457 * @rdev: radeon_device pointer
462 static int cik_sdma_load_microcode(struct radeon_device *rdev)
466 if (!rdev->sdma_fw)
470 cik_sdma_enable(rdev, false);
472 if (rdev->new_fw) {
474 (const struct sdma_firmware_header_v1_0 *)rdev->sdma_fw->data;
482 (rdev->sdma_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
491 (rdev->sdma_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
501 fw_data = (const __be32 *)rdev->sdma_fw->data;
508 fw_data = (const __be32 *)rdev->sdma_fw->data;
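Lines 472-508 cover both firmware layouts: new-style blobs carry a little-endian sdma_firmware_header_v1_0 with an explicit ucode offset and size, while legacy blobs are raw big-endian words of a fixed length. A hedged sketch of the upload loop for one engine (CIK_SDMA_UCODE_SIZE and the register names are assumptions from the radeon headers):

        if (rdev->new_fw) {
                const struct sdma_firmware_header_v1_0 *hdr =
                        (const struct sdma_firmware_header_v1_0 *)rdev->sdma_fw->data;
                const __le32 *fw_data = (const __le32 *)
                        (rdev->sdma_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));
                u32 fw_size = le32_to_cpu(hdr->header.ucode_size_bytes) / 4;

                WREG32(SDMA0_UCODE_ADDR + reg_offset, 0);
                for (i = 0; i < fw_size; i++)
                        WREG32(SDMA0_UCODE_DATA + reg_offset, le32_to_cpup(fw_data++));
        } else {
                /* legacy blobs are stored big-endian */
                const __be32 *fw_data = (const __be32 *)rdev->sdma_fw->data;

                WREG32(SDMA0_UCODE_ADDR + reg_offset, 0);
                for (i = 0; i < CIK_SDMA_UCODE_SIZE; i++)
                        WREG32(SDMA0_UCODE_DATA + reg_offset, be32_to_cpup(fw_data++));
        }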
523 * @rdev: radeon_device pointer
528 int cik_sdma_resume(struct radeon_device *rdev)
532 r = cik_sdma_load_microcode(rdev);
537 cik_sdma_enable(rdev, true);
540 r = cik_sdma_gfx_resume(rdev);
543 r = cik_sdma_rlc_resume(rdev);
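Lines 532-543 give the full bring-up order: microcode first (which halts the engines, line 470), then unhalt, then the gfx and rlc queues. A sketch of the whole function as the fragments imply it:

int cik_sdma_resume(struct radeon_device *rdev)
{
        int r;

        r = cik_sdma_load_microcode(rdev);  /* halts engines internally */
        if (r)
                return r;

        cik_sdma_enable(rdev, true);        /* unhalt both engines */

        r = cik_sdma_gfx_resume(rdev);      /* ring buffers + ring tests */
        if (r)
                return r;
        r = cik_sdma_rlc_resume(rdev);
        if (r)
                return r;

        return 0;
}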
553 * @rdev: radeon_device pointer
557 void cik_sdma_fini(struct radeon_device *rdev)
560 cik_sdma_enable(rdev, false);
561 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);
562 radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]);
569 * @rdev: radeon_device pointer
579 struct radeon_fence *cik_copy_dma(struct radeon_device *rdev,
586 int ring_index = rdev->asic->copy.dma_ring_index;
587 struct radeon_ring *ring = &rdev->ring[ring_index];
596 r = radeon_ring_lock(rdev, ring, num_loops * 7 + 14);
599 radeon_sync_free(rdev, &sync, NULL);
603 radeon_sync_resv(rdev, &sync, resv, false);
604 radeon_sync_rings(rdev, &sync, ring->idx);
622 r = radeon_fence_emit(rdev, &fence, ring->idx);
624 radeon_ring_unlock_undo(rdev, ring);
625 radeon_sync_free(rdev, &sync, NULL);
629 radeon_ring_unlock_commit(rdev, ring, false);
630 radeon_sync_free(rdev, &sync, fence);
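The budget on line 596 decodes as: 7 dwords per linear-copy packet times num_loops, plus 14 dwords reserved for the sync and fence packets; each chunk moves at most 0x1fffff bytes. A sketch of the emit loop those numbers come from (opcode names per cikd.h, chunk limit an assumption):

        while (size_in_bytes) {
                unsigned cur = min(size_in_bytes, 0x1fffffu);

                radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_COPY,
                                                    SDMA_COPY_SUB_OPCODE_LINEAR, 0));
                radeon_ring_write(ring, cur);                    /* byte count */
                radeon_ring_write(ring, 0);                      /* src/dst swap: none */
                radeon_ring_write(ring, lower_32_bits(src_offset));
                radeon_ring_write(ring, upper_32_bits(src_offset));
                radeon_ring_write(ring, lower_32_bits(dst_offset));
                radeon_ring_write(ring, upper_32_bits(dst_offset));
                src_offset += cur;
                dst_offset += cur;
                size_in_bytes -= cur;
        }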
638 * @rdev: radeon_device pointer
645 int cik_sdma_ring_test(struct radeon_device *rdev,
659 gpu_addr = rdev->wb.gpu_addr + index;
662 rdev->wb.wb[index/4] = cpu_to_le32(tmp);
664 r = radeon_ring_lock(rdev, ring, 5);
674 radeon_ring_unlock_commit(rdev, ring, false);
676 for (i = 0; i < rdev->usec_timeout; i++) {
677 tmp = le32_to_cpu(rdev->wb.wb[index/4]);
683 if (i < rdev->usec_timeout) {
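The ring test is a round trip through the write-back page: seed a scratch slot (line 662), emit one 5-dword SDMA write of a magic value to it (hence the 5 on line 664), and poll until the value flips or usec_timeout expires. A sketch of the middle part (reconstruction):

        rdev->wb.wb[index/4] = cpu_to_le32(0xCAFEDEAD);   /* seed */

        radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_WRITE,
                                            SDMA_WRITE_SUB_OPCODE_LINEAR, 0));
        radeon_ring_write(ring, lower_32_bits(gpu_addr));
        radeon_ring_write(ring, upper_32_bits(gpu_addr));
        radeon_ring_write(ring, 1);                       /* one DW follows */
        radeon_ring_write(ring, 0xDEADBEEF);
        radeon_ring_unlock_commit(rdev, ring, false);

        for (i = 0; i < rdev->usec_timeout; i++) {
                if (le32_to_cpu(rdev->wb.wb[index/4]) == 0xDEADBEEF)
                        break;
                udelay(1);
        }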
696 * @rdev: radeon_device pointer
702 int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
716 gpu_addr = rdev->wb.gpu_addr + index;
719 rdev->wb.wb[index/4] = cpu_to_le32(tmp);
721 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
734 r = radeon_ib_schedule(rdev, &ib, NULL, false);
736 radeon_ib_free(rdev, &ib);
750 for (i = 0; i < rdev->usec_timeout; i++) {
751 tmp = le32_to_cpu(rdev->wb.wb[index/4]);
756 if (i < rdev->usec_timeout) {
762 radeon_ib_free(rdev, &ib);
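The IB test repeats the same write-and-poll trick, but routed through an indirect buffer so the whole fetch path is exercised: allocate a small IB (line 721), fill it with the same 5-dword write packet, schedule it, wait on its fence, then poll and free. A sketch of the lifecycle (reconstruction; the poll loop is identical to the ring test):

        r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
        if (r)
                return r;

        ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0);
        ib.ptr[1] = lower_32_bits(gpu_addr);
        ib.ptr[2] = upper_32_bits(gpu_addr);
        ib.ptr[3] = 1;
        ib.ptr[4] = 0xDEADBEEF;
        ib.length_dw = 5;

        r = radeon_ib_schedule(rdev, &ib, NULL, false);
        if (r) {
                radeon_ib_free(rdev, &ib);        /* error path, line 736 */
                return r;
        }
        r = radeon_fence_wait(ib.fence, false);
        /* ... poll rdev->wb.wb[index/4] as in the ring test ... */
        radeon_ib_free(rdev, &ib);                /* line 762: always freed */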
769 * @rdev: radeon_device pointer
775 bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
777 u32 reset_mask = cik_gpu_check_soft_reset(rdev);
786 radeon_ring_lockup_update(rdev, ring);
789 return radeon_ring_test_lockup(rdev, ring);
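Lockup detection keys off the global soft-reset status: if the relevant DMA bit is clear the engine is presumed alive and the tracker is refreshed, otherwise the ring's activity is actually tested. A sketch (RADEON_RESET_DMA/DMA1 per radeon.h, reconstruction):

bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
        u32 reset_mask = cik_gpu_check_soft_reset(rdev);
        u32 mask = (ring->idx == R600_RING_TYPE_DMA_INDEX) ?
                   RADEON_RESET_DMA : RADEON_RESET_DMA1;

        if (!(reset_mask & mask)) {
                /* engine looks alive: note the progress and move on */
                radeon_ring_lockup_update(rdev, ring);
                return false;
        }
        return radeon_ring_test_lockup(rdev, ring);
}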
795 * @rdev: radeon_device pointer
803 void cik_sdma_vm_copy_pages(struct radeon_device *rdev,
831 * @rdev: radeon_device pointer
841 void cik_sdma_vm_write_pages(struct radeon_device *rdev,
863 value = radeon_vm_map_gart(rdev, addr);
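Line 863 is the interesting branch of cik_sdma_vm_write_pages(): system pages cannot be mapped directly, so their GPU address is resolved through the GART before the PTE is written into the IB. A sketch of the inner loop (R600_PTE_* flags per radeon.h; the 8-byte PTE stride is an assumption):

        for (; ndw > 0; ndw -= 2, --count, pe += 8) {
                if (flags & R600_PTE_SYSTEM)
                        value = radeon_vm_map_gart(rdev, addr); /* line 863 */
                else if (flags & R600_PTE_VALID)
                        value = addr;
                else
                        value = 0;
                addr += incr;
                value |= flags;
                ib->ptr[ib->length_dw++] = value;
                ib->ptr[ib->length_dw++] = upper_32_bits(value);
        }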
880 * @rdev: radeon_device pointer
890 void cik_sdma_vm_set_pages(struct radeon_device *rdev,
942 * @rdev: radeon_device pointer
947 void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
987 cik_sdma_hdp_flush_ring_emit(rdev, ring->idx);
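After the page-table writes land, line 987 flushes HDP, and the flush is chased with a TLB invalidate for the flushed VM id, written through the SRBM. A sketch of that tail (register and opcode names per cikd.h, reconstruction):

        cik_sdma_hdp_flush_ring_emit(rdev, ring->idx);    /* line 987 */

        /* then drop this VM's stale TLB entries */
        radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf));
        radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
        radeon_ring_write(ring, 1 << vm_id);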