Lines Matching defs:rdev
29 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev);
34 * @rdev: radeon_device pointer
41 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
44 struct radeon_ring *ring = &rdev->ring[fence->ring];
45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
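
The matches above (lines 34-45) are from evergreen_dma_fence_ring_emit(), which writes a fence packet on the async DMA ring. As a sketch of how the matched lines fit together, the body below is reconstructed from the mainline radeon driver (assumed to be drivers/gpu/drm/radeon/evergreen_dma.c); the DMA_PACKET_* opcodes and the HDP_MEM_COHERENCY_FLUSH_CNTL write follow the upstream source and should be verified against the tree being browsed.

void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
				   struct radeon_fence *fence)
{
	struct radeon_ring *ring = &rdev->ring[fence->ring];
	u64 addr = rdev->fence_drv[fence->ring].gpu_addr;

	/* write the fence sequence number to the fence GPU address */
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
	radeon_ring_write(ring, addr & 0xfffffffc);
	radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
	radeon_ring_write(ring, fence->seq);
	/* generate an interrupt */
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
	/* flush HDP so the CPU sees the fence write-back */
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
	radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
	radeon_ring_write(ring, 1);
}
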
62 * @rdev: radeon_device pointer
67 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
70 struct radeon_ring *ring = &rdev->ring[ib->ring];
72 if (rdev->wb.enabled) {
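
Lines 62-72 belong to evergreen_dma_ring_ib_execute(), which schedules an indirect buffer on the DMA ring; when write-back is enabled (rdev->wb.enabled) it first emits a write of the next read pointer. The sketch below is reconstructed from the mainline driver for orientation only; the 8-dword alignment padding and the opcode names may differ slightly between kernel versions.

void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
				   struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];

	if (rdev->wb.enabled) {
		/* compute where the read pointer will land after the
		 * (padded) indirect buffer packet below
		 */
		u32 next_rptr = ring->wptr + 4;
		while ((next_rptr & 7) != 5)
			next_rptr++;
		next_rptr += 3;
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
		radeon_ring_write(ring, next_rptr);
	}
	/* the IB packet must end on an 8 DW boundary on the DMA ring;
	 * pad with NOPs as necessary
	 */
	while ((ring->wptr & 7) != 5)
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
	radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
}
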
97 * @rdev: radeon_device pointer
107 struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
115 int ring_index = rdev->asic->copy.dma_ring_index;
116 struct radeon_ring *ring = &rdev->ring[ring_index];
125 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
128 radeon_sync_free(rdev, &sync, NULL);
132 radeon_sync_resv(rdev, &sync, resv, false);
133 radeon_sync_rings(rdev, &sync, ring->idx);
149 r = radeon_fence_emit(rdev, &fence, ring->idx);
151 radeon_ring_unlock_undo(rdev, ring);
152 radeon_sync_free(rdev, &sync, NULL);
156 radeon_ring_unlock_commit(rdev, ring, false);
157 radeon_sync_free(rdev, &sync, fence);
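
Lines 97-157 trace the whole evergreen_copy_dma() buffer-copy path: pick the DMA ring from the asic copy table, lock enough ring space (5 dwords per copy packet plus headroom for the fence and flush), sync against the reservation object, emit copy packets of at most 0xFFFFF dwords each, then emit a fence and commit. The full function is sketched below from the mainline driver; note that the resv parameter is struct dma_resv * in recent kernels, while older trees use struct reservation_object *.

struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
					uint64_t src_offset,
					uint64_t dst_offset,
					unsigned num_gpu_pages,
					struct dma_resv *resv)
{
	struct radeon_fence *fence;
	struct radeon_sync sync;
	int ring_index = rdev->asic->copy.dma_ring_index;
	struct radeon_ring *ring = &rdev->ring[ring_index];
	u32 size_in_dw, cur_size_in_dw;
	int i, num_loops;
	int r = 0;

	radeon_sync_create(&sync);

	size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
	num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
	/* 5 dwords per copy packet, plus room for the fence/flush packets */
	r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
	if (r) {
		DRM_ERROR("radeon: moving bo (%d).\n", r);
		radeon_sync_free(rdev, &sync, NULL);
		return ERR_PTR(r);
	}

	/* wait for fences attached to the reservation object, then
	 * wait on any rings we depend on
	 */
	radeon_sync_resv(rdev, &sync, resv, false);
	radeon_sync_rings(rdev, &sync, ring->idx);

	for (i = 0; i < num_loops; i++) {
		cur_size_in_dw = size_in_dw;
		if (cur_size_in_dw > 0xFFFFF)
			cur_size_in_dw = 0xFFFFF;
		size_in_dw -= cur_size_in_dw;
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
		radeon_ring_write(ring, dst_offset & 0xfffffffc);
		radeon_ring_write(ring, src_offset & 0xfffffffc);
		radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
		radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
		src_offset += cur_size_in_dw * 4;
		dst_offset += cur_size_in_dw * 4;
	}

	r = radeon_fence_emit(rdev, &fence, ring->idx);
	if (r) {
		radeon_ring_unlock_undo(rdev, ring);
		radeon_sync_free(rdev, &sync, NULL);
		return ERR_PTR(r);
	}

	radeon_ring_unlock_commit(rdev, ring, false);
	radeon_sync_free(rdev, &sync, fence);

	return fence;
}
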
165 * @rdev: radeon_device pointer
171 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
173 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
176 radeon_ring_lockup_update(rdev, ring);
179 return radeon_ring_test_lockup(rdev, ring);
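
Lines 165-179 are from evergreen_dma_is_lockup(), the smallest of the four functions: it asks the soft-reset check (line 29's forward declaration) whether the async DMA engine needs a reset and only reports a lockup when the DMA bit is set. Sketch reconstructed from the mainline driver; RADEON_RESET_DMA is the mask bit used upstream.

bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (!(reset_mask & RADEON_RESET_DMA)) {
		/* engine is not hung: refresh the lockup tracking state */
		radeon_ring_lockup_update(rdev, ring);
		return false;
	}
	/* the soft-reset mask flags the DMA engine: confirm the lockup */
	return radeon_ring_test_lockup(rdev, ring);
}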