Lines matching defs:rdev (cayman DMA definitions in drivers/gpu/drm/radeon/ni_dma.c)
47 * @rdev: radeon_device pointer
52 uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
57 if (rdev->wb.enabled) {
58 rptr = rdev->wb.wb[ring->rptr_offs/4];
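For context, the full body of cayman_dma_get_rptr() reads roughly as below. This is a sketch reconstructed from the radeon driver (register and macro names come from the driver's nid.h/radeon.h headers); exact lines vary by kernel version:

	uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
				     struct radeon_ring *ring)
	{
		u32 rptr, reg;

		if (rdev->wb.enabled) {
			/* rptr is mirrored into the writeback page in system memory */
			rptr = rdev->wb.wb[ring->rptr_offs/4];
		} else {
			/* otherwise read DMA_RB_RPTR for the engine backing this ring */
			if (ring->idx == R600_RING_TYPE_DMA_INDEX)
				reg = DMA_RB_RPTR + DMA0_REGISTER_OFFSET;
			else
				reg = DMA_RB_RPTR + DMA1_REGISTER_OFFSET;
			rptr = RREG32(reg);
		}

		/* the hardware stores a byte offset; return a dword index */
		return (rptr & 0x3fffc) >> 2;
	}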
74 * @rdev: radeon_device pointer
79 uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
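The wptr read has no writeback path; it always comes from the DMA_RB_WPTR register of the engine that owns the ring. A sketch, same caveats as above:

	uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
				     struct radeon_ring *ring)
	{
		u32 reg;

		if (ring->idx == R600_RING_TYPE_DMA_INDEX)
			reg = DMA_RB_WPTR + DMA0_REGISTER_OFFSET;
		else
			reg = DMA_RB_WPTR + DMA1_REGISTER_OFFSET;

		/* byte offset in bits 2..17, converted to a dword index */
		return (RREG32(reg) & 0x3fffc) >> 2;
	}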
95 * @rdev: radeon_device pointer
100 void cayman_dma_set_wptr(struct radeon_device *rdev,
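cayman_dma_set_wptr() is the mirror image, converting the ring's dword index back to a byte offset before writing the register (sketch):

	void cayman_dma_set_wptr(struct radeon_device *rdev,
				 struct radeon_ring *ring)
	{
		u32 reg;

		if (ring->idx == R600_RING_TYPE_DMA_INDEX)
			reg = DMA_RB_WPTR + DMA0_REGISTER_OFFSET;
		else
			reg = DMA_RB_WPTR + DMA1_REGISTER_OFFSET;

		WREG32(reg, (ring->wptr << 2) & 0x3fffc);
	}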
116 * @rdev: radeon_device pointer
121 void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
124 struct radeon_ring *ring = &rdev->ring[ib->ring];
127 if (rdev->wb.enabled) {
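Inside cayman_dma_ring_ib_execute(), the wb.enabled branch at line 127 emits a WRITE packet that updates next_rptr in the writeback page, and the function then pads the ring so the INDIRECT_BUFFER packet ends on an 8-dword boundary. A sketch; the VM-id lookup differs across kernel versions, so treat vm_id here as illustrative:

	if (rdev->wb.enabled) {
		u32 next_rptr = ring->wptr + 4;
		while ((next_rptr & 7) != 5)
			next_rptr++;
		next_rptr += 3;
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
		radeon_ring_write(ring, next_rptr);
	}

	/* the IB packet must end on an 8 DW boundary; pad with NOPs */
	while ((ring->wptr & 7) != 5)
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
	radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, vm_id, 0));
	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
	radeon_ring_write(ring, (ib->length_dw & 0xfffff));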
152 * @rdev: radeon_device pointer
156 void cayman_dma_stop(struct radeon_device *rdev)
160 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
161 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
162 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
174 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
175 rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false;
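Between the radeon_ttm_set_active_vram_size() call at line 162 and the ready = false assignments at lines 174-175, cayman_dma_stop() disables both ring buffers by clearing DMA_RB_ENABLE (sketch reconstructed from the driver):

	u32 rb_cntl;

	/* dma0 */
	rb_cntl = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
	rb_cntl &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, rb_cntl);

	/* dma1 */
	rb_cntl = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
	rb_cntl &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, rb_cntl);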
181 * @rdev: radeon_device pointer
186 int cayman_dma_resume(struct radeon_device *rdev)
196 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
200 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
222 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);
224 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
226 if (rdev->wb.enabled)
249 r = radeon_ring_test(rdev, ring->idx, ring);
256 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
257 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
258 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
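cayman_dma_resume() loops over both engines, selecting the ring, register block, and writeback offset for each, then programs the ring as the matched lines above indicate (rptr writeback address at lines 222-226, ring test at line 249). A skeleton of the loop, with the detailed register programming deliberately elided:

	struct radeon_ring *ring;
	u32 reg_offset, wb_offset;
	int i, r;

	for (i = 0; i < 2; i++) {
		if (i == 0) {
			ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
			reg_offset = DMA0_REGISTER_OFFSET;
			wb_offset = R600_WB_DMA_RPTR_OFFSET;
		} else {
			ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
			reg_offset = DMA1_REGISTER_OFFSET;
			wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET;
		}

		/* program ring size, rptr/wptr, writeback address,
		 * IB enable, ring base; then set DMA_RB_ENABLE */
		...

		ring->ready = true;
		r = radeon_ring_test(rdev, ring->idx, ring);
		if (r) {
			ring->ready = false;
			return r;
		}
	}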
266 * @rdev: radeon_device pointer
270 void cayman_dma_fini(struct radeon_device *rdev)
272 cayman_dma_stop(rdev);
273 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);
274 radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]);
280 * @rdev: radeon_device pointer
286 bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
288 u32 reset_mask = cayman_gpu_check_soft_reset(rdev);
297 radeon_ring_lockup_update(rdev, ring);
300 return radeon_ring_test_lockup(rdev, ring);
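The elided middle of cayman_dma_is_lockup() picks the per-engine reset bit and short-circuits when the soft-reset mask shows the engine is healthy (sketch):

	bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
	{
		u32 reset_mask = cayman_gpu_check_soft_reset(rdev);
		u32 mask;

		if (ring->idx == R600_RING_TYPE_DMA_INDEX)
			mask = RADEON_RESET_DMA;
		else
			mask = RADEON_RESET_DMA1;

		if (!(reset_mask & mask)) {
			/* engine looks alive; refresh the lockup timestamp */
			radeon_ring_lockup_update(rdev, ring);
			return false;
		}
		return radeon_ring_test_lockup(rdev, ring);
	}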
306 * @rdev: radeon_device pointer
314 void cayman_dma_vm_copy_pages(struct radeon_device *rdev,
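The signature continues with (..., uint64_t pe, uint64_t src, unsigned count), as recalled from the driver. The body chops the page-table update into COPY packets of at most 0xFFFFE dwords, each carrying 64-bit destination (pe) and source addresses (sketch):

	while (count) {
		unsigned ndw = count * 2;
		if (ndw > 0xFFFFE)
			ndw = 0xFFFFE;

		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
						      0, 0, ndw);
		ib->ptr[ib->length_dw++] = lower_32_bits(pe);
		ib->ptr[ib->length_dw++] = lower_32_bits(src);
		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
		ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;

		pe += ndw * 4;
		src += ndw * 4;
		count -= ndw / 2;
	}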
342 * @rdev: radeon_device pointer
352 void cayman_dma_vm_write_pages(struct radeon_device *rdev,
373 value = radeon_vm_map_gart(rdev, addr);
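The radeon_vm_map_gart() call at line 373 sits in the inner loop of cayman_dma_vm_write_pages(), which resolves each PTE value from the mapping flags and emits two dwords per entry (sketch; addr, incr, and flags are parameters of the function):

	for (; ndw > 0; ndw -= 2, --count, pe += 8) {
		if (flags & R600_PTE_SYSTEM) {
			/* system pages are translated through the GART */
			value = radeon_vm_map_gart(rdev, addr);
		} else if (flags & R600_PTE_VALID) {
			value = addr;
		} else {
			value = 0;
		}
		addr += incr;
		value |= flags;
		ib->ptr[ib->length_dw++] = value;
		ib->ptr[ib->length_dw++] = upper_32_bits(value);
	}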
390 * @rdev: radeon_device pointer
400 void cayman_dma_vm_set_pages(struct radeon_device *rdev,
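For physically contiguous (vram) pages, cayman_dma_vm_set_pages() emits one PTE_PDE packet per chunk and lets the DMA engine generate the entries itself, stepping the address by incr. A sketch of the packet layout, reconstructed from the driver and possibly differing slightly by version:

	/* for physically contiguous pages (vram) */
	ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
	ib->ptr[ib->length_dw++] = pe;                    /* dst addr */
	ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
	ib->ptr[ib->length_dw++] = flags;                 /* mask */
	ib->ptr[ib->length_dw++] = 0;
	ib->ptr[ib->length_dw++] = value;                 /* value */
	ib->ptr[ib->length_dw++] = upper_32_bits(value);
	ib->ptr[ib->length_dw++] = incr;                  /* increment size */
	ib->ptr[ib->length_dw++] = 0;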
448 void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
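cayman_dma_vm_flush() issues three SRBM_WRITE packets: point the VM context at the new page directory, flush the HDP cache, then invalidate the context's TLB (sketch, same reconstruction caveats as above):

	void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
				 unsigned vm_id, uint64_t pd_addr)
	{
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
		radeon_ring_write(ring, (0xf << 16) |
				  (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)));
		radeon_ring_write(ring, pd_addr >> 12);

		/* flush hdp cache */
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
		radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL));
		radeon_ring_write(ring, 1);

		/* bits 0-7 are the VM contexts 0-7 */
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
		radeon_ring_write(ring, (0xf << 16) | (VM_INVALIDATE_REQUEST));
		radeon_ring_write(ring, 1 << vm_id);
	}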