Lines Matching defs:rdev (occurrences of the rdev parameter, a struct radeon_device pointer, across the Cayman DMA functions)

30 u32 cayman_gpu_check_soft_reset(struct radeon_device *rdev);
48 * @rdev: radeon_device pointer
53 uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
58 if (rdev->wb.enabled) {
59 rptr = rdev->wb.wb[ring->rptr_offs/4];
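
The two matched lines above (58-59) are the writeback path of cayman_dma_get_rptr(): when writeback is enabled, the DMA engine's read pointer comes from a CPU-visible buffer that the GPU updates, not from a register read. A minimal sketch of the whole function, reconstructed against the mainline radeon driver (register and offset names are assumed from mainline nid.h, and all sketches below assume the driver's own headers, radeon.h and nid.h, are in scope):

uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
                             struct radeon_ring *ring)
{
        u32 rptr, reg;

        if (rdev->wb.enabled) {
                /* writeback: the engine DMA-writes its read pointer into
                 * system memory; rptr_offs is in bytes, wb.wb[] is u32 */
                rptr = rdev->wb.wb[ring->rptr_offs/4];
        } else {
                /* no writeback: fall back to an MMIO register read */
                if (ring->idx == R600_RING_TYPE_DMA_INDEX)
                        reg = DMA_RB_RPTR + DMA0_REGISTER_OFFSET;
                else
                        reg = DMA_RB_RPTR + DMA1_REGISTER_OFFSET;
                rptr = RREG32(reg);
        }

        /* the hardware pointer is in bytes; the ring code works in dwords */
        return (rptr & 0x3fffc) >> 2;
}

The writeback path exists because a cached system-memory load is far cheaper than an MMIO read across the bus, which matters on a polling-heavy path like fence checking.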
75 * @rdev: radeon_device pointer
80 uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
96 * @rdev: radeon_device pointer
101 void cayman_dma_set_wptr(struct radeon_device *rdev,
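
cayman_dma_get_wptr() and cayman_dma_set_wptr() (lines 80 and 101) are the matching write-pointer accessors; the write pointer always goes through the engine's WPTR register, with the same byte-to-dword shift as the rptr path. A sketch of both, under the same mainline-name assumptions:

uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
                             struct radeon_ring *ring)
{
        u32 reg;

        if (ring->idx == R600_RING_TYPE_DMA_INDEX)
                reg = DMA_RB_WPTR + DMA0_REGISTER_OFFSET;
        else
                reg = DMA_RB_WPTR + DMA1_REGISTER_OFFSET;

        return (RREG32(reg) & 0x3fffc) >> 2;
}

void cayman_dma_set_wptr(struct radeon_device *rdev,
                         struct radeon_ring *ring)
{
        u32 reg;

        if (ring->idx == R600_RING_TYPE_DMA_INDEX)
                reg = DMA_RB_WPTR + DMA0_REGISTER_OFFSET;
        else
                reg = DMA_RB_WPTR + DMA1_REGISTER_OFFSET;

        /* convert the dword wptr back to bytes before telling the hw */
        WREG32(reg, (ring->wptr << 2) & 0x3fffc);
}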
117 * @rdev: radeon_device pointer
122 void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
125 struct radeon_ring *ring = &rdev->ring[ib->ring];
128 if (rdev->wb.enabled) {
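
Lines 122-128 are from cayman_dma_ring_ib_execute(), which schedules an indirect buffer on whichever of the two DMA rings the IB targets; when writeback is on, it first queues a packet that writes the expected next read pointer into the writeback slot. A condensed sketch; the packet encodings and the per-ring VM id lookup are assumed from mainline and vary across kernel versions:

void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
                                struct radeon_ib *ib)
{
        struct radeon_ring *ring = &rdev->ring[ib->ring];
        /* assumption: newer per-ring VM id layout; older trees use ib->vm->id */
        unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;

        if (rdev->wb.enabled) {
                /* publish the rptr value the engine will reach after this
                 * IB packet, so the CPU sees progress without MMIO reads */
                u32 next_rptr = ring->wptr + 4;
                while ((next_rptr & 7) != 5)
                        next_rptr++;
                next_rptr += 3;
                radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
                radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
                radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
                radeon_ring_write(ring, next_rptr);
        }

        /* the IB packet must end on an 8-dword boundary; pad with NOPs */
        while ((ring->wptr & 7) != 5)
                radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
        radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, vm_id, 0));
        radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
        radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
}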
153 * @rdev: radeon_device pointer
157 void cayman_dma_stop(struct radeon_device *rdev)
161 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
162 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
163 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
175 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
176 rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false;
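
cayman_dma_stop() (lines 157-176) shows the teardown ordering: if TTM currently uses a DMA ring for buffer moves, the active VRAM size is clamped back to the CPU-visible aperture before both engines' ring buffers are disabled and marked not ready. A sketch with the RB_CNTL handling filled in from mainline (assumed, not verbatim):

void cayman_dma_stop(struct radeon_device *rdev)
{
        u32 rb_cntl;

        /* if a DMA ring is the copy ring, stop advertising unmappable
         * VRAM to TTM, since CPU copies will have to take over */
        if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
            (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
                radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);

        /* disable the ring buffer on both DMA engines */
        rb_cntl = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
        rb_cntl &= ~DMA_RB_ENABLE;
        WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, rb_cntl);

        rb_cntl = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
        rb_cntl &= ~DMA_RB_ENABLE;
        WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, rb_cntl);

        rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
        rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false;
}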
182 * @rdev: radeon_device pointer
187 int cayman_dma_resume(struct radeon_device *rdev)
197 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
201 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
223 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);
225 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
227 if (rdev->wb.enabled)
250 r = radeon_ring_test(rdev, ring->idx, ring);
257 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
258 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
259 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
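
cayman_dma_resume() (lines 187-259) programs both engines in one loop. The matched lines show the highlights: the rptr writeback address is split into a HI byte and a dword-aligned LO word, writeback is only turned on in RB_CNTL when rdev->wb.enabled is set, each ring must pass radeon_ring_test() before it is left ready, and on success the full VRAM size is handed back to TTM. A condensed sketch; several register writes of the real function (IB enable, interrupt masking, endian swap) are omitted, and names are assumed from mainline:

int cayman_dma_resume(struct radeon_device *rdev)
{
        struct radeon_ring *ring;
        u32 rb_cntl, reg_offset, wb_offset;
        int i, r;

        for (i = 0; i < 2; i++) {
                if (i == 0) {
                        ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
                        reg_offset = DMA0_REGISTER_OFFSET;
                        wb_offset = R600_WB_DMA_RPTR_OFFSET;
                } else {
                        ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
                        reg_offset = DMA1_REGISTER_OFFSET;
                        wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET;
                }

                /* ring size is encoded as log2(dwords) in RB_CNTL */
                rb_cntl = order_base_2(ring->ring_size / 4) << 1;
                WREG32(DMA_RB_CNTL + reg_offset, rb_cntl);

                /* zero the hardware read and write pointers */
                WREG32(DMA_RB_RPTR + reg_offset, 0);
                WREG32(DMA_RB_WPTR + reg_offset, 0);

                /* set the rptr writeback address whether writeback is on or not */
                WREG32(DMA_RB_RPTR_ADDR_HI + reg_offset,
                       upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);
                WREG32(DMA_RB_RPTR_ADDR_LO + reg_offset,
                       ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));

                if (rdev->wb.enabled)
                        rb_cntl |= DMA_RPTR_WRITEBACK_ENABLE;

                WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8);

                ring->wptr = 0;
                WREG32(DMA_RB_WPTR + reg_offset, ring->wptr << 2);
                WREG32(DMA_RB_CNTL + reg_offset, rb_cntl | DMA_RB_ENABLE);
                ring->ready = true;

                /* prove the engine actually consumes packets before use */
                r = radeon_ring_test(rdev, ring->idx, ring);
                if (r) {
                        ring->ready = false;
                        return r;
                }
        }

        /* give TTM the whole of VRAM back now that a copy ring works */
        if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) ||
            (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX))
                radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);

        return 0;
}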
267 * @rdev: radeon_device pointer
271 void cayman_dma_fini(struct radeon_device *rdev)
273 cayman_dma_stop(rdev);
274 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);
275 radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]);
281 * @rdev: radeon_device pointer
287 bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
289 u32 reset_mask = cayman_gpu_check_soft_reset(rdev);
298 radeon_ring_lockup_update(rdev, ring);
301 return radeon_ring_test_lockup(rdev, ring);
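
cayman_dma_is_lockup() (lines 287-301) leans on cayman_gpu_check_soft_reset(), declared at line 30 above: if the engine's bit is clear in the reset mask, the engine is idle or making progress, so the lockup timer is refreshed and false is returned; only a busy engine falls through to the real lockup test. A sketch:

bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
        u32 reset_mask = cayman_gpu_check_soft_reset(rdev);
        u32 mask;

        if (ring->idx == R600_RING_TYPE_DMA_INDEX)
                mask = RADEON_RESET_DMA;
        else
                mask = RADEON_RESET_DMA1;

        if (!(reset_mask & mask)) {
                /* engine looks healthy: note progress, report no hang */
                radeon_ring_lockup_update(rdev, ring);
                return false;
        }
        return radeon_ring_test_lockup(rdev, ring);
}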
307 * @rdev: radeon_device pointer
315 void cayman_dma_vm_copy_pages(struct radeon_device *rdev,
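
cayman_dma_vm_copy_pages() (line 315) builds DMA COPY packets into an IB to copy page-table entries straight out of the GART table; each packet can move at most 0xFFFFE dwords, at two dwords per 64-bit PTE. A sketch assuming the mainline packet layout:

void cayman_dma_vm_copy_pages(struct radeon_device *rdev,
                              struct radeon_ib *ib,
                              uint64_t pe, uint64_t src,
                              unsigned count)
{
        unsigned ndw;

        while (count) {
                ndw = count * 2;          /* two dwords per PTE */
                if (ndw > 0xFFFFE)
                        ndw = 0xFFFFE;    /* packet count field limit */

                ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, 0, 0, ndw);
                ib->ptr[ib->length_dw++] = lower_32_bits(pe);
                ib->ptr[ib->length_dw++] = lower_32_bits(src);
                ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
                ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;

                pe += ndw * 4;
                src += ndw * 4;
                count -= ndw / 2;
        }
}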
343 * @rdev: radeon_device pointer
353 void cayman_dma_vm_write_pages(struct radeon_device *rdev,
374 value = radeon_vm_map_gart(rdev, addr);
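
cayman_dma_vm_write_pages() (lines 353-374) handles pages that are not physically contiguous: the PTE values are emitted inline in a DMA WRITE packet, and line 374 is where a system page's bus address is resolved through the GART via radeon_vm_map_gart(). A sketch on the same assumptions (the R600_PTE_* flag names are as in mainline radeon.h):

void cayman_dma_vm_write_pages(struct radeon_device *rdev,
                               struct radeon_ib *ib,
                               uint64_t pe,
                               uint64_t addr, unsigned count,
                               uint32_t incr, uint32_t flags)
{
        uint64_t value;
        unsigned ndw;

        while (count) {
                ndw = count * 2;
                if (ndw > 0xFFFFE)
                        ndw = 0xFFFFE;

                /* header plus destination, then one 64-bit value per PTE */
                ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, ndw);
                ib->ptr[ib->length_dw++] = lower_32_bits(pe);
                ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
                for (; ndw > 0; ndw -= 2, --count, pe += 8) {
                        if (flags & R600_PTE_SYSTEM) {
                                /* system page: translate through the GART */
                                value = radeon_vm_map_gart(rdev, addr);
                        } else if (flags & R600_PTE_VALID) {
                                value = addr;
                        } else {
                                value = 0;
                        }
                        addr += incr;
                        value |= flags;
                        ib->ptr[ib->length_dw++] = lower_32_bits(value);
                        ib->ptr[ib->length_dw++] = upper_32_bits(value);
                }
        }
}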
391 * @rdev: radeon_device pointer
401 void cayman_dma_vm_set_pages(struct radeon_device *rdev,
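
cayman_dma_vm_set_pages() (line 401) covers the contiguous case, typically VRAM: instead of writing every PTE, one PTE_PDE packet hands the hardware a starting value, a mask and an increment and lets it generate the entries itself, which keeps the IB much smaller than the inline-write path. A sketch (DMA_PTE_PDE_PACKET as in mainline nid.h, assumed):

void cayman_dma_vm_set_pages(struct radeon_device *rdev,
                             struct radeon_ib *ib,
                             uint64_t pe,
                             uint64_t addr, unsigned count,
                             uint32_t incr, uint32_t flags)
{
        uint64_t value;
        unsigned ndw;

        while (count) {
                ndw = count * 2;
                if (ndw > 0xFFFFE)
                        ndw = 0xFFFFE;

                value = (flags & R600_PTE_VALID) ? addr : 0;

                /* one packet generates ndw/2 PTEs in hardware */
                ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
                ib->ptr[ib->length_dw++] = lower_32_bits(pe);      /* dst addr */
                ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
                ib->ptr[ib->length_dw++] = flags;                  /* mask */
                ib->ptr[ib->length_dw++] = 0;
                ib->ptr[ib->length_dw++] = lower_32_bits(value);   /* value */
                ib->ptr[ib->length_dw++] = upper_32_bits(value);
                ib->ptr[ib->length_dw++] = incr;                   /* increment */
                ib->ptr[ib->length_dw++] = 0;

                pe += ndw * 4;
                addr += (ndw / 2) * incr;
                count -= ndw / 2;
        }
}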
449 void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
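
Finally, cayman_dma_vm_flush() (line 449) points the given VM context at its new page directory, flushes the HDP cache, and invalidates the context's TLB, all via SRBM_WRITE packets on the DMA ring. A sketch assuming the newer (vm_id, pd_addr) signature and mainline register names:

void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                         unsigned vm_id, uint64_t pd_addr)
{
        /* update the page table base for this VM context */
        radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
        radeon_ring_write(ring, (0xf << 16) |
                          ((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2));
        radeon_ring_write(ring, pd_addr >> 12);

        /* flush the HDP cache so the new tables are visible to the GPU */
        radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
        radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
        radeon_ring_write(ring, 1);

        /* bits 0-7 of VM_INVALIDATE_REQUEST select VM contexts 0-7 */
        radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
        radeon_ring_write(ring, (0xf << 16) | (VM_INVALIDATE_REQUEST >> 2));
        radeon_ring_write(ring, 1 << vm_id);
}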