Lines matching refs: ib
117 * @ib: IB object to schedule
122 struct radeon_ib *ib)
124 struct radeon_ring *ring = &rdev->ring[ib->ring];
125 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
144 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
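The matches at lines 117-145 appear to come from cayman_dma_ring_ib_execute() in the radeon driver's Cayman/Northern Islands DMA code: the function looks up the target ring and VM id from the IB, then writes an indirect-buffer packet carrying the IB's GPU address and dword length onto the DMA ring. Below is a minimal user-space sketch of how the matched lines fit together. The fake_ring/fake_ib types, the ring_write() helper, the packet macro encodings, and the NOP padding plus DMA_IB_PACKET header (which this refs:ib listing does not show) are stand-ins and reconstructions for illustration, not the driver's definitions.

/* Sketch only: stand-in types and placeholder macro encodings, not the driver's headers. */
#include <stdint.h>
#include <stdio.h>

#define DMA_PACKET_NOP               0x0   /* placeholder opcode values */
#define DMA_PACKET_INDIRECT_BUFFER   0x4
#define DMA_PACKET(cmd, t, s, n)     (((uint32_t)(cmd) << 28) | ((n) & 0xFFFFF))
#define DMA_IB_PACKET(cmd, vmid, n)  (((uint32_t)(cmd) << 28) | (((vmid) & 0xF) << 20) | ((n) & 0xFFFFF))
#define upper_32_bits(x)             ((uint32_t)((uint64_t)(x) >> 32))

struct fake_ring { uint32_t buf[64]; unsigned wptr; };
struct fake_ib   { uint64_t gpu_addr; uint32_t length_dw; unsigned vm_id; };

static void ring_write(struct fake_ring *ring, uint32_t v)
{
	ring->buf[ring->wptr++] = v;
}

/* Emit the indirect-buffer packet shown at lines 144-145: the IB's GPU
 * address (low bits masked off) and its length in dwords, tagged with the
 * VM id.  NOP padding so the 3-dword packet ends on an 8-dword boundary is
 * assumed here, reconstructed from context rather than from matched lines. */
static void ring_ib_execute(struct fake_ring *ring, const struct fake_ib *ib)
{
	while ((ring->wptr & 7) != 5)
		ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
	ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, ib->vm_id, 0));
	ring_write(ring, (uint32_t)(ib->gpu_addr & 0xFFFFFFE0));          /* line 144 */
	ring_write(ring, (ib->length_dw << 12) |
			 (upper_32_bits(ib->gpu_addr) & 0xFF));           /* line 145 */
}

int main(void)
{
	struct fake_ring ring = { .wptr = 0 };
	struct fake_ib ib = { .gpu_addr = 0x123456780ull, .length_dw = 64, .vm_id = 1 };

	ring_ib_execute(&ring, &ib);
	for (unsigned i = 0; i < ring.wptr; i++)
		printf("ring[%u] = 0x%08x\n", i, ring.buf[i]);
	return 0;
}
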
307 * @ib: indirect buffer to fill with commands
315 struct radeon_ib *ib,
326 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
328 ib->ptr[ib->length_dw++] = lower_32_bits(pe);
329 ib->ptr[ib->length_dw++] = lower_32_bits(src);
330 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
331 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;
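Lines 307-331 appear to be from cayman_dma_vm_copy_pages(), which fills the IB with DMA COPY packets so the engine copies page-table entries from a source buffer into the page table at pe. A compact, self-contained sketch of the 5-dword packet implied by those lines follows; the fake_ib type, the placeholder DMA_PACKET encoding, and the 0xFFFFE per-packet dword limit used for chunking are assumptions for illustration.

/* Sketch only: stand-in types and placeholder macros, not the driver's. */
#include <stdint.h>
#include <stdio.h>

#define DMA_PACKET_COPY          0x3      /* placeholder opcode */
#define DMA_PACKET(cmd, t, s, n) (((uint32_t)(cmd) << 28) | ((n) & 0xFFFFF))
#define lower_32_bits(x)         ((uint32_t)(x))
#define upper_32_bits(x)         ((uint32_t)((uint64_t)(x) >> 32))

struct fake_ib { uint32_t *ptr; uint32_t length_dw; };

/* Each packet asks the engine to copy 'ndw' dwords of PTEs (two dwords per
 * 64-bit entry) from 'src' to the page-table address 'pe'; large requests
 * are split because the packet's dword-count field is limited. */
static void vm_copy_pages(struct fake_ib *ib, uint64_t pe, uint64_t src,
			  unsigned count)
{
	while (count) {
		unsigned ndw = count * 2;
		if (ndw > 0xFFFFE)
			ndw = 0xFFFFE;

		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, 0, 0, ndw);
		ib->ptr[ib->length_dw++] = lower_32_bits(pe);          /* line 328 */
		ib->ptr[ib->length_dw++] = lower_32_bits(src);         /* line 329 */
		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;   /* line 330 */
		ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  /* line 331 */

		pe    += (uint64_t)ndw * 4;   /* advance past the dwords copied */
		src   += (uint64_t)ndw * 4;
		count -= ndw / 2;
	}
}

int main(void)
{
	uint32_t buf[16];
	struct fake_ib ib = { .ptr = buf, .length_dw = 0 };

	vm_copy_pages(&ib, 0x3000, 0x80000000ull, 8);   /* 8 PTEs */
	printf("emitted %u dwords\n", ib.length_dw);    /* one 5-dword COPY packet */
	return 0;
}
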
343 * @ib: indirect buffer to fill with commands
353 struct radeon_ib *ib,
367 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE,
369 ib->ptr[ib->length_dw++] = pe;
370 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
381 ib->ptr[ib->length_dw++] = value;
382 ib->ptr[ib->length_dw++] = upper_32_bits(value);
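Lines 343-382 appear to be from cayman_dma_vm_write_pages(): a WRITE packet header (dword count plus destination address) followed by one explicit 64-bit value per page-table entry, which is how entries are written when they cannot simply be generated from a base address. The sketch below uses stand-in types, a placeholder PTE_VALID flag, and a placeholder DMA_PACKET encoding, and omits the driver's GART lookup for system pages.

/* Sketch only: stand-in types, macros and flag bits, not the driver's. */
#include <stdint.h>
#include <stdio.h>

#define DMA_PACKET_WRITE         0x2      /* placeholder opcode */
#define DMA_PACKET(cmd, t, s, n) (((uint32_t)(cmd) << 28) | ((n) & 0xFFFFF))
#define upper_32_bits(x)         ((uint32_t)((uint64_t)(x) >> 32))

#define PTE_VALID  (1u << 0)              /* placeholder for R600_PTE_VALID */

struct fake_ib { uint32_t ptr[64]; uint32_t length_dw; };

/* WRITE packet: a 3-dword header (count, dst addr low, dst addr high)
 * followed by one 64-bit PTE value per page, written out explicitly. */
static void vm_write_pages(struct fake_ib *ib, uint64_t pe, uint64_t addr,
			   unsigned count, uint32_t incr, uint32_t flags)
{
	while (count) {
		unsigned ndw = count * 2;
		if (ndw > 0xFFFFE)
			ndw = 0xFFFFE;

		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, ndw);
		ib->ptr[ib->length_dw++] = (uint32_t)pe;               /* line 369 */
		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;   /* line 370 */

		for (; ndw > 0; ndw -= 2, --count, pe += 8) {
			uint64_t value = (flags & PTE_VALID) ? addr : 0;

			addr  += incr;          /* next page's address */
			value |= flags;         /* fold the PTE flag bits in */
			ib->ptr[ib->length_dw++] = (uint32_t)value;       /* line 381 */
			ib->ptr[ib->length_dw++] = upper_32_bits(value);  /* line 382 */
		}
	}
}

int main(void)
{
	struct fake_ib ib = { .length_dw = 0 };

	vm_write_pages(&ib, 0x1000, 0x200000, 4, 4096, PTE_VALID);
	printf("emitted %u dwords\n", ib.length_dw);  /* 3 header + 2 per page = 11 */
	return 0;
}
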
391 * @ib: indirect buffer to fill with commands
401 struct radeon_ib *ib,
420 ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
421 ib->ptr[ib->length_dw++] = pe; /* dst addr */
422 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
423 ib->ptr[ib->length_dw++] = flags; /* mask */
424 ib->ptr[ib->length_dw++] = 0;
425 ib->ptr[ib->length_dw++] = value; /* value */
426 ib->ptr[ib->length_dw++] = upper_32_bits(value);
427 ib->ptr[ib->length_dw++] = incr; /* increment size */
428 ib->ptr[ib->length_dw++] = 0;
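Lines 391-428 appear to be from cayman_dma_vm_set_pages(), which emits a single 9-dword PTE/PDE packet per chunk: the DMA engine generates the entries itself from a starting value, an increment, and a mask, so a contiguous range costs one packet rather than one value per page. A sketch of that layout follows; the DMA_PTE_PDE_PACKET encoding, PTE_VALID, and the fake_ib type are placeholders.

/* Sketch only: stand-in types and placeholder macros, not the driver's nid.h. */
#include <stdint.h>
#include <stdio.h>

#define DMA_PTE_PDE_PACKET(n)  ((2u << 28) | ((n) & 0xFFFFF))  /* placeholder encoding */
#define upper_32_bits(x)       ((uint32_t)((uint64_t)(x) >> 32))

#define PTE_VALID  (1u << 0)   /* placeholder for R600_PTE_VALID */

struct fake_ib { uint32_t ptr[64]; uint32_t length_dw; };

/* One PTE/PDE packet describes ndw/2 entries: destination, flag mask,
 * starting value, and per-entry increment (matched lines 420-428). */
static void vm_set_pages(struct fake_ib *ib, uint64_t pe, uint64_t addr,
			 unsigned count, uint32_t incr, uint32_t flags)
{
	while (count) {
		unsigned ndw = count * 2;
		if (ndw > 0xFFFFE)
			ndw = 0xFFFFE;

		uint64_t value = (flags & PTE_VALID) ? addr : 0;

		ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);    /* line 420 */
		ib->ptr[ib->length_dw++] = (uint32_t)pe;               /* dst addr */
		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
		ib->ptr[ib->length_dw++] = flags;                      /* mask */
		ib->ptr[ib->length_dw++] = 0;
		ib->ptr[ib->length_dw++] = (uint32_t)value;            /* first value */
		ib->ptr[ib->length_dw++] = upper_32_bits(value);
		ib->ptr[ib->length_dw++] = incr;                       /* increment size */
		ib->ptr[ib->length_dw++] = 0;

		pe    += (uint64_t)ndw * 4;
		addr  += (uint64_t)(ndw / 2) * incr;
		count -= ndw / 2;
	}
}

int main(void)
{
	struct fake_ib ib = { .length_dw = 0 };

	vm_set_pages(&ib, 0x2000, 0x40000000ull, 256, 4096, PTE_VALID);
	printf("emitted %u dwords for 256 entries\n", ib.length_dw);  /* one 9-dword packet */
	return 0;
}

Compared with the WRITE path above, the IB cost here is constant per chunk, which is presumably why this packet is used for physically contiguous ranges.
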
439 * @ib: indirect buffer to fill with padding
442 void cayman_dma_vm_pad_ib(struct radeon_ib *ib)
444 while (ib->length_dw & 0x7)
445 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0);
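Lines 439-445 are cayman_dma_vm_pad_ib(), which tops the IB up with NOP packets until length_dw is a multiple of 8, presumably to satisfy the same 8-dword alignment the DMA ring code pads for. A tiny runnable sketch with stand-in types and a placeholder DMA_PACKET encoding:

/* Sketch only: stand-in type and placeholder macro, not the driver's. */
#include <stdint.h>
#include <stdio.h>

#define DMA_PACKET_NOP           0x0      /* placeholder opcode */
#define DMA_PACKET(cmd, t, s, n) (((uint32_t)(cmd) << 28) | ((n) & 0xFFFFF))

struct fake_ib { uint32_t ptr[64]; uint32_t length_dw; };

/* Append NOP packets until the IB length is 8-dword aligned (lines 444-445). */
static void vm_pad_ib(struct fake_ib *ib)
{
	while (ib->length_dw & 0x7)
		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0);
}

int main(void)
{
	struct fake_ib ib = { .length_dw = 11 };   /* e.g. after one WRITE packet */

	vm_pad_ib(&ib);
	printf("length_dw padded from 11 to %u\n", ib.length_dw);  /* 16 */
	return 0;
}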