
Searched refs:ndw (Results 1 - 25 of 44) sorted by relevance

/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm_sdma.c
66 unsigned int ndw = AMDGPU_VM_SDMA_MIN_NUM_DW; in amdgpu_vm_sdma_prepare() local
69 r = amdgpu_job_alloc_with_ib(p->adev, ndw * 4, pool, &p->job); in amdgpu_vm_sdma_prepare()
73 p->num_dw_left = ndw; in amdgpu_vm_sdma_prepare()
206 unsigned int i, ndw, nptes; in amdgpu_vm_sdma_update() local
216 ndw = p->num_dw_left; in amdgpu_vm_sdma_update()
217 ndw -= p->job->ibs->length_dw; in amdgpu_vm_sdma_update()
219 if (ndw < 32) { in amdgpu_vm_sdma_update()
225 ndw = 32; in amdgpu_vm_sdma_update()
227 ndw += count * 2; in amdgpu_vm_sdma_update()
228 ndw in amdgpu_vm_sdma_update()
[all...]
si_dma.c
343 unsigned ndw = count * 2; in si_dma_vm_write_pte() local
345 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pte()
348 for (; ndw > 0; ndw -= 2) { in si_dma_vm_write_pte()
373 unsigned ndw; in si_dma_vm_set_pte_pde() local
376 ndw = count * 2; in si_dma_vm_set_pte_pde()
377 if (ndw > 0xFFFFE) in si_dma_vm_set_pte_pde()
378 ndw = 0xFFFFE; in si_dma_vm_set_pte_pde()
386 ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw); in si_dma_vm_set_pte_pde()
395 pe += ndw * in si_dma_vm_set_pte_pde()
[all...]
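
The two si_dma.c functions above share the recurring SI packet-sizing pattern: two dwords per page-table entry, clamped to the 0xFFFFE-dword payload limit of a single DMA packet. A minimal sketch of that sizing (the helper name is illustrative, not the driver's):

```c
/* Each PTE takes two dwords (low/high word) in an SI DMA packet, and the
 * packet's count field tops out at 0xFFFFE dwords, so larger updates are
 * split across packets, as in si_dma_vm_write_pte()/si_dma_vm_set_pte_pde(). */
unsigned int si_packet_ndw(unsigned int count)
{
	unsigned int ndw = count * 2;	/* two dwords per entry */

	if (ndw > 0xFFFFE)		/* hardware limit per packet */
		ndw = 0xFFFFE;
	return ndw;
}
```
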
amdgpu_ring.c
57 * @ndw: number of dwords to allocate in the ring buffer
59 * Allocate @ndw dwords in the ring buffer (all asics).
62 int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw) in amdgpu_ring_alloc() argument
66 ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask; in amdgpu_ring_alloc()
71 if (WARN_ON_ONCE(ndw > ring->max_dw)) in amdgpu_ring_alloc()
74 ring->count_dw = ndw; in amdgpu_ring_alloc()
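
amdgpu_ring_alloc() rounds the request up to the ring's alignment with a mask trick. A small self-contained sketch, assuming align_mask is the alignment minus one for a power-of-two alignment, as in the driver:

```c
#include <assert.h>
#include <stdio.h>

/* Round ndw up to a power-of-two alignment, mirroring
 * ndw = (ndw + align_mask) & ~align_mask in amdgpu_ring_alloc().
 * align_mask must be alignment - 1 (e.g. 0x3f for 64-dword alignment). */
static unsigned int align_ndw(unsigned int ndw, unsigned int align_mask)
{
	return (ndw + align_mask) & ~align_mask;
}

int main(void)
{
	assert(align_ndw(1, 0x3f) == 64);	/* 1 dword rounds up to 64 */
	assert(align_ndw(64, 0x3f) == 64);	/* already aligned, unchanged */
	assert(align_ndw(65, 0x3f) == 128);	/* next multiple of 64 */
	printf("alignment checks passed\n");
	return 0;
}
```
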
mes_v10_1.c
88 int ndw = size / 4; in mes_v10_1_submit_pkt_and_poll_completion() local
96 if (amdgpu_ring_alloc(ring, ndw)) in mes_v10_1_submit_pkt_and_poll_completion()
99 amdgpu_ring_write_multiple(ring, pkt, ndw); in mes_v10_1_submit_pkt_and_poll_completion()
cik_sdma.c
758 unsigned ndw = count * 2; in cik_sdma_vm_write_pte() local
764 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pte()
765 for (; ndw > 0; ndw -= 2) { in cik_sdma_vm_write_pte()
sdma_v2_4.c
697 unsigned ndw = count * 2; in sdma_v2_4_vm_write_pte() local
703 ib->ptr[ib->length_dw++] = ndw; in sdma_v2_4_vm_write_pte()
704 for (; ndw > 0; ndw -= 2) { in sdma_v2_4_vm_write_pte()
gmc_v9_0.c
859 unsigned int ndw = kiq->pmf->invalidate_tlbs_size + 8; in gmc_v9_0_flush_gpu_tlb_pasid() local
862 ndw += kiq->pmf->invalidate_tlbs_size; in gmc_v9_0_flush_gpu_tlb_pasid()
866 amdgpu_ring_alloc(ring, ndw); in gmc_v9_0_flush_gpu_tlb_pasid()
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/
si_dma.c
112 unsigned ndw; in si_dma_vm_write_pages() local
115 ndw = count * 2; in si_dma_vm_write_pages()
116 if (ndw > 0xFFFFE) in si_dma_vm_write_pages()
117 ndw = 0xFFFFE; in si_dma_vm_write_pages()
120 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
123 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in si_dma_vm_write_pages()
159 unsigned ndw; in si_dma_vm_set_pages() local
162 ndw = count * 2; in si_dma_vm_set_pages()
163 if (ndw > in si_dma_vm_set_pages()
[all...]
ni_dma.c
320 unsigned ndw; in cayman_dma_vm_copy_pages() local
323 ndw = count * 2; in cayman_dma_vm_copy_pages()
324 if (ndw > 0xFFFFE) in cayman_dma_vm_copy_pages()
325 ndw = 0xFFFFE; in cayman_dma_vm_copy_pages()
328 0, 0, ndw); in cayman_dma_vm_copy_pages()
334 pe += ndw * 4; in cayman_dma_vm_copy_pages()
335 src += ndw * 4; in cayman_dma_vm_copy_pages()
336 count -= ndw / 2; in cayman_dma_vm_copy_pages()
360 unsigned ndw; in cayman_dma_vm_write_pages() local
363 ndw in cayman_dma_vm_write_pages()
408 unsigned ndw; in cayman_dma_vm_set_pages() local
[all...]
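
The cayman_dma_vm_copy_pages() hit shows how an oversized copy is split into maximum-size packets, advancing the byte addresses by ndw * 4 (four bytes per dword) and retiring ndw / 2 eight-byte entries per packet. A hedged sketch of that loop, with packet emission elided and names illustrative:

```c
#include <stdint.h>

/* Illustrative sketch of the chunking loop in cayman_dma_vm_copy_pages():
 * one COPY packet moves at most 0xFFFFE dwords, so the request is split,
 * advancing the page-table and source addresses by ndw * 4 bytes and
 * consuming ndw / 2 page-table entries (8 bytes each) per packet. */
void copy_pages_chunked(uint64_t pe, uint64_t src, unsigned int count)
{
	while (count) {
		unsigned int ndw = count * 2;

		if (ndw > 0xFFFFE)
			ndw = 0xFFFFE;

		/* emit_copy_packet(pe, src, ndw); -- packet emission elided */

		pe += (uint64_t)ndw * 4;
		src += (uint64_t)ndw * 4;
		count -= ndw / 2;
	}
}
```
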
cik_sdma.c
848 unsigned ndw; in cik_sdma_vm_write_pages() local
851 ndw = count * 2; in cik_sdma_vm_write_pages()
852 if (ndw > 0xFFFFE) in cik_sdma_vm_write_pages()
853 ndw = 0xFFFFE; in cik_sdma_vm_write_pages()
860 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pages()
861 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cik_sdma_vm_write_pages()
897 unsigned ndw; in cik_sdma_vm_set_pages() local
900 ndw = count; in cik_sdma_vm_set_pages()
901 if (ndw > in cik_sdma_vm_set_pages()
[all...]
radeon_ring.c
103 * @ndw: number of dwords to allocate in the ring buffer
105 * Allocate @ndw dwords in the ring buffer (all asics).
108 int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) in radeon_ring_alloc() argument
113 if (ndw > (ring->ring_size / 4)) in radeon_ring_alloc()
118 ndw = (ndw + ring->align_mask) & ~ring->align_mask; in radeon_ring_alloc()
119 while (ndw > (ring->ring_free_dw - 1)) { in radeon_ring_alloc()
121 if (ndw < ring->ring_free_dw) { in radeon_ring_alloc()
128 ring->count_dw = ndw; in radeon_ring_alloc()
138 * @ndw
144 radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) in radeon_ring_lock() argument
[all...]
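
radeon_ring_alloc() waits while ndw > ring_free_dw - 1, keeping one dword of slack so a full ring stays distinguishable from an empty one. A sketch of the free-space computation behind that wait loop, assuming a power-of-two ring where ptr_mask is the size in dwords minus one:

```c
/* Free dwords between the write and read pointers of a rptr/wptr ring,
 * with a zero result treated as an empty ring, in the spirit of the
 * bookkeeping radeon_ring_alloc() relies on. Names are illustrative. */
unsigned int ring_free_dw(unsigned int rptr, unsigned int wptr,
			  unsigned int size_dw, unsigned int ptr_mask)
{
	unsigned int free_dw = (rptr + size_dw - wptr) & ptr_mask;

	return free_dw ? free_dw : size_dw;	/* rptr == wptr means empty */
}
```
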
radeon_vm.c
648 unsigned count = 0, pt_idx, ndw; in radeon_vm_update_page_directory() local
653 ndw = 64; in radeon_vm_update_page_directory()
656 ndw += vm->max_pde_used * 6; in radeon_vm_update_page_directory()
659 if (ndw > 0xfffff) in radeon_vm_update_page_directory()
662 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4); in radeon_vm_update_page_directory()
706 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
918 unsigned nptes, ncmds, ndw; in radeon_vm_bo_update() local
974 ndw = 64; in radeon_vm_bo_update()
979 ndw += ncmds * 7; in radeon_vm_bo_update()
983 ndw in radeon_vm_bo_update()
[all...]
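
radeon_vm_update_page_directory() sizes its IB pessimistically: 64 dwords of headroom plus 6 dwords per page-directory entry, capped at the 0xfffff-dword IB limit, then converted to bytes for radeon_ib_get(); the later WARN_ON checks that the estimate was not exceeded. A sketch of that estimate (helper name illustrative):

```c
/* Worst-case IB sizing as in radeon_vm_update_page_directory():
 * fixed 64-dword headroom for headers/padding plus 6 dwords per
 * page-directory entry, capped at the 0xfffff-dword IB limit. */
unsigned int pd_update_ib_bytes(unsigned int max_pde_used)
{
	unsigned int ndw = 64 + max_pde_used * 6;

	if (ndw > 0xfffff)
		ndw = 0xfffff;
	return ndw * 4;		/* dwords -> bytes for radeon_ib_get() */
}
```
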
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/
si_dma.c
111 unsigned ndw; in si_dma_vm_write_pages() local
114 ndw = count * 2; in si_dma_vm_write_pages()
115 if (ndw > 0xFFFFE) in si_dma_vm_write_pages()
116 ndw = 0xFFFFE; in si_dma_vm_write_pages()
119 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
122 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in si_dma_vm_write_pages()
158 unsigned ndw; in si_dma_vm_set_pages() local
161 ndw = count * 2; in si_dma_vm_set_pages()
162 if (ndw > in si_dma_vm_set_pages()
[all...]
ni_dma.c
319 unsigned ndw; in cayman_dma_vm_copy_pages() local
322 ndw = count * 2; in cayman_dma_vm_copy_pages()
323 if (ndw > 0xFFFFE) in cayman_dma_vm_copy_pages()
324 ndw = 0xFFFFE; in cayman_dma_vm_copy_pages()
327 0, 0, ndw); in cayman_dma_vm_copy_pages()
333 pe += ndw * 4; in cayman_dma_vm_copy_pages()
334 src += ndw * 4; in cayman_dma_vm_copy_pages()
335 count -= ndw / 2; in cayman_dma_vm_copy_pages()
359 unsigned ndw; in cayman_dma_vm_write_pages() local
362 ndw in cayman_dma_vm_write_pages()
407 unsigned ndw; in cayman_dma_vm_set_pages() local
[all...]
radeon_ring.c
102 * @ndw: number of dwords to allocate in the ring buffer
104 * Allocate @ndw dwords in the ring buffer (all asics).
107 int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) in radeon_ring_alloc() argument
112 if (ndw > (ring->ring_size / 4)) in radeon_ring_alloc()
117 ndw = (ndw + ring->align_mask) & ~ring->align_mask; in radeon_ring_alloc()
118 while (ndw > (ring->ring_free_dw - 1)) { in radeon_ring_alloc()
120 if (ndw < ring->ring_free_dw) { in radeon_ring_alloc()
127 ring->count_dw = ndw; in radeon_ring_alloc()
137 * @ndw
143 radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) in radeon_ring_lock() argument
[all...]
cik_sdma.c
847 unsigned ndw; in cik_sdma_vm_write_pages() local
850 ndw = count * 2; in cik_sdma_vm_write_pages()
851 if (ndw > 0xFFFFE) in cik_sdma_vm_write_pages()
852 ndw = 0xFFFFE; in cik_sdma_vm_write_pages()
859 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pages()
860 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cik_sdma_vm_write_pages()
896 unsigned ndw; in cik_sdma_vm_set_pages() local
899 ndw = count; in cik_sdma_vm_set_pages()
900 if (ndw > in cik_sdma_vm_set_pages()
[all...]
radeon_vm.c
647 unsigned count = 0, pt_idx, ndw; in radeon_vm_update_page_directory() local
652 ndw = 64; in radeon_vm_update_page_directory()
655 ndw += vm->max_pde_used * 6; in radeon_vm_update_page_directory()
658 if (ndw > 0xfffff) in radeon_vm_update_page_directory()
661 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4); in radeon_vm_update_page_directory()
705 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
917 unsigned nptes, ncmds, ndw; in radeon_vm_bo_update() local
973 ndw = 64; in radeon_vm_bo_update()
978 ndw += ncmds * 7; in radeon_vm_bo_update()
982 ndw in radeon_vm_bo_update()
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm_sdma.c
58 unsigned int ndw; in amdgpu_vm_sdma_alloc_job() local
62 ndw = AMDGPU_VM_SDMA_MIN_NUM_DW; in amdgpu_vm_sdma_alloc_job()
64 ndw += count * 2; in amdgpu_vm_sdma_alloc_job()
65 ndw = min(ndw, AMDGPU_VM_SDMA_MAX_NUM_DW); in amdgpu_vm_sdma_alloc_job()
68 ndw * 4, pool, &p->job); in amdgpu_vm_sdma_alloc_job()
72 p->num_dw_left = ndw; in amdgpu_vm_sdma_alloc_job()
233 unsigned int i, ndw, nptes; in amdgpu_vm_sdma_update() local
252 ndw = p->num_dw_left; in amdgpu_vm_sdma_update()
253 ndw in amdgpu_vm_sdma_update()
[all...]
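
The 6.6 amdgpu_vm_sdma_alloc_job() grows the reservation from a fixed minimum by two dwords per entry and clamps it to a per-job maximum. A sketch of that sizing; the MIN/MAX values used here are assumptions matching the defines in amdgpu_vm_sdma.c:

```c
/* Assumed values of the defines in amdgpu_vm_sdma.c. */
#define AMDGPU_VM_SDMA_MIN_NUM_DW 256u
#define AMDGPU_VM_SDMA_MAX_NUM_DW (16u * AMDGPU_VM_SDMA_MIN_NUM_DW)

/* Reservation sizing as in amdgpu_vm_sdma_alloc_job(): a fixed minimum
 * plus two dwords per page-table entry, clamped to the per-job maximum. */
unsigned int vm_sdma_job_ndw(unsigned int count)
{
	unsigned int ndw = AMDGPU_VM_SDMA_MIN_NUM_DW + count * 2;

	if (ndw > AMDGPU_VM_SDMA_MAX_NUM_DW)
		ndw = AMDGPU_VM_SDMA_MAX_NUM_DW;
	return ndw;		/* caller allocates ndw * 4 bytes for the IB */
}
```
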
si_dma.c
340 unsigned ndw = count * 2; in si_dma_vm_write_pte() local
342 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pte()
345 for (; ndw > 0; ndw -= 2) { in si_dma_vm_write_pte()
370 unsigned ndw; in si_dma_vm_set_pte_pde() local
373 ndw = count * 2; in si_dma_vm_set_pte_pde()
374 if (ndw > 0xFFFFE) in si_dma_vm_set_pte_pde()
375 ndw = 0xFFFFE; in si_dma_vm_set_pte_pde()
383 ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw); in si_dma_vm_set_pte_pde()
392 pe += ndw * in si_dma_vm_set_pte_pde()
[all...]
amdgpu_ring.c
76 * @ndw: number of dwords to allocate in the ring buffer
78 * Allocate @ndw dwords in the ring buffer (all asics).
81 int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned int ndw) in amdgpu_ring_alloc() argument
85 ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask; in amdgpu_ring_alloc()
90 if (WARN_ON_ONCE(ndw > ring->max_dw)) in amdgpu_ring_alloc()
93 ring->count_dw = ndw; in amdgpu_ring_alloc()
cik_sdma.c
751 unsigned ndw = count * 2; in cik_sdma_vm_write_pte() local
757 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pte()
758 for (; ndw > 0; ndw -= 2) { in cik_sdma_vm_write_pte()
sdma_v3_0.c
959 unsigned ndw = count * 2; in sdma_v3_0_vm_write_pte() local
965 ib->ptr[ib->length_dw++] = ndw; in sdma_v3_0_vm_write_pte()
966 for (; ndw > 0; ndw -= 2) { in sdma_v3_0_vm_write_pte()
sdma_v2_4.c
688 unsigned ndw = count * 2; in sdma_v2_4_vm_write_pte() local
694 ib->ptr[ib->length_dw++] = ndw; in sdma_v2_4_vm_write_pte()
695 for (; ndw > 0; ndw -= 2) { in sdma_v2_4_vm_write_pte()
mes_v11_0.c
101 int ndw = size / 4; in mes_v11_0_submit_pkt_and_poll_completion() local
119 if (amdgpu_ring_alloc(ring, ndw)) { in mes_v11_0_submit_pkt_and_poll_completion()
128 amdgpu_ring_write_multiple(ring, pkt, ndw); in mes_v11_0_submit_pkt_and_poll_completion()
/kernel/linux/linux-6.6/drivers/crypto/aspeed/
aspeed-acry.c
253 int nbits, ndw; in aspeed_acry_rsa_ctx_copy() local
273 ndw = DIV_ROUND_UP(nbytes, BYTES_PER_DWORD); in aspeed_acry_rsa_ctx_copy()
279 for (j = ndw; j > 0; j--) { in aspeed_acry_rsa_ctx_copy()
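
The aspeed-acry.c hit converts a byte count to dwords with DIV_ROUND_UP before looping over the dwords. A minimal sketch, assuming BYTES_PER_DWORD is 4 as in the driver:

```c
/* Round a byte count up to whole dwords, as in aspeed_acry_rsa_ctx_copy().
 * BYTES_PER_DWORD is assumed to be 4; DIV_ROUND_UP matches the kernel macro. */
#define BYTES_PER_DWORD 4
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int bytes_to_ndw(int nbytes)
{
	return DIV_ROUND_UP(nbytes, BYTES_PER_DWORD);	/* e.g. 5 bytes -> 2 dwords */
}
```
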

Completed in 26 milliseconds
