/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/

amdgpu_cs.c
    726  struct drm_amdgpu_cs_chunk_ib *chunk_ib;  in amdgpu_cs_vm_handling() (local)
    736  chunk_ib = chunk->kdata;  in amdgpu_cs_vm_handling()
    741  va_start = chunk_ib->va_start & AMDGPU_GMC_HOLE_MASK;  in amdgpu_cs_vm_handling()
    748  if ((va_start + chunk_ib->ib_bytes) >  in amdgpu_cs_vm_handling()
    764  memcpy(ib->ptr, kptr, chunk_ib->ib_bytes);  in amdgpu_cs_vm_handling()
    871  struct drm_amdgpu_cs_chunk_ib *chunk_ib;  in amdgpu_cs_ib_fill() (local)
    876  chunk_ib = (struct drm_amdgpu_cs_chunk_ib *)chunk->kdata;  in amdgpu_cs_ib_fill()
    881  if (chunk_ib->ip_type == AMDGPU_HW_IP_GFX &&  in amdgpu_cs_ib_fill()
    883  if (chunk_ib->flags & AMDGPU_IB_FLAG_PREEMPT) {  in amdgpu_cs_ib_fill()
    884  if (chunk_ib  in amdgpu_cs_ib_fill()
    [all...]
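The amdgpu_cs_vm_handling() hits above show the recurring shape of IB handling in the amdgpu CS path: mask the user-supplied GPU VA, range-check va_start + ib_bytes against the backing mapping, then copy ib_bytes into the kernel IB. Below is a minimal user-space sketch of that pattern; the struct layout, the HOLE_MASK_SKETCH value, and the mapping check are simplified stand-ins, not the real amdgpu definitions.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Simplified stand-in for struct drm_amdgpu_cs_chunk_ib (fields trimmed). */
struct chunk_ib_sketch {
	uint64_t va_start;   /* GPU VA of the IB as submitted by user space */
	uint32_t ib_bytes;   /* IB size in bytes */
};

/* Invented stand-in for AMDGPU_GMC_HOLE_MASK: strip the high "hole" bits. */
#define HOLE_MASK_SKETCH 0x0000ffffffffffffULL

/* Validate the IB against its backing mapping, then copy it into the kernel IB. */
static int ib_copy_sketch(const struct chunk_ib_sketch *ib,
			  uint64_t map_start, uint64_t map_size,
			  const uint32_t *kptr, uint32_t *dst)
{
	uint64_t va_start = ib->va_start & HOLE_MASK_SKETCH;

	/* Reject an IB that starts before or runs past the mapping. */
	if (va_start < map_start ||
	    va_start + ib->ib_bytes > map_start + map_size)
		return -1;

	memcpy(dst, kptr, ib->ib_bytes);
	return 0;
}

int main(void)
{
	struct chunk_ib_sketch ib = { .va_start = 0x100040, .ib_bytes = 32 };
	uint32_t src[16] = { 0 }, dst[16];

	if (ib_copy_sketch(&ib, 0x100000, 0x1000, src, dst))
		fprintf(stderr, "IB out of range\n");
	else
		printf("copied %u bytes\n", (unsigned)ib.ib_bytes);
	return 0;
}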
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/

amdgpu_cs.c
     74  struct drm_amdgpu_cs_chunk_ib *chunk_ib)  in amdgpu_cs_job_idx()
     80  r = amdgpu_ctx_get_entity(p->ctx, chunk_ib->ip_type,  in amdgpu_cs_job_idx()
     81  chunk_ib->ip_instance,  in amdgpu_cs_job_idx()
     82  chunk_ib->ring, &entity);  in amdgpu_cs_job_idx()
    108  struct drm_amdgpu_cs_chunk_ib *chunk_ib,  in amdgpu_cs_p1_ib()
    113  r = amdgpu_cs_job_idx(p, chunk_ib);  in amdgpu_cs_p1_ib()
    117  if (num_ibs[r] >= amdgpu_ring_max_ibs(chunk_ib->ip_type))  in amdgpu_cs_p1_ib()
    331  struct drm_amdgpu_cs_chunk_ib *chunk_ib = chunk->kdata;  in amdgpu_cs_p2_ib() (local)
    339  r = amdgpu_cs_job_idx(p, chunk_ib);  in amdgpu_cs_p2_ib()
    351  if (chunk_ib  in amdgpu_cs_p2_ib()
     73  amdgpu_cs_job_idx(struct amdgpu_cs_parser *p, struct drm_amdgpu_cs_chunk_ib *chunk_ib)  amdgpu_cs_job_idx() (argument)
    107  amdgpu_cs_p1_ib(struct amdgpu_cs_parser *p, struct drm_amdgpu_cs_chunk_ib *chunk_ib, unsigned int *num_ibs)  amdgpu_cs_p1_ib() (argument)
    [all...]
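The linux-6.6 hits show the parser's two-pass split: amdgpu_cs_job_idx() resolves a chunk's (ip_type, ip_instance, ring) triple to a job slot, and amdgpu_cs_p1_ib() only counts IBs per slot against a per-IP-type cap before amdgpu_cs_p2_ib() fills them in. A minimal sketch of the counting step follows; the slot lookup and the cap constants are invented stand-ins for amdgpu_ctx_get_entity() and amdgpu_ring_max_ibs().

#include <stdint.h>
#include <stdio.h>

#define NUM_IP_TYPES_SKETCH  4   /* invented: number of hardware IP types */
#define MAX_IBS_PER_RING     16  /* invented: stand-in for amdgpu_ring_max_ibs() */

/* Minimal stand-in for struct drm_amdgpu_cs_chunk_ib (fields trimmed). */
struct chunk_ib_sketch {
	uint32_t ip_type;
	uint32_t ip_instance;
	uint32_t ring;
};

/* Invented slot lookup; the kernel resolves this via amdgpu_ctx_get_entity(). */
static int job_idx_sketch(const struct chunk_ib_sketch *ib)
{
	if (ib->ip_type >= NUM_IP_TYPES_SKETCH)
		return -1;
	return (int)ib->ip_type;
}

/* First pass: only count IBs per job slot and enforce a per-ring cap. */
static int p1_ib_sketch(const struct chunk_ib_sketch *ib, unsigned int *num_ibs)
{
	int r = job_idx_sketch(ib);

	if (r < 0)
		return r;
	if (num_ibs[r] >= MAX_IBS_PER_RING)
		return -1;  /* too many IBs for this ring */
	num_ibs[r]++;
	return 0;
}

int main(void)
{
	unsigned int num_ibs[NUM_IP_TYPES_SKETCH] = { 0 };
	struct chunk_ib_sketch ib = { .ip_type = 0, .ip_instance = 0, .ring = 0 };

	for (int i = 0; i < 20; i++)
		if (p1_ib_sketch(&ib, num_ibs))
			printf("IB %d rejected: per-ring limit reached\n", i);
	return 0;
}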
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/

radeon_cs.c
    289  p->chunk_ib = NULL;  in radeon_cs_parser_init()
    323  p->chunk_ib = &p->chunks[i];  in radeon_cs_parser_init()
    466  if (parser->chunk_ib == NULL)  in radeon_cs_ib_chunk()
    550  if (parser->chunk_ib == NULL)  in radeon_cs_ib_vm_chunk()
    611  if (parser->chunk_ib == NULL)  in radeon_cs_ib_fill()
    639  ib_chunk = parser->chunk_ib;  in radeon_cs_ib_fill()
    645  ib_chunk = parser->chunk_ib;  in radeon_cs_ib_fill()
    737  struct radeon_cs_chunk *ib_chunk = p->chunk_ib;  in radeon_cs_packet_parse()

radeon_uvd.c
    700  if (p->chunk_ib->length_dw % 16) {  in radeon_uvd_cs_parse()
    702  p->chunk_ib->length_dw);  in radeon_uvd_cs_parse()
    730  } while (p->idx < p->chunk_ib->length_dw);  in radeon_uvd_cs_parse()

radeon_trace.h
     41  __entry->dw = p->chunk_ib->length_dw;

radeon_vce.c
    564  while (p->idx < p->chunk_ib->length_dw) {  in radeon_vce_cs_parse()

r600_cs.c
   2317  } while (p->idx < p->chunk_ib->length_dw);  in r600_cs_parse()
   2376  struct radeon_cs_chunk *ib_chunk = p->chunk_ib;  in r600_dma_cs_parse()
   2523  } while (p->idx < p->chunk_ib->length_dw);  in r600_dma_cs_parse()

evergreen_cs.c
   2776  } while (p->idx < p->chunk_ib->length_dw);  in evergreen_cs_parse()
   2799  struct radeon_cs_chunk *ib_chunk = p->chunk_ib;  in evergreen_dma_cs_parse()
   3215  } while (p->idx < p->chunk_ib->length_dw);  in evergreen_dma_cs_parse()

r300.c
   1320  } while (p->idx < p->chunk_ib->length_dw);  in r300_cs_parse()
radeon.h
   1079  struct radeon_cs_chunk *chunk_ib;  (member)
   1096  struct radeon_cs_chunk *ibc = p->chunk_ib;  in radeon_get_ib_value()
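radeon.h keeps the IB chunk as a member of the CS parser and reads command dwords out of it through the radeon_get_ib_value() inline helper. A minimal sketch of that accessor pattern follows, with trimmed stand-in types; the fallback to a separately held kernel IB copy mirrors what the helper appears to do, but the field names here are simplified.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for the radeon parser types (fields trimmed). */
struct cs_chunk_sketch {
	uint32_t *kdata;     /* chunk data copied from user space, if any */
	uint32_t length_dw;  /* chunk length in dwords */
};

struct cs_parser_sketch {
	struct cs_chunk_sketch *chunk_ib;  /* the IB chunk held by the parser */
	uint32_t *ib_ptr;                  /* stand-in for the kernel IB copy */
};

/* Read one command dword from the IB chunk; fall back to the kernel IB copy. */
static uint32_t get_ib_value_sketch(struct cs_parser_sketch *p, int idx)
{
	struct cs_chunk_sketch *ibc = p->chunk_ib;

	if (ibc->kdata)
		return ibc->kdata[idx];
	return p->ib_ptr[idx];
}

int main(void)
{
	uint32_t words[4] = { 0xC0012300, 0x1, 0x2, 0x3 };
	struct cs_chunk_sketch chunk = { .kdata = words, .length_dw = 4 };
	struct cs_parser_sketch p = { .chunk_ib = &chunk, .ib_ptr = words };

	printf("dword 0: 0x%08X\n", (unsigned)get_ib_value_sketch(&p, 0));
	return 0;
}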
r100.c
   2073  } while (p->idx < p->chunk_ib->length_dw);  in r100_cs_parse()
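Every generation-specific parser listed here (r100 through evergreen, plus UVD and VCE) drains the IB chunk with the same loop shape: advance p->idx one packet at a time until it reaches chunk_ib->length_dw. A minimal sketch of that loop follows; the fixed two-dword packet size is invented and stands in for the real per-packet decoding done by radeon_cs_packet_parse().

#include <stdint.h>
#include <stdio.h>

struct cs_chunk_sketch {
	const uint32_t *kdata;
	uint32_t length_dw;
};

struct cs_parser_sketch {
	struct cs_chunk_sketch *chunk_ib;
	uint32_t idx;  /* current position in the IB, in dwords */
};

/* Invented: pretend every packet is a two-dword header + payload pair. */
static int parse_one_packet_sketch(struct cs_parser_sketch *p)
{
	if (p->idx + 2 > p->chunk_ib->length_dw)
		return -1;  /* packet runs past the chunk */
	printf("packet at dw %u: header 0x%08X\n",
	       (unsigned)p->idx, (unsigned)p->chunk_ib->kdata[p->idx]);
	p->idx += 2;
	return 0;
}

int main(void)
{
	uint32_t ib[6] = { 0xC0001000, 0xAAAAAAAA,
			   0xC0002000, 0xBBBBBBBB,
			   0xC0003000, 0xCCCCCCCC };
	struct cs_chunk_sketch chunk = { .kdata = ib, .length_dw = 6 };
	struct cs_parser_sketch p = { .chunk_ib = &chunk, .idx = 0 };

	/* Same shape as r100_cs_parse() and friends: loop until length_dw. */
	do {
		if (parse_one_packet_sketch(&p))
			return 1;
	} while (p.idx < p.chunk_ib->length_dw);

	return 0;
}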
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/

radeon_cs.c
    288  p->chunk_ib = NULL;  in radeon_cs_parser_init()
    322  p->chunk_ib = &p->chunks[i];  in radeon_cs_parser_init()
    470  if (parser->chunk_ib == NULL)  in radeon_cs_ib_chunk()
    558  if (parser->chunk_ib == NULL)  in radeon_cs_ib_vm_chunk()
    619  if (parser->chunk_ib == NULL)  in radeon_cs_ib_fill()
    647  ib_chunk = parser->chunk_ib;  in radeon_cs_ib_fill()
    653  ib_chunk = parser->chunk_ib;  in radeon_cs_ib_fill()
    746  struct radeon_cs_chunk *ib_chunk = p->chunk_ib;  in radeon_cs_packet_parse()

radeon_uvd.c
    690  if (p->chunk_ib->length_dw % 16) {  in radeon_uvd_cs_parse()
    692  p->chunk_ib->length_dw);  in radeon_uvd_cs_parse()
    720  } while (p->idx < p->chunk_ib->length_dw);  in radeon_uvd_cs_parse()

radeon_trace.h
     41  __entry->dw = p->chunk_ib->length_dw;

radeon_vce.c
    564  while (p->idx < p->chunk_ib->length_dw) {  in radeon_vce_cs_parse()

r600_cs.c
   2319  } while (p->idx < p->chunk_ib->length_dw);  in r600_cs_parse()
   2378  struct radeon_cs_chunk *ib_chunk = p->chunk_ib;  in r600_dma_cs_parse()
   2525  } while (p->idx < p->chunk_ib->length_dw);  in r600_dma_cs_parse()

evergreen_cs.c
   2775  } while (p->idx < p->chunk_ib->length_dw);  in evergreen_cs_parse()
   2798  struct radeon_cs_chunk *ib_chunk = p->chunk_ib;  in evergreen_dma_cs_parse()
   3214  } while (p->idx < p->chunk_ib->length_dw);  in evergreen_dma_cs_parse()

r300.c
   1315  } while (p->idx < p->chunk_ib->length_dw);  in r300_cs_parse()
radeon.h
   1035  struct radeon_cs_chunk *chunk_ib;  (member)
   1052  struct radeon_cs_chunk *ibc = p->chunk_ib;  in radeon_get_ib_value()

r100.c
   2081  } while (p->idx < p->chunk_ib->length_dw);  in r100_cs_parse()