/third_party/mesa3d/src/freedreno/drm/ |
freedreno_ringbuffer_sp.c | Matches in the fd_ringbuffer_sp implementation: finalize_current_cmd(), fd_ringbuffer_sp_grow(), fd_ringbuffer_references_bo(), fd_ringbuffer_sp_cmd_count(), fd_ringbuffer_sp_check_size(), fd_ringbuffer_sp_destroy(), fd_ringbuffer_sp_init(), plus the comment on incrementing the refcnt of the current ring in fd_submit_sp_new_ringbuffer() … |
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/ |
sdma_v4_0.c | SDMA v4.0 ring support: pointer accessors (sdma_v4_0_ring_get_rptr(), sdma_v4_0_ring_get_wptr(), sdma_v4_0_ring_set_wptr(), sdma_v4_0_page_ring_get_wptr(), sdma_v4_0_page_ring_set_wptr()), command emission (sdma_v4_0_ring_insert_nop(), sdma_v4_0_ring_emit_ib(), sdma_v4_0_wait_reg_mem(), sdma_v4_0_ring_emit_hdp_flush(), sdma_v4_0_ring_emit_fence(), sdma_v4_0_ring_pad_ib(), sdma_v4_0_ring_emit_pipeline_sync(), sdma_v4_0_ring_emit_vm_flush(), sdma_v4_0_ring_emit_wreg(), sdma_v4_0_ring_emit_reg_wait()), and bring-up/test paths (sdma_v4_0_rb_cntl(), sdma_v4_0_gfx_resume(), sdma_v4_0_page_resume(), sdma_v4_0_start(), sdma_v4_0_ring_test_ring(), sdma_v4_0_ring_test_ib(), sdma_v4_0_sw_init()) … |
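The get_wptr/set_wptr pairs above follow a common pattern: when a doorbell is available, the write pointer lives in a CPU-coherent slot that the hardware snoops and the doorbell is rung after updating it; otherwise MMIO registers are used. A minimal sketch of that pattern, where struct my_ring and the mmio_read64()/mmio_write64()/ring_doorbell() helpers are hypothetical stand-ins rather than the real amdgpu API:

```c
#include <stdint.h>
#include <stdbool.h>

/* Hypothetical ring state; the real amdgpu_ring carries far more. */
struct my_ring {
    bool use_doorbell;
    volatile uint64_t *wptr_cpu;   /* CPU-coherent write-pointer slot */
    uint32_t doorbell_index;
    uint32_t wptr_reg_lo, wptr_reg_hi;
    uint64_t wptr;                 /* driver-side cached write pointer */
};

/* Stand-in register/doorbell helpers (stubs, not a real API). */
static uint64_t mmio_read64(uint32_t lo, uint32_t hi) { (void)lo; (void)hi; return 0; }
static void mmio_write64(uint32_t lo, uint32_t hi, uint64_t v) { (void)lo; (void)hi; (void)v; }
static void ring_doorbell(uint32_t index, uint64_t v) { (void)index; (void)v; }

/* Read the hardware's view of the write pointer. */
static uint64_t my_ring_get_wptr(struct my_ring *ring)
{
    if (ring->use_doorbell)
        return *ring->wptr_cpu;    /* hardware keeps this slot current */
    return mmio_read64(ring->wptr_reg_lo, ring->wptr_reg_hi);
}

/* Publish the cached write pointer to the hardware. */
static void my_ring_set_wptr(struct my_ring *ring)
{
    if (ring->use_doorbell) {
        *ring->wptr_cpu = ring->wptr;                    /* publish first */
        ring_doorbell(ring->doorbell_index, ring->wptr); /* then ring the bell */
    } else {
        mmio_write64(ring->wptr_reg_lo, ring->wptr_reg_hi, ring->wptr);
    }
}
```

The ordering in the doorbell path (memory update before the doorbell write) is what lets the engine read a consistent pointer when the bell fires.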
amdgpu_jpeg.c | JPEG ring bookkeeping and tests: amdgpu_jpeg_ring_begin_use(), amdgpu_jpeg_ring_end_use() (drops the submission count and re-arms the idle work), amdgpu_jpeg_dec_ring_test_ring(), amdgpu_jpeg_dec_set_reg(), amdgpu_jpeg_dec_ring_test_ib() … |
gfx_v9_0.c | GFX v9 ring support: KIQ queue management (gfx_v9_0_kiq_map_queues(), gfx_v9_0_kiq_unmap_queues(), gfx_v9_0_kiq_query_status(), gfx_v9_0_kiq_setting(), gfx_v9_0_kiq_init_register(), gfx_v9_0_kiq_fini_register(), gfx_v9_0_kiq_init_queue(), gfx_v9_0_kcq_init_queue(), gfx_v9_0_kiq_resume(), gfx_v9_0_kcq_resume(), gfx_v9_0_kiq_read_clock()), MQD and CP bring-up (gfx_v9_0_mqd_init(), gfx_v9_0_mqd_set_priority(), gfx_v9_0_compute_ring_init(), gfx_v9_0_sw_init(), gfx_v9_0_cp_gfx_start(), gfx_v9_0_cp_gfx_resume(), gfx_v9_0_cp_resume()), GFX/compute pointer accessors (gfx_v9_0_ring_get_rptr_gfx(), gfx_v9_0_ring_get_wptr_gfx(), gfx_v9_0_ring_set_wptr_gfx(), gfx_v9_0_ring_get_rptr_compute(), gfx_v9_0_ring_get_wptr_compute(), gfx_v9_0_ring_set_wptr_compute()), packet emission (gfx_v9_0_write_data_to_reg(), gfx_v9_0_wait_reg_mem(), gfx_v9_0_ring_emit_hdp_flush(), gfx_v9_0_ring_emit_ib_gfx(), gfx_v9_0_ring_emit_ib_compute(), gfx_v9_0_ring_emit_fence(), gfx_v9_0_ring_emit_fence_kiq(), gfx_v9_0_ring_emit_pipeline_sync(), gfx_v9_0_ring_emit_vm_flush(), gfx_v9_0_ring_emit_gds_switch(), gfx_v9_0_ring_emit_rreg(), gfx_v9_0_ring_emit_wreg(), gfx_v9_0_ring_emit_reg_wait(), gfx_v9_0_ring_emit_reg_write_reg_wait(), gfx_v9_0_emit_mem_sync(), gfx_v9_0_emit_wave_limit_cs(), gfx_v9_0_emit_wave_limit()), CE/DE metadata and preemption (gfx_v9_0_ring_emit_ce_meta(), gfx_v9_0_ring_emit_de_meta(), gfx_v9_0_ring_patch_cntl(), gfx_v9_0_ring_patch_ce_meta(), gfx_v9_0_ring_patch_de_meta(), gfx_v9_0_ring_preempt_ib(), gfx_v9_0_ring_emit_frame_cntl(), gfx_v9_ring_emit_cntxcntl(), gfx_v9_ring_emit_sb(), gfx_v9_0_ring_emit_init_cond_exec(), gfx_v9_0_ring_emit_patch_cond_exec(), gfx_v9_0_ring_soft_recovery()), plus tests, workarounds and IRQ handling (gfx_v9_0_ring_test_ring(), gfx_v9_0_ring_test_ib(), gfx_v9_0_do_edc_gds_workarounds(), gfx_v9_0_do_edc_gpr_workarounds(), gfx_v9_0_eop_irq(), gfx_v9_0_fault()) … |
jpeg_v2_5.c | JPEG v2.5/v2.6 decode ring: the "Set ring and irq function pointers" comment, jpeg_v2_5_sw_init() (doorbell index and VM hub assignment, then amdgpu_ring_init()), jpeg_v2_5_hw_init(), jpeg_v2_5_start(), jpeg_v2_5_dec_ring_get_rptr(), jpeg_v2_5_dec_ring_get_wptr(), jpeg_v2_5_dec_ring_set_wptr(), jpeg_v2_6_dec_ring_insert_start(), jpeg_v2_6_dec_ring_insert_end() … |
vcn_v2_5.c | VCN v2.5 decode/encode rings: vcn_v2_5_sw_init() (doorbell index and VM hub assignment, then amdgpu_ring_init()), vcn_v2_5_hw_init(), vcn_v2_5_start_dpg_mode(), vcn_v2_5_start(), vcn_v2_5_sriov_start(), vcn_v2_5_pause_dpg_mode(), and the pointer accessors vcn_v2_5_dec_ring_get_rptr(), vcn_v2_5_dec_ring_get_wptr(), vcn_v2_5_dec_ring_set_wptr(), vcn_v2_5_enc_ring_get_rptr(), vcn_v2_5_enc_ring_get_wptr(), vcn_v2_5_enc_ring_set_wptr() … |
amdgpu_ih.c | amdgpu_ih_ring_init() sets up the interrupt-handler (IH) ring: it aligns the ring size, allocates ring_size + 8 bytes with dma_alloc_coherent() so the write and read pointers can live at the end of the same allocation, and points wptr_cpu at ring[ring_size / 4] with rptr_cpu at the following dword … |
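The layout described above (ring entries followed by two pointer dwords in one coherent buffer) can be illustrated with a small user-space sketch; plain calloc() stands in for dma_alloc_coherent() and struct my_ih is a hypothetical stand-in for the IH structure:

```c
#include <stdint.h>
#include <stdlib.h>

/* Hypothetical IH-like ring: ring_size bytes of 32-bit entries, followed
 * by one 32-bit wptr slot and one 32-bit rptr slot in the same buffer. */
struct my_ih {
    uint32_t *ring;
    unsigned  ring_size;            /* in bytes, rounded to a power of two */
    volatile uint32_t *wptr_cpu;
    volatile uint32_t *rptr_cpu;
};

static int my_ih_init(struct my_ih *ih, unsigned ring_size)
{
    /* Round the requested size up to a power of two, as the driver does. */
    unsigned sz = 1;
    while (sz < ring_size)
        sz <<= 1;

    /* One allocation: ring entries plus 8 trailing bytes for wptr/rptr. */
    ih->ring = calloc(1, sz + 8);
    if (!ih->ring)
        return -1;

    ih->ring_size = sz;
    ih->wptr_cpu  = &ih->ring[sz / 4];        /* first trailing dword  */
    ih->rptr_cpu  = &ih->ring[(sz / 4) + 1];  /* second trailing dword */
    return 0;
}
```

Keeping the pointers in the same allocation means a single DMA mapping covers both the ring and its bookkeeping words.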
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/ |
radeon_ib.c | Indirect buffer (IB) support: commands are placed in IBs rather than directly on the command ring, and the hardware fetches them from the IB when the requested ring executes it. radeon_ib_get() allocates an IB for a given ring index, radeon_ib_schedule() schedules an IB on the associated ring (on SI, two parallel engines are fed from the primary ring), and radeon_ib_ring_tests() exercises the IBs on each ring … |
/third_party/mesa3d/src/gallium/drivers/freedreno/a3xx/ |
fd3_query.c | occlusion_get_sample() emits the sample-count setup into the ring: a CP_SET_CONSTANT packet writing REG_A3XX_RB_SAMPLE_COUNT_ADDR, a write to REG_A3XX_RB_SAMPLE_COUNT_CONTROL selecting copy mode, and a CP_DRAW_INDX packet … |
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
sdma_v4_0.c | linux-5.10 copy of the SDMA v4.0 ring support listed above (pointer accessors, command emission, bring-up and test paths); in this version sdma_v4_0_ring_get_rptr() reads the read pointer from the writeback slot adev->wb.wb[ring->rptr_offs] … |
sdma_v3_0.c | SDMA v3.0 ring support: each SDMA instance supports one ring buffer used for gfx and has its own set of resources (ring buffer, IBs, etc.). Matches: sdma_v3_0_ring_get_rptr(), sdma_v3_0_ring_get_wptr(), sdma_v3_0_ring_set_wptr(), sdma_v3_0_ring_insert_nop(), sdma_v3_0_ring_emit_ib(), sdma_v3_0_ring_emit_hdp_flush(), sdma_v3_0_ring_emit_fence(), sdma_v3_0_gfx_resume(), sdma_v3_0_ring_test_ring(), sdma_v3_0_ring_test_ib(), sdma_v3_0_ring_pad_ib(), sdma_v3_0_ring_emit_pipeline_sync(), sdma_v3_0_ring_emit_vm_flush(), sdma_v3_0_ring_emit_wreg(), sdma_v3_0_sw_init() … |
amdgpu_jpeg.c | linux-5.10 copy of the JPEG ring helpers: amdgpu_jpeg_ring_begin_use(), amdgpu_jpeg_ring_end_use(), amdgpu_jpeg_dec_ring_test_ring() (writes 0xCAFEDEAD to the JPEG pitch register and polls it back), amdgpu_jpeg_dec_set_reg(), amdgpu_jpeg_dec_ring_test_ib() … |
vcn_v2_5.c | linux-5.10 copy of the VCN v2.5 ring support: vcn_v2_5_sw_init(), vcn_v2_5_hw_init(), vcn_v2_5_start_dpg_mode(), vcn_v2_5_start(), vcn_v2_5_sriov_start(), vcn_v2_5_pause_dpg_mode(), and the decode/encode pointer accessors (vcn_v2_5_dec_ring_get_rptr(), vcn_v2_5_dec_ring_get_wptr(), vcn_v2_5_dec_ring_set_wptr(), vcn_v2_5_enc_ring_get_rptr(), vcn_v2_5_enc_ring_get_wptr(), vcn_v2_5_enc_ring_set_wptr()) … |
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/ixgbevf/ |
ixgbevf.h | TX/RX ring definitions for the ixgbevf driver: XDP ring state macros (ring_is_xdp(), set_ring_xdp(), clear_ring_xdp()), descriptor ring memory fields (desc, dma), the ring_uses_large_buffer() macro, and inline helpers ixgbevf_rx_bufsz(), ixgbevf_rx_pg_order(), ixgbevf_desc_unused(), ixgbevf_write_tail(); each q_vector keeps a pointer to its linked list of rings … |
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/ixgbevf/ |
ixgbevf.h | linux-6.6 copy of the same ixgbevf ring definitions: XDP ring state macros, descriptor ring memory fields, ring_uses_large_buffer(), and the ixgbevf_rx_bufsz(), ixgbevf_rx_pg_order(), ixgbevf_desc_unused(), ixgbevf_write_tail() helpers … |
/third_party/mesa3d/src/virtio/vulkan/ |
vn_instance.c | Venus instance ring: a comment notes the layout must not exceed 2 KiB so the ring fits in a 4K page; vn_instance_init_ring() allocates and maps the ring shmem, initializes the ring mutex, and calls vn_ring_init(). Other matches: vn_instance_wait_roundtrip(), vn_instance_submission_get_ring_submit(), vn_instance_submission_prepare(), vn_instance_ring_submit_locked() … |
/kernel/linux/linux-6.6/drivers/usb/cdns3/ |
cdnsp-mem.c | CDNSP ring memory management: generic ring segments are allocated from the ring pool with their DMA addresses set, and cdnsp_link_rings() splices newly allocated segments into an existing ring (setting the toggle cycle for the new segments when needed). Other matches: cdnsp_insert_segment_mapping(), cdnsp_update_stream_segment_mapping(), cdnsp_remove_stream_mapping(), cdnsp_update_stream_mapping(), cdnsp_ring_free(), cdnsp_initialize_ring_info(), cdnsp_ring_alloc(), cdnsp_ring_expansion() … |
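The splice performed by cdnsp_link_rings() can be shown on a circular singly-linked list of segments; the my_seg/my_seg_ring types below are hypothetical stand-ins for the cdnsp structures, and the sketch omits the toggle-cycle handling:

```c
#include <stddef.h>

/* Hypothetical segment ring: segments form a circular singly-linked list,
 * and the ring remembers which segment holds the enqueue pointer. */
struct my_seg {
    struct my_seg *next;
};

struct my_seg_ring {
    struct my_seg *enq_seg;   /* segment currently being enqueued into */
    unsigned int   num_segs;
};

/* Splice the pre-linked chain first -> ... -> last into the ring right
 * after the enqueue segment. */
static void my_ring_link_segments(struct my_seg_ring *ring,
                                  struct my_seg *first, struct my_seg *last,
                                  unsigned int num_segs)
{
    struct my_seg *next;

    if (!ring || !first || !last)
        return;

    next = ring->enq_seg->next;   /* old successor of the enqueue segment */
    ring->enq_seg->next = first;  /* enqueue segment now feeds the new chain */
    last->next = next;            /* new chain rejoins the original ring */
    ring->num_segs += num_segs;
}
```

Because the new chain is linked before being attached, the ring stays circular at every step of the splice.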
/kernel/linux/linux-6.6/drivers/net/netdevsim/ |
ethtool.c | netdevsim ethtool ring parameters: nsim_get_ringparam() copies the cached struct ethtool_ringparam out of the netdevsim state, and nsim_set_ringparam() stores the requested rx_pending, rx_jumbo_pending and related fields back into ns->ethtool.ring … |
/kernel/linux/linux-5.10/drivers/net/ethernet/hisilicon/hns3/hns3vf/ |
hclgevf_cmd.c | HNS3 VF command-queue ring: the cmq_ring_to_dev() macro, hclgevf_ring_space() (free-descriptor calculation from next_to_use/next_to_clean), hclgevf_is_valid_csq_clean_head(), hclgevf_cmd_config_regs(), hclgevf_alloc_cmd_desc(), hclgevf_free_cmd_desc(), hclgevf_alloc_cmd_queue() … |
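hclgevf_ring_space() above is the standard descriptor-ring occupancy calculation: with producer index next_to_use (ntu) and consumer index next_to_clean (ntc), the used count is (ntu - ntc) mod desc_num, and one slot is held back so a full ring never looks identical to an empty one. A standalone sketch of that arithmetic, using a hypothetical my_cmq_ring in place of the HNS3 structures:

```c
#include <stdio.h>

/* Hypothetical command-queue ring indexed by next_to_use / next_to_clean. */
struct my_cmq_ring {
    int desc_num;       /* total descriptors in the ring */
    int next_to_use;    /* producer index */
    int next_to_clean;  /* consumer index */
};

/* Free descriptors; one slot is always reserved so "full" and "empty"
 * remain distinguishable. */
static int my_ring_space(const struct my_cmq_ring *ring)
{
    int used = (ring->next_to_use - ring->next_to_clean + ring->desc_num)
               % ring->desc_num;

    return ring->desc_num - used - 1;
}

int main(void)
{
    struct my_cmq_ring ring = {
        .desc_num = 8, .next_to_use = 6, .next_to_clean = 2,
    };

    /* 4 descriptors in flight, so 8 - 4 - 1 = 3 slots are free. */
    printf("free = %d\n", my_ring_space(&ring));
    return 0;
}
```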
/kernel/linux/linux-5.10/drivers/gpu/drm/msm/adreno/ |
a6xx_gpu.c | Adreno a6xx ring handling: a6xx_idle(), a6xx_flush() (emits CP_WHERE_AM_I with the shadow read-pointer address, then updates the write pointer under ring->preempt_lock), get_stats_counter(), a6xx_set_pagetable(), a6xx_submit(), a6xx_cp_init(), a6xx_fault_detect_irq(), a6xx_get_rptr() … |
a5xx_preempt.c | Adreno a5xx preemption: update_wptr() writes the most recent write pointer for a ring into the hardware under ring->preempt_lock, get_next_ring() scans the rings and treats one as empty when its cached wptr equals the hardware rptr, and a5xx_preempt_trigger() / preempt_init_ring() drive the switch … |
/kernel/linux/linux-5.10/drivers/usb/host/ |
xhci-trace.h | xHCI TRB tracepoints: an event class taking (struct xhci_ring *ring, struct xhci_generic_trb *trb) via TP_PROTO/TP_ARGS that records ring->type, instantiated by several per-path event definitions … |
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/ |
mock_engine.c | i915 mock engine ring: mock_ring() allocates the intel_ring and its storage in a single kzalloc(), initializes the kref, sets size and effective_size, and points vaddr just past the struct; mock_ring_free() releases it … |
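The allocation trick above (object header and ring storage in one block, with vaddr pointing just past the header) translates directly to user space; calloc() stands in for kzalloc() and my_mock_ring is a hypothetical type:

```c
#include <stdlib.h>

struct my_mock_ring {
    unsigned int size;
    unsigned int effective_size;
    void *vaddr;          /* points at the storage that follows the header */
    /* ring contents follow immediately after this struct */
};

static struct my_mock_ring *my_mock_ring_alloc(unsigned int sz)
{
    /* One allocation holds both the header and sz bytes of ring space. */
    struct my_mock_ring *ring = calloc(1, sizeof(*ring) + sz);

    if (!ring)
        return NULL;

    ring->size = sz;
    ring->effective_size = sz;
    ring->vaddr = ring + 1;   /* first byte after the header */
    return ring;
}
```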
/kernel/linux/linux-6.6/drivers/gpu/drm/msm/adreno/ |
a5xx_preempt.c | linux-6.6 copy of the a5xx preemption helpers: update_wptr(), get_next_ring(), a5xx_preempt_trigger(), preempt_init_ring(), with the same wptr-under-preempt_lock and "empty when wptr == rptr" logic as the linux-5.10 copy above … |
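get_next_ring() walks the rings in priority order and picks the first one whose cached write pointer differs from the hardware read pointer, i.e. the first ring with work still queued. A simplified sketch of that selection with hypothetical my_gpu/my_ring types; the real code also takes each ring's preempt_lock around the wptr read, which is omitted here:

```c
#include <stdint.h>
#include <stddef.h>

#define MY_NUM_RINGS 4

/* Hypothetical ringbuffers: rings[0] is the highest priority. */
struct my_ring {
    uint32_t wptr;   /* last write pointer submitted by the driver */
    uint32_t rptr;   /* read pointer last reported by the hardware */
};

struct my_gpu {
    struct my_ring rings[MY_NUM_RINGS];
};

/* Return the highest-priority ring that still has commands queued,
 * or NULL when every ring has been fully consumed. */
static struct my_ring *my_get_next_ring(struct my_gpu *gpu)
{
    for (int i = 0; i < MY_NUM_RINGS; i++) {
        struct my_ring *ring = &gpu->rings[i];

        if (ring->wptr != ring->rptr)   /* not empty: work pending */
            return ring;
    }
    return NULL;
}
```

Preemption then consists of switching the hardware to the returned ring and letting lower-priority rings resume once it drains.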
/kernel/linux/linux-5.10/drivers/net/ethernet/apm/xgene-v2/ |
ring.c | XGene-V2 descriptor rings: xge_setup_desc() walks the raw descriptors and stores each descriptor's successor DMA address (ring->dma_addr + offset * XGENE_ENET_DESC_SIZE), while xge_update_tx_desc_addr() and xge_update_rx_desc_addr() program the ring base address and reset the head/tail indices … |
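xge_setup_desc() pre-links the descriptor ring by writing, into each raw descriptor, the DMA address of the descriptor that follows it, wrapping at the end. A standalone sketch of that chaining; the sizes and field names (my_raw_desc, next_dma, MY_DESC_SIZE) are assumptions for illustration:

```c
#include <stdint.h>

#define MY_NUM_DESC   16
#define MY_DESC_SIZE  32   /* bytes per hardware descriptor, assumed */

/* Hypothetical raw descriptor: only the next-pointer word matters here. */
struct my_raw_desc {
    uint64_t next_dma;
    uint8_t  pad[MY_DESC_SIZE - sizeof(uint64_t)];
};

struct my_desc_ring {
    struct my_raw_desc desc[MY_NUM_DESC];
    uint64_t dma_addr;     /* DMA address of desc[0] */
};

/* Chain every descriptor to its successor so the hardware can walk the
 * ring on its own; the last descriptor points back to the first. */
static void my_setup_desc(struct my_desc_ring *ring)
{
    for (int i = 0; i < MY_NUM_DESC; i++) {
        int next = (i + 1) % MY_NUM_DESC;

        ring->desc[i].next_dma =
            ring->dma_addr + (uint64_t)next * MY_DESC_SIZE;
    }
}
```

After this setup, updating the ring base register (as the xge_update_*_desc_addr() helpers do) is enough for the engine to follow the chain.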