/kernel/linux/linux-6.6/tools/testing/selftests/net/ |
H A D | psock_tpacket.c | struct ring (line 66) with a per-version walk() callback (71); matches in walk_v1_v2_rx() (220, asserts ring->type == PACKET_RX_RING and loops while __v1_v2_rx_kernel_ready() on each frame), get_next_frame() (354), walk_tx() (369), walk_v1_v2() (500), walk_v3_rx() (581), walk_v3() (622), __v1_v2_fill() (630), __v3_fill() (646), setup_ring() (667), mmap_ring() (708), bind_ring() (726), walk_ring() (746), unmap_ring() (751); ring local in test_tpacket() (805) [all...] |
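The selftest above walks a memory-mapped AF_PACKET ring in all three TPACKET versions. As a rough, self-contained illustration of the kind of v2 RX walk that walk_v1_v2_rx() performs (simplified ring geometry and names, not the selftest's own code; needs CAP_NET_RAW on Linux):

```c
/* Minimal TPACKET_V2 RX ring walk - illustrative sketch only. */
#include <arpa/inet.h>
#include <linux/if_ether.h>
#include <linux/if_packet.h>
#include <poll.h>
#include <stdio.h>
#include <sys/mman.h>
#include <sys/socket.h>
#include <unistd.h>

int main(void)
{
	int fd = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL));
	if (fd < 0) { perror("socket"); return 1; }

	int ver = TPACKET_V2;
	struct tpacket_req req = {
		.tp_block_size = 4096,	/* one page per block */
		.tp_frame_size = 2048,	/* two frames per block */
		.tp_block_nr   = 64,
		.tp_frame_nr   = 128,	/* block_nr * frames-per-block */
	};
	if (setsockopt(fd, SOL_PACKET, PACKET_VERSION, &ver, sizeof(ver)) ||
	    setsockopt(fd, SOL_PACKET, PACKET_RX_RING, &req, sizeof(req))) {
		perror("setsockopt"); return 1;
	}

	size_t len = (size_t)req.tp_block_size * req.tp_block_nr;
	char *ring = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
	if (ring == MAP_FAILED) { perror("mmap"); return 1; }

	for (unsigned int frame = 0, seen = 0; seen < 16;
	     frame = (frame + 1) % req.tp_frame_nr) {
		struct tpacket2_hdr *hdr =
			(struct tpacket2_hdr *)(ring + (size_t)frame * req.tp_frame_size);

		/* Wait until the kernel hands this frame to user space. */
		while (!(hdr->tp_status & TP_STATUS_USER)) {
			struct pollfd pfd = { .fd = fd, .events = POLLIN };
			poll(&pfd, 1, -1);
		}

		printf("frame %u: %u bytes\n", frame, hdr->tp_len);
		seen++;

		/* Hand the frame back to the kernel and advance. */
		hdr->tp_status = TP_STATUS_KERNEL;
	}

	munmap(ring, len);
	close(fd);
	return 0;
}
```

The selftest additionally inserts memory barriers between reading tp_status and touching frame data; this sketch omits them for brevity.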
/kernel/linux/linux-5.10/drivers/net/ethernet/amd/xgbe/ |
H A D | xgbe-desc.c | matches in xgbe_free_ring() (line 122, walks ring->rdata for rdesc_count entries, frees ring->rdata and unmaps ring->rx_hdr_pa.pages), xgbe_init_ring() (215), xgbe_map_rx_buffer() (356); ring locals in xgbe_wrapper_tx_descriptor_init() (390), xgbe_wrapper_rx_descriptor_init() (431), xgbe_map_tx_skb() (526, ring = channel->tx_ring) [all...] |
/kernel/linux/linux-6.6/drivers/net/ethernet/amd/xgbe/ |
H A D | xgbe-desc.c | matches in xgbe_free_ring() (line 122, walks ring->rdata for rdesc_count entries, frees ring->rdata and unmaps ring->rx_hdr_pa.pages), xgbe_init_ring() (215), xgbe_map_rx_buffer() (356); ring locals in xgbe_wrapper_tx_descriptor_init() (390), xgbe_wrapper_rx_descriptor_init() (431), xgbe_map_tx_skb() (526, ring = channel->tx_ring) [all...] |
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/ |
H A D | vcn_sw_ring.c | software-ring emit helpers that push command words with amdgpu_ring_write(): vcn_dec_sw_ring_emit_fence() (line 27, writes VCN_DEC_SW_CMD_FENCE, addr, upper_32_bits(addr), seq, then VCN_DEC_SW_CMD_TRAP), vcn_dec_sw_ring_insert_end() (39, writes VCN_DEC_SW_CMD_END), vcn_dec_sw_ring_emit_ib() (44), vcn_dec_sw_ring_emit_reg_wait() (56), vcn_dec_sw_ring_emit_vm_flush() (65), vcn_dec_sw_ring_emit_wreg() (80) [all...] |
H A D | uvd_v6_0.c | ring rptr/wptr accessors uvd_v6_0_ring_get_rptr() (77), uvd_v6_0_enc_ring_get_rptr() (91), uvd_v6_0_ring_get_wptr() (107), uvd_v6_0_enc_ring_get_wptr() (121), uvd_v6_0_ring_set_wptr() (138), uvd_v6_0_enc_ring_set_wptr() (152); test and message helpers uvd_v6_0_enc_ring_test_ring() (170), uvd_v6_0_enc_get_create_msg() (208), uvd_v6_0_enc_get_destroy_msg() (271), uvd_v6_0_enc_ring_test_ib() (332), uvd_v6_0_ring_test_ring() (980); ring locals in uvd_v6_0_sw_init() (380), uvd_v6_0_hw_init() (467), uvd_v6_0_start() (722); emit helpers uvd_v6_0_ring_emit_fence() (919), uvd_v6_0_enc_ring_emit_fence() (951), uvd_v6_0_ring_emit_hdp_flush() (968), uvd_v6_0_ring_emit_ib() (1018), uvd_v6_0_enc_ring_emit_ib() (1046), uvd_v6_0_ring_emit_wreg() (1060), uvd_v6_0_ring_emit_vm_flush() (1071), uvd_v6_0_ring_emit_pipeline_sync() (1086), uvd_v6_0_ring_insert_nop() (1103), uvd_v6_0_enc_ring_emit_pipeline_sync() (1115), uvd_v6_0_enc_ring_insert_end() (1126), uvd_v6_0_enc_ring_emit_vm_flush() (1131) [all...] |
H A D | uvd_v7_0.c | ring rptr/wptr accessors uvd_v7_0_ring_get_rptr() (71, reads RREG32_SOC15(UVD, ring->me, mmUVD_RBC_RB_RPTR)), uvd_v7_0_enc_ring_get_rptr() (85), uvd_v7_0_ring_get_wptr() (102), uvd_v7_0_enc_ring_get_wptr() (116), uvd_v7_0_ring_set_wptr() (136), uvd_v7_0_enc_ring_set_wptr() (150); test and message helpers uvd_v7_0_enc_ring_test_ring() (175), uvd_v7_0_enc_get_create_msg() (216), uvd_v7_0_enc_get_destroy_msg() (279), uvd_v7_0_enc_ring_test_ib() (339), uvd_v7_0_ring_test_ring() (1248); ring locals in uvd_v7_0_sw_init() (400), uvd_v7_0_hw_init() (524), uvd_v7_0_sriov_start() (786), uvd_v7_0_start() (952), uvd_v7_0_ring_patch_cs_in_place() (1289, ring = to_amdgpu_ring(job->base.sched)); emit helpers uvd_v7_0_ring_emit_fence() (1177), uvd_v7_0_enc_ring_emit_fence() (1218), uvd_v7_0_ring_emit_hdp_flush() (1236), uvd_v7_0_ring_emit_ib() (1317), uvd_v7_0_enc_ring_emit_ib() (1350), uvd_v7_0_ring_emit_wreg() (1364), uvd_v7_0_ring_emit_reg_wait() (1380), uvd_v7_0_ring_emit_vm_flush() (1399), uvd_v7_0_ring_insert_nop() (1414), uvd_v7_0_enc_ring_insert_end() (1427), uvd_v7_0_enc_ring_emit_reg_wait() (1432), uvd_v7_0_enc_ring_emit_vm_flush() (1442), uvd_v7_0_enc_ring_emit_wreg() (1455) [all...] |
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/ |
H A D | cik_sdma.c | each sDMA engine supports one ring buffer used for gfx and is programmed much like the 3D engine (ring buffer, IBs, etc.), but sDMA has its own ... Matches in cik_sdma_get_rptr() (63, read pointer fetched from the writeback slot rdev->wb.wb[ring->rptr_offs/4], with a special case for R600_RING_TYPE_DMA_INDEX), cik_sdma_get_wptr() (90), cik_sdma_set_wptr() (111), cik_sdma_semaphore_ring_emit() (228), cik_sdma_ring_test() (645), cik_sdma_ib_test() (702), cik_sdma_is_lockup() (775), cik_dma_vm_flush() (947); ring locals in cik_sdma_ring_ib_execute() (136), cik_sdma_hdp_flush_ring_emit() (172), cik_sdma_fence_ring_emit() (203), cik_sdma_gfx_resume() (367), cik_copy_dma() (587) [all...] |
H A D | radeon_fence.c | fences track when submitted command streams are no longer in use by the associated ring on the GPU; "ring" here is the ring index a fence belongs to. Matches in radeon_fence_write() (68), radeon_fence_read() (89), radeon_fence_schedule_check() (114), radeon_fence_emit() (135), radeon_fence_activity() (205), radeon_fence_process() (328), radeon_fence_seq_signaled() (348), radeon_fence_wait_next() (651), radeon_fence_wait_empty() (678), radeon_fence_count_emitted() (739), radeon_fence_driver_start_ring() (833), radeon_fence_driver_init_ring() (883), radeon_fence_driver_force_completion() (963); ring locals in radeon_fence_check_lockup() (279), radeon_fence_is_signaled() (366, ring = fence->ring), radeon_fence_driver_init() (913), radeon_fence_driver_fini() (935) [all...] |
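The fence driver above keeps one monotonically increasing sequence number per ring and decides whether a fence has signaled by comparing it against the last value the GPU wrote back. A tiny stand-alone sketch of that idea (hypothetical names, 64-bit sequence numbers, no locking; not the radeon implementation):

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Per-ring fence bookkeeping, loosely modelled on radeon_fence_driver. */
struct fence_ring {
	uint64_t sync_seq;	/* last sequence number emitted to the ring */
	uint64_t last_seq;	/* last sequence number the GPU reported done */
};

/* Emit a fence: reserve the next sequence number for this ring. */
static uint64_t fence_emit(struct fence_ring *r)
{
	return ++r->sync_seq;
}

/* Has the fence with sequence number @seq signaled on this ring? */
static bool fence_seq_signaled(const struct fence_ring *r, uint64_t seq)
{
	return r->last_seq >= seq;
}

/* Emitted but not yet signaled fences (cf. radeon_fence_count_emitted). */
static uint64_t fence_count_emitted(const struct fence_ring *r)
{
	return r->sync_seq - r->last_seq;
}

int main(void)
{
	struct fence_ring ring = { 0 };
	uint64_t a = fence_emit(&ring), b = fence_emit(&ring);

	ring.last_seq = a;	/* pretend the GPU wrote back sequence 'a' */
	printf("a signaled: %d, b signaled: %d, outstanding: %llu\n",
	       fence_seq_signaled(&ring, a), fence_seq_signaled(&ring, b),
	       (unsigned long long)fence_count_emitted(&ring));
	return 0;
}
```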
H A D | r600_dma.c | the DMA engine is programmed similarly to the 3D engine (ring buffer, IBs, etc.), but the ... Matches in r600_dma_get_rptr() (51, reads rdev->wb.wb[ring->rptr_offs/4]), r600_dma_get_wptr() (72), r600_dma_set_wptr() (86), r600_dma_is_lockup() (208), r600_dma_ring_test() (230), r600_dma_semaphore_ring_emit() (313), r600_dma_ib_test() (337); ring locals in r600_dma_resume() (122, ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]), r600_dma_fence_ring_emit() (290), r600_dma_ring_ib_execute() (407), r600_copy_dma() (452) [all...] |
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/ |
H A D | cik_sdma.c | each sDMA engine supports one ring buffer used for gfx and is programmed much like the 3D engine (ring buffer, IBs, etc.), but sDMA has its own ... Matches in cik_sdma_get_rptr() (62, read pointer fetched from the writeback slot rdev->wb.wb[ring->rptr_offs/4], with a special case for R600_RING_TYPE_DMA_INDEX), cik_sdma_get_wptr() (89), cik_sdma_set_wptr() (110), cik_sdma_semaphore_ring_emit() (227), cik_sdma_ring_test() (644), cik_sdma_ib_test() (701), cik_sdma_is_lockup() (774), cik_dma_vm_flush() (944); ring locals in cik_sdma_ring_ib_execute() (135), cik_sdma_hdp_flush_ring_emit() (171), cik_sdma_fence_ring_emit() (202), cik_sdma_gfx_resume() (366), cik_copy_dma() (586) [all...] |
/kernel/linux/linux-5.10/drivers/net/ethernet/aquantia/atlantic/ |
H A D | aq_vec.c | per-vector ring array member struct aq_ring_s ring[AQ_CFG_TCS_MAX][2] (line 27); ring locals in aq_vec_poll() (37, indexes ring[AQ_VEC_RX_ID]/ring[AQ_VEC_TX_ID] and bumps rx.polls under u64_stats_update), aq_vec_ring_alloc() (137), aq_vec_init() (178), aq_vec_start() (223), aq_vec_stop() (248), aq_vec_deinit() (265), aq_vec_ring_free() (294) [all...] |
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
H A D | amdgpu_ring.h | ring API header: direct submission to the ring buffer during init and reset (74); sync_seq is protected by the ring emission lock (97); fence-driver prototypes taking a ring - amdgpu_fence_driver_force_completion() (111), amdgpu_fence_driver_init_ring() (113), amdgpu_fence_driver_start_ring() (115), amdgpu_fence_emit() (120), amdgpu_fence_emit_polling() (122), amdgpu_fence_process() (124), amdgpu_fence_wait_empty() (125), amdgpu_fence_wait_polling() (126); volatile uint32_t *ring buffer member (209); inline helpers amdgpu_ring_set_preempt_cond_exec() (296), amdgpu_ring_clear_ring() (302), amdgpu_ring_write() (310), amdgpu_ring_write_multiple() (319) [all...] |
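The inline helpers listed above (amdgpu_ring_write(), amdgpu_ring_clear_ring(), amdgpu_ring_write_multiple()) share one pattern: the ring is a power-of-two array of dwords, the write pointer is wrapped with a mask, and the dwords reserved for the current submission are counted down as they are written. A stand-alone sketch of that pattern, with hypothetical names rather than the kernel's structures:

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified model of a GPU command ring: a power-of-two dword buffer. */
struct dword_ring {
	uint32_t *buf;
	uint32_t ptr_mask;	/* size_dw - 1, size_dw is a power of two */
	uint32_t wptr;		/* write pointer, in dwords */
	int count_dw;		/* dwords still reserved for the current submission */
};

static void ring_init(struct dword_ring *r, uint32_t size_dw)
{
	assert((size_dw & (size_dw - 1)) == 0);
	r->buf = calloc(size_dw, sizeof(*r->buf));
	r->ptr_mask = size_dw - 1;
	r->wptr = 0;
	r->count_dw = 0;
}

/* Fill the whole ring with a NOP pattern (cf. amdgpu_ring_clear_ring). */
static void ring_clear(struct dword_ring *r, uint32_t nop)
{
	for (uint32_t i = 0; i <= r->ptr_mask; i++)
		r->buf[i] = nop;
}

/* Write one dword at the wrapped write pointer (cf. amdgpu_ring_write). */
static void ring_write(struct dword_ring *r, uint32_t v)
{
	if (r->count_dw <= 0) {
		fprintf(stderr, "writing more dwords than reserved\n");
		return;
	}
	r->buf[r->wptr++ & r->ptr_mask] = v;
	r->wptr &= r->ptr_mask;
	r->count_dw--;
}

int main(void)
{
	struct dword_ring ring;

	ring_init(&ring, 8);
	ring_clear(&ring, 0xdeadbeef /* hypothetical NOP encoding */);

	ring.count_dw = 3;		/* reserve space for a 3-dword packet */
	ring_write(&ring, 0x1);
	ring_write(&ring, 0x2);
	ring_write(&ring, 0x3);
	printf("wptr now %u\n", ring.wptr);

	free(ring.buf);
	return 0;
}
```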
H A D | uvd_v6_0.c | ring rptr/wptr accessors uvd_v6_0_ring_get_rptr() (77), uvd_v6_0_enc_ring_get_rptr() (91), uvd_v6_0_ring_get_wptr() (107), uvd_v6_0_enc_ring_get_wptr() (121), uvd_v6_0_ring_set_wptr() (138), uvd_v6_0_enc_ring_set_wptr() (152); test and message helpers uvd_v6_0_enc_ring_test_ring() (170), uvd_v6_0_enc_get_create_msg() (208), uvd_v6_0_enc_get_destroy_msg() (271), uvd_v6_0_enc_ring_test_ib() (331), uvd_v6_0_ring_test_ring() (958); ring locals in uvd_v6_0_sw_init() (388), uvd_v6_0_hw_init() (475), uvd_v6_0_start() (704); emit helpers uvd_v6_0_ring_emit_fence() (899), uvd_v6_0_enc_ring_emit_fence() (929), uvd_v6_0_ring_emit_hdp_flush() (946), uvd_v6_0_ring_emit_ib() (994), uvd_v6_0_enc_ring_emit_ib() (1020), uvd_v6_0_ring_emit_wreg() (1034), uvd_v6_0_ring_emit_vm_flush() (1045), uvd_v6_0_ring_emit_pipeline_sync() (1060), uvd_v6_0_ring_insert_nop() (1077), uvd_v6_0_enc_ring_emit_pipeline_sync() (1089), uvd_v6_0_enc_ring_insert_end() (1100), uvd_v6_0_enc_ring_emit_vm_flush() (1105) [all...] |
H A D | uvd_v7_0.c | ring rptr/wptr accessors uvd_v7_0_ring_get_rptr() (71, reads RREG32_SOC15(UVD, ring->me, mmUVD_RBC_RB_RPTR)), uvd_v7_0_enc_ring_get_rptr() (85), uvd_v7_0_ring_get_wptr() (102), uvd_v7_0_enc_ring_get_wptr() (116), uvd_v7_0_ring_set_wptr() (136), uvd_v7_0_enc_ring_set_wptr() (150); test and message helpers uvd_v7_0_enc_ring_test_ring() (175), uvd_v7_0_enc_get_create_msg() (216), uvd_v7_0_enc_get_destroy_msg() (279), uvd_v7_0_enc_ring_test_ib() (338), uvd_v7_0_ring_test_ring() (1223); ring locals in uvd_v7_0_sw_init() (407), uvd_v7_0_hw_init() (529), uvd_v7_0_sriov_start() (765), uvd_v7_0_start() (931), uvd_v7_0_ring_patch_cs_in_place() (1262, ring = to_amdgpu_ring(p->entity->rq->sched)); emit helpers uvd_v7_0_ring_emit_fence() (1154), uvd_v7_0_enc_ring_emit_fence() (1193), uvd_v7_0_ring_emit_hdp_flush() (1211), uvd_v7_0_ring_emit_ib() (1289), uvd_v7_0_enc_ring_emit_ib() (1320), uvd_v7_0_ring_emit_wreg() (1334), uvd_v7_0_ring_emit_reg_wait() (1350), uvd_v7_0_ring_emit_vm_flush() (1369), uvd_v7_0_ring_insert_nop() (1384), uvd_v7_0_enc_ring_insert_end() (1397), uvd_v7_0_enc_ring_emit_reg_wait() (1402), uvd_v7_0_enc_ring_emit_vm_flush() (1412), uvd_v7_0_enc_ring_emit_wreg() (1425) [all...] |
/kernel/linux/linux-6.6/virt/kvm/ |
H A D | dirty_ring.c | KVM dirty ring implementation: kvm_dirty_ring_used() (38) returns READ_ONCE(ring->dirty_index) - READ_ONCE(ring->reset_index); kvm_dirty_ring_soft_full() (43) checks that count against ring->soft_limit and kvm_dirty_ring_full() (48) against the ring size; other matches in kvm_dirty_ring_alloc() (74), kvm_dirty_ring_reset() (104), kvm_dirty_ring_push() (171, ring = &vcpu->dirty_ring), kvm_dirty_ring_get_page() (213), kvm_dirty_ring_free() (218) [all...] |
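The occupancy math in kvm_dirty_ring_used() works because dirty_index and reset_index are free-running u32 counters, so the unsigned subtraction stays correct even after the counters wrap. A small sketch of that bookkeeping (hypothetical names, single-threaded, no READ_ONCE), not the KVM code itself:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Free-running producer/consumer indices, as in the KVM dirty ring. */
struct dirty_ring {
	uint32_t size;		/* number of slots */
	uint32_t soft_limit;	/* threshold for "please harvest soon" */
	uint32_t dirty_index;	/* producer: entries pushed */
	uint32_t reset_index;	/* consumer: entries collected */
};

static uint32_t ring_used(const struct dirty_ring *r)
{
	/* Unsigned subtraction is wraparound-safe for free-running counters. */
	return r->dirty_index - r->reset_index;
}

static bool ring_soft_full(const struct dirty_ring *r)
{
	return ring_used(r) >= r->soft_limit;
}

static bool ring_full(const struct dirty_ring *r)
{
	return ring_used(r) >= r->size;
}

int main(void)
{
	struct dirty_ring r = {
		.size = 8, .soft_limit = 6,
		/* Indices near UINT32_MAX to show wraparound still works. */
		.dirty_index = 0xfffffffe, .reset_index = 0xfffffffe,
	};

	for (int i = 0; i < 7; i++)
		r.dirty_index++;	/* push 7 entries; the counter wraps past 0 */

	printf("used=%u soft_full=%d full=%d\n",
	       ring_used(&r), ring_soft_full(&r), ring_full(&r));
	return 0;
}
```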
/kernel/linux/linux-6.6/drivers/net/wireless/ath/ath11k/ |
H A D | dbring.c | direct-buffer ring helpers: ath11k_dbring_bufs_replenish() (38) aligns each buffer to ring->buf_align, fills a magic value, DMA-maps it with dma_map_single() and tracks it in ring->bufs_idr under ring->idr_lock; other matches in ath11k_dbring_fill_bufs() (105), ath11k_dbring_wmi_cfg_setup() (151), ath11k_dbring_set_cfg() (183), ath11k_dbring_buf_setup() (198), ath11k_dbring_srng_setup() (221), ath11k_dbring_buffer_release_event() (268, ring local), ath11k_dbring_srng_cleanup() (378), ath11k_dbring_buf_cleanup() (383) [all...] |
/kernel/linux/linux-5.10/drivers/block/xen-blkback/ |
H A D | blkback.c | xen-blkback backend: a comment notes that filling the ring could drive memory use too high (73), and max_ring_page_order bounds the order of pages used for the shared ring between frontend and backend (109-114); request processing in do_block_io_op()/__do_block_io_op() (143, 1082, 1166), dispatch_rw_block_io() (144, 1185), dispatch_discard_io() (966), dispatch_other_io() (1009), make_response() (147, 1401); persistent-grant handling in add_persistent_gnt() (168), get_persistent_gnt() (205), put_persistent_gnt() (232), free_persistent_gnts() (242), purge_persistent_gnt() (327); per-ring helpers alloc_req() (409), free_req() (428), blkif_notify_work() (518), print_stats() (534), xen_blk_drain_io() (1019), xen_blkbk_free_caches() (631); the map/unmap path xen_blkbk_map() (756), xen_blkbk_unmap() (731), xen_blkbk_unmap_prepare() (645), xen_blkbk_unmap_and_respond() (706); ring locals in xen_blkif_schedule() (551), xen_blkbk_unmap_purged_grants() (291), xen_blkbk_parse_indirect() (919) [all...] |
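The max_ring_page_order parameter above caps the shared ring at 2^order pages, and a larger ring holds more outstanding requests. A rough, hypothetical illustration of that relationship (made-up header and slot sizes rather than the real blkif layout, and assuming the entry count is rounded down to a power of two as Xen's ring macros do):

```c
#include <stdio.h>

#define PAGE_SIZE	4096u
/* Hypothetical sizes; the real ring uses the blkif_sring layout from Xen headers. */
#define RING_HDR_BYTES	64u	/* producer/consumer indices, padding */
#define REQUEST_BYTES	112u	/* one request/response slot */

/* Round down to a power of two (assumed behaviour of the ring-size macros). */
static unsigned int rounddown_pow_of_two(unsigned int n)
{
	unsigned int p = 1;

	while (p * 2 <= n)
		p *= 2;
	return p;
}

int main(void)
{
	for (unsigned int order = 0; order <= 4; order++) {
		unsigned int bytes = PAGE_SIZE << order;	/* 2^order pages */
		unsigned int entries =
			rounddown_pow_of_two((bytes - RING_HDR_BYTES) / REQUEST_BYTES);

		printf("order %u: %5u bytes -> %3u ring entries\n",
		       order, bytes, entries);
	}
	return 0;
}
```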
/kernel/linux/linux-5.10/drivers/net/wireless/ath/ath11k/ |
H A D | dbring.c | matches in ath11k_dbring_bufs_replenish() (9, aligns to ring->buf_align, DMA-maps the buffer and tracks it in ring->bufs_idr under ring->idr_lock), ath11k_dbring_fill_bufs() (73), ath11k_dbring_wmi_cfg_setup() (112), ath11k_dbring_set_cfg() (144), ath11k_dbring_buf_setup() (159), ath11k_dbring_srng_setup() (182), ath11k_dbring_buffer_release_event() (229, ring local), ath11k_dbring_srng_cleanup() (334), ath11k_dbring_buf_cleanup() (339) [all...] |
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/ |
H A D | intel_ring.h | ring management API: intel_ring_update_space() (21), __intel_ring_pin() (23), intel_ring_pin() (24), intel_ring_unpin() (25), intel_ring_reset() (26); refcount helpers intel_ring_get() (30) and intel_ring_put() (36); inline index helpers intel_ring_wrap() (55), intel_ring_direction() (60), intel_ring_offset_valid() (69), assert_ring_tail_valid() (91) [all...] |
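The inline index helpers above boil down to masking a position into a power-of-two ring and comparing two wrapped positions. A self-contained sketch of those ideas follows; the struct and the direction formula are simplified stand-ins, not the i915 definitions:

```c
#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical ring with a power-of-two size in bytes. */
struct byte_ring {
	uint32_t size;	/* power of two */
};

/* Wrap an arbitrary position into the ring (cf. intel_ring_wrap). */
static uint32_t ring_wrap(const struct byte_ring *r, uint32_t pos)
{
	return pos & (r->size - 1);
}

/* Positive if @next lies ahead of @prev within half the ring, negative if
 * behind; this models what intel_ring_direction() is used for, though the
 * i915 implementation computes it differently.
 */
static int ring_direction(const struct byte_ring *r, uint32_t next, uint32_t prev)
{
	uint32_t d = ring_wrap(r, next - prev);

	if (d == 0)
		return 0;
	return d < r->size / 2 ? 1 : -1;
}

/* A position is valid if it is inside the ring and 8-byte aligned. */
static bool ring_offset_valid(const struct byte_ring *r, uint32_t pos)
{
	return pos < r->size && (pos & 7) == 0;
}

int main(void)
{
	struct byte_ring r = { .size = 4096 };

	assert(ring_wrap(&r, 4096 + 16) == 16);
	printf("direction(16, 4088) = %d\n", ring_direction(&r, 16, 4088));
	printf("offset 24 valid: %d, offset 5000 valid: %d\n",
	       ring_offset_valid(&r, 24), ring_offset_valid(&r, 5000));
	return 0;
}
```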
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx4/ |
H A D | en_rx.c | RX ring descriptor handling: mlx4_en_init_rx_desc() (108) addresses a descriptor as ring->buf + ring->stride * index and pads the fragments that do not fill the ring stride; mlx4_en_prepare_rx_desc() (133) uses ring->buf + (index << ring->log_stride); other matches in mlx4_en_alloc_frags() (74, bumps ring->rx_alloc_pages), mlx4_en_is_ring_empty() (157), mlx4_en_update_rx_prod_db() (162), mlx4_en_free_rx_desc() (168), mlx4_en_free_rx_buf() (226), mlx4_en_rx_recycle() (415), mlx4_en_deactivate_rx_ring() (450), mlx4_en_refill_rx_buffers() (547), mlx4_en_config_rss_qp() (1114); ring locals/members in mlx4_en_fill_rx_buffers() (185), mlx4_en_create_rx_ring() (268), mlx4_en_activate_rx_rings() (328), mlx4_en_recover_from_oom() (395), mlx4_en_destroy_rx_ring() (434), mlx4_en_process_rx_cq() (718) and a struct member (668) [all...] |
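mlx4_en_prepare_rx_desc() above finds a descriptor with ring->buf + (index << ring->log_stride): storing the stride as a log2 turns descriptor addressing into a shift. A tiny sketch of that addressing scheme with a hypothetical descriptor layout:

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical fixed-stride descriptor ring, addressed like the mlx4 RX ring. */
struct desc_ring {
	uint8_t *buf;		/* descriptor memory */
	uint32_t log_stride;	/* log2 of the per-entry stride in bytes */
	uint32_t size;		/* number of entries */
};

static void *ring_desc(const struct desc_ring *r, uint32_t index)
{
	/* buf + (index << log_stride) == buf + index * stride */
	return r->buf + ((size_t)index << r->log_stride);
}

int main(void)
{
	struct desc_ring ring = { .log_stride = 6, .size = 8 };	/* 64-byte entries */

	ring.buf = calloc(ring.size, 1u << ring.log_stride);
	for (uint32_t i = 0; i < ring.size; i++)
		printf("desc %u at offset %zu\n", i,
		       (size_t)((uint8_t *)ring_desc(&ring, i) - ring.buf));
	free(ring.buf);
	return 0;
}
```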
/kernel/linux/linux-6.6/drivers/net/wireless/ath/ath12k/ |
H A D | dbring.c | matches in ath12k_dbring_bufs_replenish() (10, aligns to ring->buf_align, DMA-maps the buffer and tracks it in ring->bufs_idr under ring->idr_lock), ath12k_dbring_fill_bufs() (75), ath12k_dbring_wmi_cfg_setup() (116), ath12k_dbring_set_cfg() (148), ath12k_dbring_buf_setup() (163), ath12k_dbring_srng_setup() (186), ath12k_dbring_buffer_release_event() (233, ring local), ath12k_dbring_srng_cleanup() (337), ath12k_dbring_buf_cleanup() (342) [all...] |
/kernel/linux/linux-5.10/tools/virtio/ringtest/ |
H A D | virtio_ring_0_9.c | ringtest benchmark over the legacy 0.9 vring layout: global struct vring ring (22); optional experimental ring polling code keyed off the high bits of the ring id ^ 0x8000 (24-26), and a mode that skips ring updates and reads/writes len in the descriptor (30); alloc_ring() (63-86) allocates the ring buffer, calls vring_init(&ring, ring_size, p, 0x1000) and chains the free descriptors via ring.desc[i].next = i + 1; further uses in add_inbuf() (117, 136) [all...] |
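alloc_ring() above threads every descriptor into a single free list with ring.desc[i].next = i + 1, and add_inbuf() later pops from that list. A stand-alone sketch of that free-list setup plus a simple allocate/free pair (simplified descriptor, not the real vring layout):

```c
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8
#define END_OF_LIST 0xffff

/* Simplified descriptor: only the free-list link matters for this sketch. */
struct desc {
	uint64_t addr;
	uint32_t len;
	uint16_t next;
};

static struct desc ring[RING_SIZE];
static uint16_t free_head;

static void ring_init(void)
{
	/* Chain every descriptor into one free list, as alloc_ring() does. */
	for (unsigned int i = 0; i < RING_SIZE; i++)
		ring[i].next = i + 1;
	ring[RING_SIZE - 1].next = END_OF_LIST;
	free_head = 0;
}

static int desc_alloc(void)
{
	if (free_head == END_OF_LIST)
		return -1;	/* ring full */
	uint16_t id = free_head;
	free_head = ring[id].next;
	return id;
}

static void desc_free(uint16_t id)
{
	ring[id].next = free_head;
	free_head = id;
}

int main(void)
{
	ring_init();
	int a = desc_alloc(), b = desc_alloc();

	printf("allocated descriptors %d and %d\n", a, b);
	desc_free((uint16_t)a);
	printf("next allocation reuses %d\n", desc_alloc());
	return 0;
}
```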
/kernel/linux/linux-6.6/tools/virtio/ringtest/ |
H A D | virtio_ring_0_9.c | ringtest benchmark over the legacy 0.9 vring layout: global struct vring ring (22); optional experimental ring polling code keyed off the high bits of the ring id ^ 0x8000 (24-26), and a mode that skips ring updates and reads/writes len in the descriptor (30); alloc_ring() (63-86) allocates the ring buffer, calls vring_init(&ring, ring_size, p, 0x1000) and chains the free descriptors via ring.desc[i].next = i + 1; further uses in add_inbuf() (117, 136) [all...] |
/kernel/linux/linux-5.10/drivers/net/ethernet/hisilicon/hns/ |
H A D | hnae.c | ring buffer management for the HNS acceleration engine: hnae_alloc_buffer() (36) allocates pages of hnae_page_order(ring) and records hnae_page_size(ring) as the buffer length; hnae_map_buffer() (67) DMA-maps the page with ring_to_dev()/ring_to_dma_dir() and checks dma_mapping_error(); other matches in hnae_free_buffer() (54), hnae_unmap_buffer() (78), hnae_free_buffers() (119), hnae_alloc_buffers() (128), hnae_free_desc() (147), hnae_alloc_desc() (158), hnae_fini_ring() (179), hnae_init_ring() (193) [all...] |
/kernel/linux/linux-6.6/drivers/net/ethernet/hisilicon/hns/ |
H A D | hnae.c | ring buffer management for the HNS acceleration engine: hnae_alloc_buffer() (36) allocates pages of hnae_page_order(ring) and records hnae_page_size(ring) as the buffer length; hnae_map_buffer() (67) DMA-maps the page with ring_to_dev()/ring_to_dma_dir() and checks dma_mapping_error(); other matches in hnae_free_buffer() (54), hnae_unmap_buffer() (78), hnae_free_buffers() (119), hnae_alloc_buffers() (128), hnae_free_desc() (147), hnae_alloc_desc() (158), hnae_fini_ring() (179), hnae_init_ring() (193) [all...] |