Searched refs:ring (Results 201 - 225 of 1680) sorted by relevance

/third_party/libdrm/freedreno/
freedreno_ringbuffer.h
101 struct fd_ringbuffer *fd_ringbuffer_ref(struct fd_ringbuffer *ring);
102 void fd_ringbuffer_del(struct fd_ringbuffer *ring);
103 void fd_ringbuffer_set_parent(struct fd_ringbuffer *ring,
106 void fd_ringbuffer_reset(struct fd_ringbuffer *ring);
107 int fd_ringbuffer_flush(struct fd_ringbuffer *ring);
111 int fd_ringbuffer_flush2(struct fd_ringbuffer *ring, int in_fence_fd,
113 void fd_ringbuffer_grow(struct fd_ringbuffer *ring, uint32_t ndwords);
114 uint32_t fd_ringbuffer_timestamp(struct fd_ringbuffer *ring);
116 static inline void fd_ringbuffer_emit(struct fd_ringbuffer *ring, in fd_ringbuffer_emit() argument
119 (*ring in fd_ringbuffer_emit()
[all...]
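
The entries above are the public libdrm freedreno ringbuffer API. The following is a minimal usage sketch of how the ref/emit/flush/del calls fit together; it is not code from the tree. fd_device_new(), fd_pipe_new(), FD_PIPE_3D, the <freedreno/...> include paths, and the render-node path are assumptions taken from the wider libdrm freedreno API, and the emitted dwords are placeholder filler.

/* Hedged sketch: open a DRM device, build a small command stream with
 * fd_ringbuffer_emit(), flush it, and drop the references. */
#include <fcntl.h>
#include <freedreno/freedreno_drmif.h>
#include <freedreno/freedreno_ringbuffer.h>

static int emit_and_flush(void)
{
    int drm_fd = open("/dev/dri/renderD128", O_RDWR);   /* example device node */
    if (drm_fd < 0)
        return -1;

    struct fd_device *dev = fd_device_new(drm_fd);
    struct fd_pipe *pipe = fd_pipe_new(dev, FD_PIPE_3D);
    struct fd_ringbuffer *ring = fd_ringbuffer_new(pipe, 0x1000);

    for (int i = 0; i < 4; i++)
        fd_ringbuffer_emit(ring, 0x00000000);   /* placeholder dwords */

    int ret = fd_ringbuffer_flush(ring);        /* submit the stream to the kernel */

    fd_ringbuffer_del(ring);                    /* drops the creation reference */
    fd_pipe_del(pipe);
    fd_device_del(dev);
    return ret;
}
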
/kernel/linux/linux-6.6/drivers/gpu/drm/qxl/
qxl_cmd.c
26 /* QXL cmd/ring handling */
37 struct ring { struct
43 struct ring *ring; member
51 void qxl_ring_free(struct qxl_ring *ring) in qxl_ring_free() argument
53 kfree(ring); in qxl_ring_free()
63 struct qxl_ring *ring; in qxl_ring_create() local
65 ring = kmalloc(sizeof(*ring), GFP_KERNEL); in qxl_ring_create()
66 if (!ring) in qxl_ring_create()
78 qxl_check_header(struct qxl_ring *ring) qxl_check_header() argument
92 qxl_check_idle(struct qxl_ring *ring) qxl_check_idle() argument
104 qxl_ring_push(struct qxl_ring *ring, const void *new_elt, bool interruptible) qxl_ring_push() argument
151 qxl_ring_pop(struct qxl_ring *ring, void *element) qxl_ring_pop() argument
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
H A Dcik_sdma.c86 * and each one supports 1 ring buffer used for gfx
90 * (ring buffer, IBs, etc.), but sDMA has it's own
155 * @ring: amdgpu ring pointer
159 static uint64_t cik_sdma_ring_get_rptr(struct amdgpu_ring *ring) in cik_sdma_ring_get_rptr() argument
163 rptr = *ring->rptr_cpu_addr; in cik_sdma_ring_get_rptr()
171 * @ring: amdgpu ring pointer
175 static uint64_t cik_sdma_ring_get_wptr(struct amdgpu_ring *ring) in cik_sdma_ring_get_wptr() argument
177 struct amdgpu_device *adev = ring in cik_sdma_ring_get_wptr()
189 cik_sdma_ring_set_wptr(struct amdgpu_ring *ring) cik_sdma_ring_set_wptr() argument
197 cik_sdma_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) cik_sdma_ring_insert_nop() argument
220 cik_sdma_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) cik_sdma_ring_emit_ib() argument
245 cik_sdma_ring_emit_hdp_flush(struct amdgpu_ring *ring) cik_sdma_ring_emit_hdp_flush() argument
276 cik_sdma_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) cik_sdma_ring_emit_fence() argument
428 struct amdgpu_ring *ring; cik_sdma_gfx_resume() local
603 cik_sdma_ring_test_ring(struct amdgpu_ring *ring) cik_sdma_ring_test_ring() argument
655 cik_sdma_ring_test_ib(struct amdgpu_ring *ring, long timeout) cik_sdma_ring_test_ib() argument
801 cik_sdma_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) cik_sdma_ring_pad_ib() argument
825 cik_sdma_ring_emit_pipeline_sync(struct amdgpu_ring *ring) cik_sdma_ring_emit_pipeline_sync() argument
852 cik_sdma_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr) cik_sdma_ring_emit_vm_flush() argument
868 cik_sdma_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) cik_sdma_ring_emit_wreg() argument
941 struct amdgpu_ring *ring; cik_sdma_sw_init() local
[all...]
vce_v4_0.c
58 * @ring: amdgpu_ring pointer
62 static uint64_t vce_v4_0_ring_get_rptr(struct amdgpu_ring *ring) in vce_v4_0_ring_get_rptr() argument
64 struct amdgpu_device *adev = ring->adev; in vce_v4_0_ring_get_rptr()
66 if (ring->me == 0) in vce_v4_0_ring_get_rptr()
68 else if (ring->me == 1) in vce_v4_0_ring_get_rptr()
77 * @ring: amdgpu_ring pointer
81 static uint64_t vce_v4_0_ring_get_wptr(struct amdgpu_ring *ring) in vce_v4_0_ring_get_wptr() argument
83 struct amdgpu_device *adev = ring->adev; in vce_v4_0_ring_get_wptr()
85 if (ring->use_doorbell) in vce_v4_0_ring_get_wptr()
86 return *ring in vce_v4_0_ring_get_wptr()
103 vce_v4_0_ring_set_wptr(struct amdgpu_ring *ring) vce_v4_0_ring_set_wptr() argument
207 struct amdgpu_ring *ring; vce_v4_0_sriov_start() local
338 struct amdgpu_ring *ring; vce_v4_0_start() local
428 struct amdgpu_ring *ring; vce_v4_0_sw_init() local
984 vce_v4_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) vce_v4_0_ring_emit_ib() argument
996 vce_v4_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) vce_v4_0_ring_emit_fence() argument
1008 vce_v4_0_ring_insert_end(struct amdgpu_ring *ring) vce_v4_0_ring_insert_end() argument
1013 vce_v4_0_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) vce_v4_0_emit_reg_wait() argument
1022 vce_v4_0_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) vce_v4_0_emit_vm_flush() argument
1035 vce_v4_0_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) vce_v4_0_emit_wreg() argument
[all...]
vce_v3_0.c
73 * @ring: amdgpu_ring pointer
77 static uint64_t vce_v3_0_ring_get_rptr(struct amdgpu_ring *ring) in vce_v3_0_ring_get_rptr() argument
79 struct amdgpu_device *adev = ring->adev; in vce_v3_0_ring_get_rptr()
89 if (ring->me == 0) in vce_v3_0_ring_get_rptr()
91 else if (ring->me == 1) in vce_v3_0_ring_get_rptr()
105 * @ring: amdgpu_ring pointer
109 static uint64_t vce_v3_0_ring_get_wptr(struct amdgpu_ring *ring) in vce_v3_0_ring_get_wptr() argument
111 struct amdgpu_device *adev = ring->adev; in vce_v3_0_ring_get_wptr()
121 if (ring->me == 0) in vce_v3_0_ring_get_wptr()
123 else if (ring in vce_v3_0_ring_get_wptr()
141 vce_v3_0_ring_set_wptr(struct amdgpu_ring *ring) vce_v3_0_ring_set_wptr() argument
267 struct amdgpu_ring *ring; vce_v3_0_start() local
421 struct amdgpu_ring *ring; vce_v3_0_sw_init() local
862 vce_v3_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) vce_v3_0_ring_emit_ib() argument
876 vce_v3_0_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) vce_v3_0_emit_vm_flush() argument
888 vce_v3_0_emit_pipeline_sync(struct amdgpu_ring *ring) vce_v3_0_emit_pipeline_sync() argument
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/hisilicon/hns3/
hns3_trace.h
69 TP_PROTO(struct hns3_enet_ring *ring, int cur_ntu),
70 TP_ARGS(ring, cur_ntu),
78 __string(devname, ring->tqp->handle->kinfo.netdev->name)
82 __entry->index = ring->tqp->tqp_index;
83 __entry->ntu = ring->next_to_use;
84 __entry->ntc = ring->next_to_clean;
85 __entry->desc_dma = ring->desc_dma_addr,
86 memcpy(__entry->desc, &ring->desc[cur_ntu],
88 __assign_str(devname, ring->tqp->handle->kinfo.netdev->name);
100 TP_PROTO(struct hns3_enet_ring *ring),
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/hisilicon/hns3/
hns3_trace.h
68 TP_PROTO(struct hns3_enet_ring *ring, int cur_ntu),
69 TP_ARGS(ring, cur_ntu),
77 __string(devname, ring->tqp->handle->kinfo.netdev->name)
81 __entry->index = ring->tqp->tqp_index;
82 __entry->ntu = ring->next_to_use;
83 __entry->ntc = ring->next_to_clean;
84 __entry->desc_dma = ring->desc_dma_addr,
85 memcpy(__entry->desc, &ring->desc[cur_ntu],
87 __assign_str(devname, ring->tqp->handle->kinfo.netdev->name);
99 TP_PROTO(struct hns3_enet_ring *ring),
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/apm/xgene/
xgene_enet_main.c
390 static __le64 *xgene_enet_get_exp_bufs(struct xgene_enet_desc_ring *ring) in xgene_enet_get_exp_bufs() argument
394 exp_bufs = &ring->exp_bufs[ring->exp_buf_tail * MAX_EXP_BUFFS]; in xgene_enet_get_exp_bufs()
396 ring->exp_buf_tail = (ring->exp_buf_tail + 1) & ((ring->slots / 2) - 1); in xgene_enet_get_exp_bufs()
401 static dma_addr_t *xgene_get_frag_dma_array(struct xgene_enet_desc_ring *ring) in xgene_get_frag_dma_array() argument
403 return &ring->cp_ring->frag_dma_addr[ring->tail * MAX_SKB_FRAGS]; in xgene_get_frag_dma_array()
783 static int xgene_enet_process_ring(struct xgene_enet_desc_ring *ring, in xgene_enet_process_ring() argument
850 struct xgene_enet_desc_ring *ring; xgene_enet_napi() local
882 struct xgene_enet_desc_ring *ring; xgene_enet_set_irq_name() local
907 struct xgene_enet_desc_ring *ring; xgene_enet_register_irq() local
939 struct xgene_enet_desc_ring *ring; xgene_enet_free_irq() local
1048 xgene_enet_delete_ring(struct xgene_enet_desc_ring *ring) xgene_enet_delete_ring() argument
1063 struct xgene_enet_desc_ring *ring; xgene_enet_delete_desc_rings() local
1129 xgene_enet_free_desc_ring(struct xgene_enet_desc_ring *ring) xgene_enet_free_desc_ring() argument
1151 struct xgene_enet_desc_ring *ring; xgene_enet_free_desc_rings() local
1195 is_irq_mbox_required(struct xgene_enet_pdata *pdata, struct xgene_enet_desc_ring *ring) is_irq_mbox_required() argument
1206 xgene_enet_ring_cmd_base(struct xgene_enet_pdata *pdata, struct xgene_enet_desc_ring *ring) xgene_enet_ring_cmd_base() argument
1220 struct xgene_enet_desc_ring *ring; xgene_enet_create_desc_ring() local
1477 struct xgene_enet_desc_ring *ring; xgene_enet_get_stats64() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/apm/xgene/
xgene_enet_main.c
390 static __le64 *xgene_enet_get_exp_bufs(struct xgene_enet_desc_ring *ring) in xgene_enet_get_exp_bufs() argument
394 exp_bufs = &ring->exp_bufs[ring->exp_buf_tail * MAX_EXP_BUFFS]; in xgene_enet_get_exp_bufs()
396 ring->exp_buf_tail = (ring->exp_buf_tail + 1) & ((ring->slots / 2) - 1); in xgene_enet_get_exp_bufs()
401 static dma_addr_t *xgene_get_frag_dma_array(struct xgene_enet_desc_ring *ring) in xgene_get_frag_dma_array() argument
403 return &ring->cp_ring->frag_dma_addr[ring->tail * MAX_SKB_FRAGS]; in xgene_get_frag_dma_array()
783 static int xgene_enet_process_ring(struct xgene_enet_desc_ring *ring, in xgene_enet_process_ring() argument
850 struct xgene_enet_desc_ring *ring; xgene_enet_napi() local
882 struct xgene_enet_desc_ring *ring; xgene_enet_set_irq_name() local
907 struct xgene_enet_desc_ring *ring; xgene_enet_register_irq() local
939 struct xgene_enet_desc_ring *ring; xgene_enet_free_irq() local
1048 xgene_enet_delete_ring(struct xgene_enet_desc_ring *ring) xgene_enet_delete_ring() argument
1063 struct xgene_enet_desc_ring *ring; xgene_enet_delete_desc_rings() local
1129 xgene_enet_free_desc_ring(struct xgene_enet_desc_ring *ring) xgene_enet_free_desc_ring() argument
1151 struct xgene_enet_desc_ring *ring; xgene_enet_free_desc_rings() local
1195 is_irq_mbox_required(struct xgene_enet_pdata *pdata, struct xgene_enet_desc_ring *ring) is_irq_mbox_required() argument
1206 xgene_enet_ring_cmd_base(struct xgene_enet_pdata *pdata, struct xgene_enet_desc_ring *ring) xgene_enet_ring_cmd_base() argument
1220 struct xgene_enet_desc_ring *ring; xgene_enet_create_desc_ring() local
1477 struct xgene_enet_desc_ring *ring; xgene_enet_get_stats64() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/actions/
owl-emac.c
140 static unsigned int owl_emac_ring_num_unused(struct owl_emac_ring *ring) in owl_emac_ring_num_unused() argument
142 return CIRC_SPACE(ring->head, ring->tail, ring->size); in owl_emac_ring_num_unused()
145 static unsigned int owl_emac_ring_get_next(struct owl_emac_ring *ring, in owl_emac_ring_get_next() argument
148 return (cur + 1) & (ring->size - 1); in owl_emac_ring_get_next()
151 static void owl_emac_ring_push_head(struct owl_emac_ring *ring) in owl_emac_ring_push_head() argument
153 ring->head = owl_emac_ring_get_next(ring, ring in owl_emac_ring_push_head()
156 owl_emac_ring_pop_tail(struct owl_emac_ring *ring) owl_emac_ring_pop_tail() argument
181 struct owl_emac_ring *ring = &priv->rx_ring; owl_emac_ring_prepare_rx() local
220 struct owl_emac_ring *ring = &priv->tx_ring; owl_emac_ring_prepare_tx() local
243 struct owl_emac_ring *ring = &priv->rx_ring; owl_emac_ring_unprepare_rx() local
262 struct owl_emac_ring *ring = &priv->tx_ring; owl_emac_ring_unprepare_tx() local
279 owl_emac_ring_alloc(struct device *dev, struct owl_emac_ring *ring, unsigned int size) owl_emac_ring_alloc() argument
492 struct owl_emac_ring *ring = &priv->tx_ring; owl_emac_setup_frame_xmit() local
570 struct owl_emac_ring *ring = &priv->tx_ring; owl_emac_ndo_start_xmit() local
633 struct owl_emac_ring *ring = &priv->tx_ring; owl_emac_tx_complete_tail() local
697 struct owl_emac_ring *ring = &priv->tx_ring; owl_emac_tx_complete() local
748 struct owl_emac_ring *ring = &priv->rx_ring; owl_emac_rx_process() local
[all...]
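
The owl-emac hits above rely on two standard circular-ring idioms: CIRC_SPACE() for counting free slots and a power-of-two mask for advancing an index. The generic sketch below restates that head/tail bookkeeping outside the driver; struct ring_idx and the macro names are invented for illustration, with the macros mirroring CIRC_CNT()/CIRC_SPACE() from <linux/circ_buf.h>.

#include <assert.h>

struct ring_idx {
    unsigned int head;   /* next slot the producer fills */
    unsigned int tail;   /* next slot the consumer drains */
    unsigned int size;   /* number of slots, must be a power of two */
};

/* same shape as CIRC_CNT()/CIRC_SPACE() in <linux/circ_buf.h> */
#define RING_CNT(r)   (((r)->head - (r)->tail) & ((r)->size - 1))
#define RING_SPACE(r) (((r)->tail - ((r)->head + 1)) & ((r)->size - 1))

static unsigned int ring_next(const struct ring_idx *r, unsigned int cur)
{
    /* the mask-based wrap only works when size is a power of two */
    assert((r->size & (r->size - 1)) == 0);
    return (cur + 1) & (r->size - 1);
}

With this scheme one slot is always left unused, so a full ring (RING_SPACE() == 0) can be told apart from an empty one (head == tail).
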
/third_party/mesa3d/src/gallium/drivers/freedreno/ir3/
ir3_const.h
40 static bool is_stateobj(struct fd_ringbuffer *ring);
42 static void emit_const_user(struct fd_ringbuffer *ring,
46 static void emit_const_bo(struct fd_ringbuffer *ring,
51 emit_const_prsc(struct fd_ringbuffer *ring, const struct ir3_shader_variant *v, in emit_const_prsc() argument
56 emit_const_bo(ring, v, regid, offset, size, rsc->bo); in emit_const_prsc()
59 static void emit_const_ptrs(struct fd_ringbuffer *ring,
65 emit_const_asserts(struct fd_ringbuffer *ring, in emit_const_asserts() argument
75 ring_wfi(struct fd_batch *batch, struct fd_ringbuffer *ring) assert_dt
77 /* when we emit const state via ring (IB2) we need a WFI, but when
80 if (is_stateobj(ring))
114 ir3_emit_constant_data(struct fd_screen *screen, const struct ir3_shader_variant *v, struct fd_ringbuffer *ring) ir3_emit_constant_data() argument
154 ir3_emit_user_consts(struct fd_screen *screen, const struct ir3_shader_variant *v, struct fd_ringbuffer *ring, struct fd_constbuf_stateobj *constbuf) ir3_emit_user_consts() argument
204 ir3_emit_ubos(struct fd_context *ctx, const struct ir3_shader_variant *v, struct fd_ringbuffer *ring, struct fd_constbuf_stateobj *constbuf) ir3_emit_ubos() argument
257 ir3_emit_image_dims(struct fd_screen *screen, const struct ir3_shader_variant *v, struct fd_ringbuffer *ring, struct fd_shaderimg_stateobj *si) ir3_emit_image_dims() argument
311 ir3_emit_immediates(struct fd_screen *screen, const struct ir3_shader_variant *v, struct fd_ringbuffer *ring) ir3_emit_immediates() argument
338 ir3_emit_link_map(struct fd_screen *screen, const struct ir3_shader_variant *producer, const struct ir3_shader_variant *v, struct fd_ringbuffer *ring) ir3_emit_link_map() argument
362 emit_tfbos(struct fd_context *ctx, const struct ir3_shader_variant *v, struct fd_ringbuffer *ring) emit_tfbos() argument
[all...]
/kernel/linux/linux-6.6/drivers/usb/mtu3/
mtu3_qmu.c
119 static struct qmu_gpd *gpd_dma_to_virt(struct mtu3_gpd_ring *ring, in gpd_dma_to_virt() argument
122 dma_addr_t dma_base = ring->dma; in gpd_dma_to_virt()
123 struct qmu_gpd *gpd_head = ring->start; in gpd_dma_to_virt()
132 static dma_addr_t gpd_virt_to_dma(struct mtu3_gpd_ring *ring, in gpd_virt_to_dma() argument
135 dma_addr_t dma_base = ring->dma; in gpd_virt_to_dma()
136 struct qmu_gpd *gpd_head = ring->start; in gpd_virt_to_dma()
146 static void gpd_ring_init(struct mtu3_gpd_ring *ring, struct qmu_gpd *gpd) in gpd_ring_init() argument
148 ring->start = gpd; in gpd_ring_init()
149 ring->enqueue = gpd; in gpd_ring_init()
150 ring in gpd_ring_init()
156 struct mtu3_gpd_ring *ring = &mep->gpd_ring; reset_gpd_list() local
168 struct mtu3_gpd_ring *ring = &mep->gpd_ring; mtu3_gpd_ring_alloc() local
182 struct mtu3_gpd_ring *ring = &mep->gpd_ring; mtu3_gpd_ring_free() local
203 advance_enq_gpd(struct mtu3_gpd_ring *ring) advance_enq_gpd() argument
214 advance_deq_gpd(struct mtu3_gpd_ring *ring) advance_deq_gpd() argument
225 gpd_ring_empty(struct mtu3_gpd_ring *ring) gpd_ring_empty() argument
247 struct mtu3_gpd_ring *ring = &mep->gpd_ring; mtu3_prepare_tx_gpd() local
290 struct mtu3_gpd_ring *ring = &mep->gpd_ring; mtu3_prepare_rx_gpd() local
335 struct mtu3_gpd_ring *ring = &mep->gpd_ring; mtu3_qmu_start() local
428 struct mtu3_gpd_ring *ring = &mep->gpd_ring; qmu_tx_zlp_error_handler() local
477 struct mtu3_gpd_ring *ring = &mep->gpd_ring; qmu_error_rx() local
511 struct mtu3_gpd_ring *ring = &mep->gpd_ring; qmu_done_tx() local
551 struct mtu3_gpd_ring *ring = &mep->gpd_ring; qmu_done_rx() local
[all...]
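
gpd_dma_to_virt() and gpd_virt_to_dma() above translate between the CPU and device views of one contiguous GPD allocation by sharing an element index. Below is a simplified, self-contained sketch of that pattern; the struct names and field types are stand-ins for illustration, not the driver's struct qmu_gpd or dma_addr_t.

#include <stddef.h>
#include <stdint.h>

struct elem { uint32_t word[4]; };      /* stand-in for struct qmu_gpd */

struct elem_ring {
    struct elem *start;                 /* CPU-visible base of the ring */
    uint64_t dma;                       /* device-visible base address */
};

static struct elem *ring_dma_to_virt(const struct elem_ring *ring, uint64_t dma)
{
    /* element index is the byte offset from the DMA base, scaled by size */
    size_t offset = (size_t)((dma - ring->dma) / sizeof(struct elem));
    return ring->start + offset;
}

static uint64_t ring_virt_to_dma(const struct elem_ring *ring, const struct elem *e)
{
    /* pointer difference gives the same element index back */
    size_t offset = (size_t)(e - ring->start);
    return ring->dma + offset * sizeof(struct elem);
}
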
/third_party/ffmpeg/libavformat/
async.c
72 RingBuffer ring; member
83 static int ring_init(RingBuffer *ring, unsigned int capacity, int read_back_capacity) in ring_init() argument
85 memset(ring, 0, sizeof(RingBuffer)); in ring_init()
86 ring->fifo = av_fifo_alloc2(capacity + read_back_capacity, 1, 0); in ring_init()
87 if (!ring->fifo) in ring_init()
90 ring->read_back_capacity = read_back_capacity; in ring_init()
94 static void ring_destroy(RingBuffer *ring) in ring_destroy() argument
96 av_fifo_freep2(&ring->fifo); in ring_destroy()
99 static void ring_reset(RingBuffer *ring) in ring_reset() argument
101 av_fifo_reset2(ring in ring_reset()
105 ring_size(RingBuffer *ring) ring_size() argument
110 ring_space(RingBuffer *ring) ring_space() argument
115 ring_read(RingBuffer *ring, void *dest, int buf_size) ring_read() argument
145 ring_write(RingBuffer *ring, URLContext *h, size_t size) ring_write() argument
157 ring_size_of_read_back(RingBuffer *ring) ring_size_of_read_back() argument
162 ring_drain(RingBuffer *ring, int offset) ring_drain() argument
188 RingBuffer *ring = &c->ring; async_buffer_task() local
341 RingBuffer *ring = &c->ring; async_read_internal() local
393 RingBuffer *ring = &c->ring; async_seek() local
[all...]
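
async.c's RingBuffer wraps the AVFifo API from libavutil/fifo.h (FFmpeg 5.1+). The sketch below exercises only those underlying calls, with a byte-sized element as in ring_init(); it is not the wrapper itself, and the 4096-byte capacity and helper name are arbitrary choices for the example.

#include <string.h>
#include <libavutil/fifo.h>

static int fifo_roundtrip(void)
{
    /* byte-granular FIFO: elem_size = 1, no auto-grow flags */
    AVFifo *fifo = av_fifo_alloc2(4096, 1, 0);
    if (!fifo)
        return -1;

    const char msg[] = "ring";
    av_fifo_write(fifo, msg, sizeof(msg));          /* producer side */

    char out[sizeof(msg)] = {0};
    if (av_fifo_can_read(fifo) >= sizeof(out))      /* enough bytes buffered? */
        av_fifo_read(fifo, out, sizeof(out));       /* consumer side */

    av_fifo_freep2(&fifo);                          /* frees and NULLs the pointer */
    return memcmp(msg, out, sizeof(msg)) ? -1 : 0;
}
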
/kernel/linux/linux-5.10/tools/io_uring/
io_uring-cp.c
34 static int setup_context(unsigned entries, struct io_uring *ring) in setup_context() argument
38 ret = io_uring_queue_init(entries, ring, 0); in setup_context()
69 static void queue_prepped(struct io_uring *ring, struct io_data *data) in queue_prepped() argument
73 sqe = io_uring_get_sqe(ring); in queue_prepped()
84 static int queue_read(struct io_uring *ring, off_t size, off_t offset) in queue_read() argument
93 sqe = io_uring_get_sqe(ring); in queue_read()
111 static void queue_write(struct io_uring *ring, struct io_data *data) in queue_write() argument
119 queue_prepped(ring, data); in queue_write()
120 io_uring_submit(ring); in queue_write()
123 static int copy_file(struct io_uring *ring, off_ argument
229 struct io_uring ring; main() local
[all...]
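
The io_uring-cp hits follow liburing's standard prepare/submit/complete cycle. Here is a minimal single-read sketch of that cycle using the same liburing calls (io_uring_queue_init, io_uring_get_sqe, io_uring_prep_read, io_uring_submit, io_uring_wait_cqe, io_uring_cqe_seen); it is a simplification for illustration, not the tool's copy loop.

#include <fcntl.h>
#include <unistd.h>
#include <liburing.h>

static int read_once(const char *path, char *buf, unsigned len)
{
    struct io_uring ring;
    if (io_uring_queue_init(8, &ring, 0) < 0)        /* as in setup_context() */
        return -1;

    int fd = open(path, O_RDONLY);
    if (fd < 0) {
        io_uring_queue_exit(&ring);
        return -1;
    }

    struct io_uring_sqe *sqe = io_uring_get_sqe(&ring);
    io_uring_prep_read(sqe, fd, buf, len, 0);        /* queue a read at offset 0 */
    io_uring_submit(&ring);                          /* hand the SQE to the kernel */

    struct io_uring_cqe *cqe;
    io_uring_wait_cqe(&ring, &cqe);                  /* block for the completion */
    int res = cqe->res;                              /* bytes read, or -errno */
    io_uring_cqe_seen(&ring, cqe);                   /* release the CQE slot */

    close(fd);
    io_uring_queue_exit(&ring);
    return res;
}
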
/kernel/linux/linux-6.6/net/xdp/
xsk_queue.h
2 /* XDP user-space ring structure
45 struct xdp_ring *ring; member
59 * completion ring, the kernel is the producer and user space is the
95 * now and again after circling through the ring.
102 * RESERVE entries PEEK in the ring for entries
103 * WRITE data into the ring READ data from the ring
106 * The producer reserves one or more entries in the ring. It can then
110 * The consumer peeks into the ring to see if the producer has written
122 struct xdp_umem_ring *ring in __xskq_cons_read_addr_unchecked() local
210 struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring; xskq_cons_read_desc() local
245 struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring; xskq_cons_read_desc_batch() local
385 struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring; xskq_prod_reserve_addr() local
398 struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring; xskq_prod_write_addr_batch() local
411 struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring; xskq_prod_reserve_desc() local
[all...]
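
The xsk_queue.h comments above describe a RESERVE/WRITE/SUBMIT producer side and a PEEK/READ/RELEASE consumer side. The generic single-producer/single-consumer sketch below shows only that counter discipline; the real kernel code additionally handles memory ordering, batching and cached copies of the opposite index, so treat this purely as an illustration with invented names.

#include <stdbool.h>
#include <stdint.h>

#define RING_SIZE 256u                      /* power of two */

struct spsc_ring {
    uint32_t producer;                      /* total entries ever submitted */
    uint32_t consumer;                      /* total entries ever released  */
    uint64_t desc[RING_SIZE];
};

/* producer: RESERVE a slot, WRITE into it, then SUBMIT (publish) it */
static bool ring_produce(struct spsc_ring *r, uint64_t val)
{
    if (r->producer - r->consumer == RING_SIZE)
        return false;                                /* full */
    r->desc[r->producer & (RING_SIZE - 1)] = val;    /* WRITE */
    /* a real implementation issues a write barrier before publishing */
    r->producer++;                                   /* SUBMIT */
    return true;
}

/* consumer: PEEK for an entry, READ it, then RELEASE the slot */
static bool ring_consume(struct spsc_ring *r, uint64_t *val)
{
    if (r->producer == r->consumer)
        return false;                                /* empty */
    /* a real implementation issues a read barrier before reading data */
    *val = r->desc[r->consumer & (RING_SIZE - 1)];   /* READ */
    r->consumer++;                                   /* RELEASE */
    return true;
}

Because producer and consumer are free-running unsigned counters, the difference producer - consumer is always the number of filled entries, even across wrap-around.
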
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
vce_v3_0.c
73 * @ring: amdgpu_ring pointer
77 static uint64_t vce_v3_0_ring_get_rptr(struct amdgpu_ring *ring) in vce_v3_0_ring_get_rptr() argument
79 struct amdgpu_device *adev = ring->adev; in vce_v3_0_ring_get_rptr()
89 if (ring->me == 0) in vce_v3_0_ring_get_rptr()
91 else if (ring->me == 1) in vce_v3_0_ring_get_rptr()
105 * @ring: amdgpu_ring pointer
109 static uint64_t vce_v3_0_ring_get_wptr(struct amdgpu_ring *ring) in vce_v3_0_ring_get_wptr() argument
111 struct amdgpu_device *adev = ring->adev; in vce_v3_0_ring_get_wptr()
121 if (ring->me == 0) in vce_v3_0_ring_get_wptr()
123 else if (ring in vce_v3_0_ring_get_wptr()
141 vce_v3_0_ring_set_wptr(struct amdgpu_ring *ring) vce_v3_0_ring_set_wptr() argument
267 struct amdgpu_ring *ring; vce_v3_0_start() local
421 struct amdgpu_ring *ring; vce_v3_0_sw_init() local
835 vce_v3_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) vce_v3_0_ring_emit_ib() argument
849 vce_v3_0_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) vce_v3_0_emit_vm_flush() argument
861 vce_v3_0_emit_pipeline_sync(struct amdgpu_ring *ring) vce_v3_0_emit_pipeline_sync() argument
[all...]
vce_v4_0.c
57 * @ring: amdgpu_ring pointer
61 static uint64_t vce_v4_0_ring_get_rptr(struct amdgpu_ring *ring) in vce_v4_0_ring_get_rptr() argument
63 struct amdgpu_device *adev = ring->adev; in vce_v4_0_ring_get_rptr()
65 if (ring->me == 0) in vce_v4_0_ring_get_rptr()
67 else if (ring->me == 1) in vce_v4_0_ring_get_rptr()
76 * @ring: amdgpu_ring pointer
80 static uint64_t vce_v4_0_ring_get_wptr(struct amdgpu_ring *ring) in vce_v4_0_ring_get_wptr() argument
82 struct amdgpu_device *adev = ring->adev; in vce_v4_0_ring_get_wptr()
84 if (ring->use_doorbell) in vce_v4_0_ring_get_wptr()
85 return adev->wb.wb[ring in vce_v4_0_ring_get_wptr()
102 vce_v4_0_ring_set_wptr(struct amdgpu_ring *ring) vce_v4_0_ring_set_wptr() argument
206 struct amdgpu_ring *ring; vce_v4_0_sriov_start() local
337 struct amdgpu_ring *ring; vce_v4_0_start() local
427 struct amdgpu_ring *ring; vce_v4_0_sw_init() local
948 vce_v4_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) vce_v4_0_ring_emit_ib() argument
960 vce_v4_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags) vce_v4_0_ring_emit_fence() argument
972 vce_v4_0_ring_insert_end(struct amdgpu_ring *ring) vce_v4_0_ring_insert_end() argument
977 vce_v4_0_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask) vce_v4_0_emit_reg_wait() argument
986 vce_v4_0_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr) vce_v4_0_emit_vm_flush() argument
999 vce_v4_0_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val) vce_v4_0_emit_wreg() argument
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/broadcom/
bgmac.c
41 static void bgmac_dma_tx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring) in bgmac_dma_tx_reset() argument
46 if (!ring->mmio_base) in bgmac_dma_tx_reset()
49 /* Suspend DMA TX ring first. in bgmac_dma_tx_reset()
53 bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL, in bgmac_dma_tx_reset()
56 val = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_STATUS); in bgmac_dma_tx_reset()
67 dev_err(bgmac->dev, "Timeout suspending DMA TX ring 0x%X (BGMAC_DMA_TX_STAT: 0x%08X)\n", in bgmac_dma_tx_reset()
68 ring->mmio_base, val); in bgmac_dma_tx_reset()
71 bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL, 0); in bgmac_dma_tx_reset()
73 ring->mmio_base + BGMAC_DMA_TX_STATUS, in bgmac_dma_tx_reset()
76 dev_warn(bgmac->dev, "DMA TX ring in bgmac_dma_tx_reset()
86 bgmac_dma_tx_enable(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_tx_enable() argument
111 bgmac_dma_tx_add_buf(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int i, int len, u32 ctl0) bgmac_dma_tx_add_buf() argument
131 bgmac_dma_tx_add(struct bgmac *bgmac, struct bgmac_dma_ring *ring, struct sk_buff *skb) bgmac_dma_tx_add() argument
235 bgmac_dma_tx_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_tx_free() argument
292 bgmac_dma_rx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_reset() argument
306 bgmac_dma_rx_enable(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_enable() argument
367 bgmac_dma_rx_update_index(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_update_index() argument
377 bgmac_dma_rx_setup_desc(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int desc_idx) bgmac_dma_rx_setup_desc() argument
412 bgmac_dma_rx_read(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int weight) bgmac_dma_rx_read() argument
504 bgmac_dma_unaligned(struct bgmac *bgmac, struct bgmac_dma_ring *ring, enum bgmac_dma_ring_type ring_type) bgmac_dma_unaligned() argument
525 bgmac_dma_tx_ring_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_tx_ring_free() argument
552 bgmac_dma_rx_ring_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_ring_free() argument
572 bgmac_dma_ring_desc_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int num_slots) bgmac_dma_ring_desc_free() argument
615 struct bgmac_dma_ring *ring; bgmac_dma_alloc() local
688 struct bgmac_dma_ring *ring; bgmac_dma_init() local
1227 struct bgmac_dma_ring *ring; bgmac_start_xmit() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/broadcom/
bgmac.c
41 static void bgmac_dma_tx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring) in bgmac_dma_tx_reset() argument
46 if (!ring->mmio_base) in bgmac_dma_tx_reset()
49 /* Suspend DMA TX ring first. in bgmac_dma_tx_reset()
53 bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL, in bgmac_dma_tx_reset()
56 val = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_STATUS); in bgmac_dma_tx_reset()
67 dev_err(bgmac->dev, "Timeout suspending DMA TX ring 0x%X (BGMAC_DMA_TX_STAT: 0x%08X)\n", in bgmac_dma_tx_reset()
68 ring->mmio_base, val); in bgmac_dma_tx_reset()
71 bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL, 0); in bgmac_dma_tx_reset()
73 ring->mmio_base + BGMAC_DMA_TX_STATUS, in bgmac_dma_tx_reset()
76 dev_warn(bgmac->dev, "DMA TX ring in bgmac_dma_tx_reset()
86 bgmac_dma_tx_enable(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_tx_enable() argument
111 bgmac_dma_tx_add_buf(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int i, int len, u32 ctl0) bgmac_dma_tx_add_buf() argument
131 bgmac_dma_tx_add(struct bgmac *bgmac, struct bgmac_dma_ring *ring, struct sk_buff *skb) bgmac_dma_tx_add() argument
235 bgmac_dma_tx_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_tx_free() argument
292 bgmac_dma_rx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_reset() argument
306 bgmac_dma_rx_enable(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_enable() argument
367 bgmac_dma_rx_update_index(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_update_index() argument
377 bgmac_dma_rx_setup_desc(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int desc_idx) bgmac_dma_rx_setup_desc() argument
412 bgmac_dma_rx_read(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int weight) bgmac_dma_rx_read() argument
504 bgmac_dma_unaligned(struct bgmac *bgmac, struct bgmac_dma_ring *ring, enum bgmac_dma_ring_type ring_type) bgmac_dma_unaligned() argument
525 bgmac_dma_tx_ring_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_tx_ring_free() argument
552 bgmac_dma_rx_ring_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring) bgmac_dma_rx_ring_free() argument
572 bgmac_dma_ring_desc_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring, int num_slots) bgmac_dma_ring_desc_free() argument
615 struct bgmac_dma_ring *ring; bgmac_dma_alloc() local
688 struct bgmac_dma_ring *ring; bgmac_dma_init() local
1227 struct bgmac_dma_ring *ring; bgmac_start_xmit() local
[all...]
bcm4908_enet.c
144 * DMA ring ops
148 struct bcm4908_enet_dma_ring *ring) in bcm4908_enet_dma_ring_intrs_on()
150 enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_MASK, ENET_DMA_INT_DEFAULTS); in bcm4908_enet_dma_ring_intrs_on()
154 struct bcm4908_enet_dma_ring *ring) in bcm4908_enet_dma_ring_intrs_off()
156 enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_MASK, 0); in bcm4908_enet_dma_ring_intrs_off()
160 struct bcm4908_enet_dma_ring *ring) in bcm4908_enet_dma_ring_intrs_ack()
162 enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_STAT, ENET_DMA_INT_DEFAULTS); in bcm4908_enet_dma_ring_intrs_ack()
170 struct bcm4908_enet_dma_ring *ring) in bcm4908_dma_alloc_buf_descs()
172 int size = ring->length * sizeof(struct bcm4908_enet_dma_ring_bd); in bcm4908_dma_alloc_buf_descs()
175 ring in bcm4908_dma_alloc_buf_descs()
147 bcm4908_enet_dma_ring_intrs_on(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_ring_intrs_on() argument
153 bcm4908_enet_dma_ring_intrs_off(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_ring_intrs_off() argument
159 bcm4908_enet_dma_ring_intrs_ack(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_ring_intrs_ack() argument
169 bcm4908_dma_alloc_buf_descs(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_dma_alloc_buf_descs() argument
257 struct bcm4908_enet_dma_ring *ring = rings[i]; bcm4908_enet_dma_reset() local
298 bcm4908_enet_dma_ring_init(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_ring_init() argument
358 bcm4908_enet_dma_tx_ring_enable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_tx_ring_enable() argument
364 bcm4908_enet_dma_tx_ring_disable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_tx_ring_disable() argument
370 bcm4908_enet_dma_rx_ring_enable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_rx_ring_enable() argument
376 bcm4908_enet_dma_rx_ring_disable(struct bcm4908_enet *enet, struct bcm4908_enet_dma_ring *ring) bcm4908_enet_dma_rx_ring_disable() argument
436 struct bcm4908_enet_dma_ring *ring; bcm4908_enet_irq_handler() local
524 struct bcm4908_enet_dma_ring *ring = &enet->tx_ring; bcm4908_enet_start_xmit() local
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/fm10k/
fm10k_debugfs.c
15 struct fm10k_ring *ring = s->private; in fm10k_dbg_desc_seq_start() local
17 return (*pos < ring->count) ? pos : NULL; in fm10k_dbg_desc_seq_start()
24 struct fm10k_ring *ring = s->private; in fm10k_dbg_desc_seq_next() local
26 return (++(*pos) < ring->count) ? pos : NULL; in fm10k_dbg_desc_seq_next()
45 struct fm10k_ring *ring = s->private; in fm10k_dbg_tx_desc_seq_show() local
57 if (!ring->desc) { in fm10k_dbg_tx_desc_seq_show()
58 seq_printf(s, "%03X Descriptor ring not allocated.\n", i); in fm10k_dbg_tx_desc_seq_show()
60 struct fm10k_tx_desc *txd = FM10K_TX_DESC(ring, i); in fm10k_dbg_tx_desc_seq_show()
72 struct fm10k_ring *ring = s->private; in fm10k_dbg_rx_desc_seq_show() local
84 if (!ring in fm10k_dbg_rx_desc_seq_show()
115 struct fm10k_ring *ring = inode->i_private; fm10k_dbg_desc_open() local
166 struct fm10k_ring *ring = &q_vector->tx.ring[i]; fm10k_dbg_q_vector_init() local
177 struct fm10k_ring *ring = &q_vector->rx.ring[i]; fm10k_dbg_q_vector_init() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/fm10k/
fm10k_debugfs.c
15 struct fm10k_ring *ring = s->private; in fm10k_dbg_desc_seq_start() local
17 return (*pos < ring->count) ? pos : NULL; in fm10k_dbg_desc_seq_start()
24 struct fm10k_ring *ring = s->private; in fm10k_dbg_desc_seq_next() local
26 return (++(*pos) < ring->count) ? pos : NULL; in fm10k_dbg_desc_seq_next()
45 struct fm10k_ring *ring = s->private; in fm10k_dbg_tx_desc_seq_show() local
57 if (!ring->desc) { in fm10k_dbg_tx_desc_seq_show()
58 seq_printf(s, "%03X Descriptor ring not allocated.\n", i); in fm10k_dbg_tx_desc_seq_show()
60 struct fm10k_tx_desc *txd = FM10K_TX_DESC(ring, i); in fm10k_dbg_tx_desc_seq_show()
72 struct fm10k_ring *ring = s->private; in fm10k_dbg_rx_desc_seq_show() local
84 if (!ring in fm10k_dbg_rx_desc_seq_show()
115 struct fm10k_ring *ring = inode->i_private; fm10k_dbg_desc_open() local
166 struct fm10k_ring *ring = &q_vector->tx.ring[i]; fm10k_dbg_q_vector_init() local
177 struct fm10k_ring *ring = &q_vector->rx.ring[i]; fm10k_dbg_q_vector_init() local
[all...]
/kernel/linux/linux-5.10/net/xdp/
xsk_queue.h
2 /* XDP user-space ring structure
43 struct xdp_ring *ring; member
49 * ring buffer in kernel/events/ring_buffer.c. For the Rx and completion
50 * ring, the kernel is the producer and user space is the consumer. For
87 * now and again after circling through the ring.
94 * RESERVE entries PEEK in the ring for entries
95 * WRITE data into the ring READ data from the ring
98 * The producer reserves one or more entries in the ring. It can then
102 * The consumer peeks into the ring t
114 struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring; xskq_cons_read_addr_unchecked() local
190 struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring; xskq_cons_read_desc() local
309 struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring; xskq_prod_reserve_addr() local
322 struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring; xskq_prod_reserve_desc() local
350 struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring; xskq_prod_submit_addr() local
[all...]
/device/soc/rockchip/common/kernel/drivers/net/wireless/rockchip_wlan/rkwifi/bcmdhd_wifi6/
dhd_msgbuf.c
3 * @file definition of host message ring functionality
141 /* optimization to write "n" tx items at a time to ring */
152 struct msgbuf_ring; /* ring context for common and flow rings */
166 * 4. Dongle DMA's all indices after producing items in the D2H ring, flushing
167 * ring contents before the indices.
194 typedef uint8 (* d2h_sync_cb_t)(dhd_pub_t *dhd, struct msgbuf_ring *ring,
205 typedef int (* d2h_edl_sync_cb_t)(dhd_pub_t *dhd, struct msgbuf_ring *ring,
289 * d2h debug ring is located at the end, i.e. after all the tx flow rings and h2d debug ring
367 /* Traverse each flowring in the flowring pool, assigning ring an
816 msgbuf_ring_t *ring = (msgbuf_ring_t *)prot_info; dhd_prot_dump_ring_ptrs() local
863 dhd_prot_d2h_sync_livelock(dhd_pub_t *dhd, uint32 msg_seqnum, msgbuf_ring_t *ring, uint32 tries, volatile uchar *msg, int msglen) dhd_prot_d2h_sync_livelock() argument
912 dhd_prot_d2h_sync_seqnum(dhd_pub_t *dhd, msgbuf_ring_t *ring, volatile cmn_msg_hdr_t *msg, int msglen) dhd_prot_d2h_sync_seqnum() argument
991 dhd_prot_d2h_sync_xorcsum(dhd_pub_t *dhd, msgbuf_ring_t *ring, volatile cmn_msg_hdr_t *msg, int msglen) dhd_prot_d2h_sync_xorcsum() argument
1079 dhd_prot_d2h_sync_none(dhd_pub_t *dhd, msgbuf_ring_t *ring, volatile cmn_msg_hdr_t *msg, int msglen) dhd_prot_d2h_sync_none() argument
1109 dhd_prot_d2h_sync_edl(dhd_pub_t *dhd, msgbuf_ring_t *ring, volatile cmn_msg_hdr_t *msg) dhd_prot_d2h_sync_edl() argument
1245 dhd_prot_d2h_sync_edl_none(dhd_pub_t *dhd, msgbuf_ring_t *ring, volatile cmn_msg_hdr_t *msg) dhd_prot_d2h_sync_edl_none() argument
4486 msgbuf_ring_t *ring = &prot->h2dring_rxp_subn; dhd_prot_rxbuf_post() local
4674 dhd_prot_infobufpost(dhd_pub_t *dhd, msgbuf_ring_t *ring) dhd_prot_infobufpost() argument
4896 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhd_prot_rxbufpost_ctrl() local
5192 msgbuf_ring_t *ring = prot->d2hring_info_cpln; dhd_prot_process_msgbuf_infocpl() local
5255 msgbuf_ring_t *ring = prot->d2hring_edl; dhd_prot_process_msgbuf_edl() local
5341 msgbuf_ring_t *ring = NULL; dhd_prot_process_edl_complete() local
5502 msgbuf_ring_t *ring = NULL; dhd_prot_edl_ring_tcm_rd_update() local
5525 msgbuf_ring_t *ring; dhd_prot_process_msgbuf_rxcpl() local
5776 msgbuf_ring_t *ring = (msgbuf_ring_t *)msgring; dhd_prot_update_txflowring() local
5800 msgbuf_ring_t *ring; dhd_prot_process_msgbuf_txcpl() local
5903 msgbuf_ring_t *ring = &prot->d2hring_ctrl_cpln; dhd_prot_process_ctrlbuf() local
5952 dhd_prot_process_msgtype(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint8 *buf, uint32 len) dhd_prot_process_msgtype() argument
6341 msgbuf_ring_t *ring = &dhd->prot->d2hring_tx_cpln; dhd_prot_txstatus_process() local
6659 msgbuf_ring_t *ring; dhd_prot_txdata() local
6955 msgbuf_ring_t *ring; dhd_prot_txdata_write_flush() local
7163 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhdmsgbuf_lpbk_req() local
7404 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhdmsgbuf_dmaxfer_req() local
7785 msgbuf_ring_t *ring; dhd_d2h_h2d_ring_dump() local
7849 dhd_ring_write(dhd_pub_t *dhd, msgbuf_ring_t *ring, void *file, const void *user_buf, unsigned long *file_posn) dhd_ring_write() argument
7877 dhd_edl_ring_hdr_write(dhd_pub_t *dhd, msgbuf_ring_t *ring, void *file, const void *user_buf, unsigned long *file_posn) dhd_edl_ring_hdr_write() argument
7972 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhd_post_dummy_msg() local
8009 dhd_prot_alloc_ring_space(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint16 nitems, uint16 * alloced, bool exactly_nitems) dhd_prot_alloc_ring_space() argument
8061 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhd_fillup_ioct_reqst() local
8153 dhd_prot_ring_attach(dhd_pub_t *dhd, msgbuf_ring_t *ring, const char *name, uint16 max_items, uint16 item_len, uint16 ringid) dhd_prot_ring_attach() argument
8266 dhd_prot_ring_init(dhd_pub_t *dhd, msgbuf_ring_t *ring) dhd_prot_ring_init() argument
8300 dhd_prot_ring_reset(dhd_pub_t *dhd, msgbuf_ring_t *ring) dhd_prot_ring_reset() argument
8317 dhd_prot_ring_detach(dhd_pub_t *dhd, msgbuf_ring_t *ring) dhd_prot_ring_detach() argument
8381 msgbuf_ring_t *ring; dhd_prot_flowrings_pool_attach() local
8460 msgbuf_ring_t *ring; dhd_prot_flowrings_pool_reset() local
8487 msgbuf_ring_t *ring; dhd_prot_flowrings_pool_detach() local
8523 msgbuf_ring_t *ring; dhd_prot_flowrings_pool_fetch() local
8553 msgbuf_ring_t *ring; dhd_prot_flowrings_pool_release() local
8579 dhd_prot_get_ring_space(msgbuf_ring_t *ring, uint16 nitems, uint16 * alloced, bool exactly_nitems) dhd_prot_get_ring_space() argument
8625 __dhd_prot_ring_write_complete(dhd_pub_t *dhd, msgbuf_ring_t * ring, void* p, uint16 nitems) __dhd_prot_ring_write_complete() argument
8675 dhd_prot_ring_write_complete(dhd_pub_t *dhd, msgbuf_ring_t * ring, void* p, uint16 nitems) dhd_prot_ring_write_complete() argument
8690 dhd_prot_ring_write_complete_mbdata(dhd_pub_t *dhd, msgbuf_ring_t * ring, void *p, uint16 nitems, uint32 mb_data) dhd_prot_ring_write_complete_mbdata() argument
8713 dhd_prot_upd_read_idx(dhd_pub_t *dhd, msgbuf_ring_t * ring) dhd_prot_upd_read_idx() argument
9106 dhd_prot_get_read_addr(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint32 *available_len) dhd_prot_get_read_addr() argument
9617 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhd_prot_flow_ring_delete() local
9661 msgbuf_ring_t *ring = (msgbuf_ring_t *)flow_ring_node->prot_info; dhd_prot_flow_ring_fastdelete() local
9746 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhd_prot_flow_ring_flush() local
9883 msgbuf_ring_t *ring; dhd_prot_debug_info_print() local
10362 msgbuf_ring_t *ring = &prot->h2dring_ctrl_subn; dhd_prot_flow_ring_batch_suspend_request() local
11082 dhd_calc_hp2p_burst(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint16 flowid) dhd_calc_hp2p_burst() argument
[all...]
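
The dhd_msgbuf comments above note that the dongle DMAs the ring contents and the indices separately, which is why helpers such as dhd_prot_d2h_sync_seqnum() and dhd_prot_d2h_sync_xorcsum() exist: a message slot may be observed before its payload has fully landed in host memory. The sketch below illustrates the sequence-number variant of that check in generic terms; struct msg_hdr, the field names and the bounded retry count are illustrative assumptions, not the driver's definitions.

#include <stdbool.h>
#include <stdint.h>

struct msg_hdr {
    volatile uint32_t seqnum;   /* written by the producer as part of the DMA */
    uint32_t payload_len;
};

static bool d2h_msg_ready(const struct msg_hdr *msg, uint32_t expected_seq,
                          unsigned max_tries)
{
    for (unsigned i = 0; i < max_tries; i++) {
        if (msg->seqnum == expected_seq)
            return true;        /* message has fully landed in host memory */
        /* otherwise the DMA has not completed yet; poll again */
    }
    return false;               /* caller treats this as a livelock/error case */
}
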
/third_party/mesa3d/src/gallium/drivers/freedreno/a3xx/
fd3_query.c
47 occlusion_get_sample(struct fd_batch *batch, struct fd_ringbuffer *ring) in occlusion_get_sample() argument
55 OUT_PKT3(ring, CP_SET_CONSTANT, 3); in occlusion_get_sample()
56 OUT_RING(ring, CP_REG(REG_A3XX_RB_SAMPLE_COUNT_ADDR) | 0x80000000); in occlusion_get_sample()
57 OUT_RING(ring, HW_QUERY_BASE_REG); in occlusion_get_sample()
58 OUT_RING(ring, samp->offset); in occlusion_get_sample()
60 OUT_PKT0(ring, REG_A3XX_RB_SAMPLE_COUNT_CONTROL, 1); in occlusion_get_sample()
61 OUT_RING(ring, A3XX_RB_SAMPLE_COUNT_CONTROL_COPY); in occlusion_get_sample()
63 OUT_PKT3(ring, CP_DRAW_INDX, 3); in occlusion_get_sample()
64 OUT_RING(ring, 0x00000000); in occlusion_get_sample()
65 OUT_RING(ring, DRA in occlusion_get_sample()
[all...]
