/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/

radeon_ring.c
     88  ring->ring_free_dw &= ring->ptr_mask;  in radeon_ring_free_size()
    316  size &= ring->ptr_mask;  in radeon_ring_backup()
    330  ptr &= ring->ptr_mask;  in radeon_ring_backup()
    417  ring->ptr_mask = (ring->ring_size / 4) - 1;  in radeon_ring_init()
    506  i = (rptr + ring->ptr_mask + 1 - 32) & ring->ptr_mask;  in radeon_debugfs_ring_info_show()
    514  i = (i + 1) & ring->ptr_mask;  in radeon_debugfs_ring_info_show()
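All of the radeon hits above are the same power-of-two ring idiom: radeon_ring_init() sizes the ring in bytes, derives ptr_mask = (ring_size / 4) - 1 because indices are counted in dwords, and every other site wraps an index with & ptr_mask. A minimal standalone sketch of that wrap arithmetic (struct and helper names here are illustrative, not the driver's own):

    #include <stdint.h>

    /* Illustrative ring state; not struct radeon_ring itself. */
    struct ring {
        uint32_t ring_size; /* in bytes, must be a power of two */
        uint32_t ptr_mask;  /* (ring_size / 4) - 1, indices are dwords */
        uint32_t rptr, wptr;
    };

    static void ring_init(struct ring *r, uint32_t size_bytes)
    {
        r->ring_size = size_bytes;
        r->ptr_mask  = (size_bytes / 4) - 1; /* same formula as radeon_ring_init() */
        r->rptr = r->wptr = 0;
    }

    /* Free dwords between write and read pointer, wrapped with the mask
     * in the spirit of radeon_ring_free_size(); the real driver also
     * special-cases the "ring completely empty" result. */
    static uint32_t ring_free_dw(const struct ring *r)
    {
        return (r->rptr + (r->ring_size / 4) - r->wptr) & r->ptr_mask;
    }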
/kernel/linux/linux-5.10/drivers/net/ethernet/sfc/

tx_common.c
     18  return DIV_ROUND_UP(tx_queue->ptr_mask + 1,  in efx_tx_cb_page_count()
     31  tx_queue->ptr_mask = entries - 1;  in efx_probe_tx_queue()
     35  tx_queue->queue, efx->txq_entries, tx_queue->ptr_mask);  in efx_probe_tx_queue()
    111  buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];  in efx_fini_tx_queue()
    205  stop_index = (index + 1) & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    206  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    222  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    244  EFX_WARN_ON_ONCE_PARANOID(index > tx_queue->ptr_mask);  in efx_xmit_done()

ef100_tx.c
     27  (tx_queue->ptr_mask + 2) *  in ef100_tx_probe()
    136  write_ptr = tx_queue->write_count & tx_queue->ptr_mask;  in ef100_notify_tx_desc()
    264  write_ptr = new_write_count & tx_queue->ptr_mask;  in ef100_tx_make_descriptors()
    324  tx_queue->ptr_mask;  in ef100_ev_tx()
    346  if (!tx_queue->buffer || !tx_queue->ptr_mask) {  in ef100_enqueue_skb()

ef100_rx.c
    145  rx_queue->removed_count & rx_queue->ptr_mask);  in efx_ef100_ev_rx()
    158  idx = rx_queue->notified_count & rx_queue->ptr_mask;  in ef100_rx_write()
    169  rx_queue->added_count & rx_queue->ptr_mask);  in ef100_rx_write()
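The sfc TX/RX paths listed here share one convention: the queue size is a power of two, ptr_mask is set to entries - 1 at probe time, and the read/insert/added/removed counters run freely and are only masked at the point where they are used as array indices (e.g. buffer[read_count & ptr_mask]). A hedged sketch of that counter-plus-mask pattern, with invented names rather than the driver's structs:

    #include <stdint.h>

    /* Illustrative descriptor-ring bookkeeping in the sfc style. */
    struct queue {
        unsigned int ptr_mask;     /* entries - 1, entries is a power of two */
        unsigned int read_count;   /* free-running, never masked in place */
        unsigned int insert_count; /* free-running, never masked in place */
    };

    static void queue_init(struct queue *q, unsigned int entries)
    {
        /* caller ensures entries is a power of two,
         * e.g. entries = roundup_pow_of_two(requested) */
        q->ptr_mask = entries - 1;
        q->read_count = q->insert_count = 0;
    }

    /* Counters are masked only when used as an index, exactly like
     * buffer[tx_queue->read_count & tx_queue->ptr_mask] above. */
    static unsigned int queue_index(const struct queue *q, unsigned int count)
    {
        return count & q->ptr_mask;
    }

    /* The fill level needs no mask: unsigned wraparound keeps the
     * difference of the two free-running counters correct. */
    static unsigned int queue_fill_level(const struct queue *q)
    {
        return q->insert_count - q->read_count;
    }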
/kernel/linux/linux-5.10/drivers/net/ethernet/sfc/falcon/

tx.c
    362  stop_index = (index + 1) & tx_queue->ptr_mask;  in ef4_dequeue_buffers()
    363  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in ef4_dequeue_buffers()
    380  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in ef4_dequeue_buffers()
    498  EF4_BUG_ON_PARANOID(index > tx_queue->ptr_mask);  in ef4_xmit_done()
    535  return DIV_ROUND_UP(tx_queue->ptr_mask + 1, PAGE_SIZE >> EF4_TX_CB_ORDER);  in ef4_tx_cb_page_count()
    547  tx_queue->ptr_mask = entries - 1;  in ef4_probe_tx_queue()
    551  tx_queue->queue, efx->txq_entries, tx_queue->ptr_mask);  in ef4_probe_tx_queue()
    619  buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];  in ef4_fini_tx_queue()

rx.c
     78  if (unlikely(rx_buf == ef4_rx_buffer(rx_queue, rx_queue->ptr_mask)))  in ef4_rx_buf_next()
    191  index = rx_queue->added_count & rx_queue->ptr_mask;  in ef4_init_rx_buffers()
    560  (index + n_frags - 1) & rx_queue->ptr_mask, len,  in ef4_rx_packet()
    695  rx_queue->ptr_mask = entries - 1;  in ef4_probe_rx_queue()
    700  rx_queue->ptr_mask);  in ef4_probe_rx_queue()
    798  unsigned index = i & rx_queue->ptr_mask;  in ef4_fini_rx_queue()
/kernel/linux/linux-6.6/drivers/net/ethernet/sfc/falcon/

tx.c
    363  stop_index = (index + 1) & tx_queue->ptr_mask;  in ef4_dequeue_buffers()
    364  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in ef4_dequeue_buffers()
    381  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in ef4_dequeue_buffers()
    499  EF4_BUG_ON_PARANOID(index > tx_queue->ptr_mask);  in ef4_xmit_done()
    536  return DIV_ROUND_UP(tx_queue->ptr_mask + 1, PAGE_SIZE >> EF4_TX_CB_ORDER);  in ef4_tx_cb_page_count()
    548  tx_queue->ptr_mask = entries - 1;  in ef4_probe_tx_queue()
    552  tx_queue->queue, efx->txq_entries, tx_queue->ptr_mask);  in ef4_probe_tx_queue()
    620  buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];  in ef4_fini_tx_queue()

rx.c
     78  if (unlikely(rx_buf == ef4_rx_buffer(rx_queue, rx_queue->ptr_mask)))  in ef4_rx_buf_next()
    191  index = rx_queue->added_count & rx_queue->ptr_mask;  in ef4_init_rx_buffers()
    560  (index + n_frags - 1) & rx_queue->ptr_mask, len,  in ef4_rx_packet()
    695  rx_queue->ptr_mask = entries - 1;  in ef4_probe_rx_queue()
    700  rx_queue->ptr_mask);  in ef4_probe_rx_queue()
    800  unsigned index = i & rx_queue->ptr_mask;  in ef4_fini_rx_queue()
/kernel/linux/linux-6.6/drivers/net/ethernet/sfc/

tx_common.c
     19  return DIV_ROUND_UP(tx_queue->ptr_mask + 1,  in efx_tx_cb_page_count()
     32  tx_queue->ptr_mask = entries - 1;  in efx_probe_tx_queue()
     36  tx_queue->queue, efx->txq_entries, tx_queue->ptr_mask);  in efx_probe_tx_queue()
    115  buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];  in efx_fini_tx_queue()
    218  stop_index = (index + 1) & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    219  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    236  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    259  EFX_WARN_ON_ONCE_PARANOID(index > tx_queue->ptr_mask);  in efx_xmit_done()

ef100_tx.c
     27  (tx_queue->ptr_mask + 2) *  in ef100_tx_probe()
    120  write_ptr = tx_queue->write_count & tx_queue->ptr_mask;  in ef100_notify_tx_desc()
    278  write_ptr = new_write_count & tx_queue->ptr_mask;  in ef100_tx_make_descriptors()
    295  write_ptr = new_write_count & tx_queue->ptr_mask;  in ef100_tx_make_descriptors()
    358  tx_queue->ptr_mask;  in ef100_ev_tx()
    386  if (!tx_queue->buffer || !tx_queue->ptr_mask) {  in __ef100_enqueue_skb()

ef100_rx.c
    186  rx_queue->removed_count & rx_queue->ptr_mask);  in efx_ef100_ev_rx()
    200  idx = notified_count & rx_queue->ptr_mask;  in ef100_rx_write()
    213  rx_queue->added_count & rx_queue->ptr_mask);  in ef100_rx_write()
/kernel/linux/linux-6.6/drivers/net/ethernet/sfc/siena/

tx_common.c
     19  return DIV_ROUND_UP(tx_queue->ptr_mask + 1,  in efx_tx_cb_page_count()
     32  tx_queue->ptr_mask = entries - 1;  in efx_siena_probe_tx_queue()
     36  tx_queue->queue, efx->txq_entries, tx_queue->ptr_mask);  in efx_siena_probe_tx_queue()
    184  buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];  in efx_siena_fini_tx_queue()
    206  stop_index = (index + 1) & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    207  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    223  read_ptr = tx_queue->read_count & tx_queue->ptr_mask;  in efx_dequeue_buffers()
    245  EFX_WARN_ON_ONCE_PARANOID(index > tx_queue->ptr_mask);  in efx_siena_xmit_done()
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/

amdgpu_ih.c
     51  ih->ptr_mask = ih->ring_size - 1;  in amdgpu_ih_ring_init()
    159  wptr &= ih->ptr_mask;  in amdgpu_ih_ring_write()
    227  ih->rptr &= ih->ptr_mask;  in amdgpu_ih_process()
    295  ring_index = (rptr & ih->ptr_mask) >> 2;  in amdgpu_ih_decode_iv_ts_helper()

iceland_ih.c
    212  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in iceland_ih_get_wptr()
    213  ih->rptr = (wptr + 16) & ih->ptr_mask;  in iceland_ih_get_wptr()
    225  return (wptr & ih->ptr_mask);  in iceland_ih_get_wptr()

cik_ih.c
    202  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in cik_ih_get_wptr()
    203  ih->rptr = (wptr + 16) & ih->ptr_mask;  in cik_ih_get_wptr()
    214  return (wptr & ih->ptr_mask);  in cik_ih_get_wptr()

cz_ih.c
    213  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in cz_ih_get_wptr()
    214  ih->rptr = (wptr + 16) & ih->ptr_mask;  in cz_ih_get_wptr()
    226  return (wptr & ih->ptr_mask);  in cz_ih_get_wptr()

si_ih.c
    117  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in si_ih_get_wptr()
    118  ih->rptr = (wptr + 16) & ih->ptr_mask;  in si_ih_get_wptr()
    129  return (wptr & ih->ptr_mask);  in si_ih_get_wptr()

tonga_ih.c
    216  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in tonga_ih_get_wptr()
    217  ih->rptr = (wptr + 16) & ih->ptr_mask;  in tonga_ih_get_wptr()
    229  return (wptr & ih->ptr_mask);  in tonga_ih_get_wptr()
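The amdgpu interrupt-handler (IH) files above all use ptr_mask the same way: amdgpu_ih_ring_init() sets ptr_mask = ring_size - 1, the read pointer is advanced and then wrapped with the mask, and the per-ASIC *_ih_get_wptr() helpers wrap the hardware write pointer and, on a reported overflow, push rptr to (wptr + 16) & ptr_mask so the oldest, possibly clobbered entry is skipped. A small sketch of that overflow handling, with placeholder names rather than the driver's structs:

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative IH ring state; not struct amdgpu_ih_ring itself. */
    struct ih_ring {
        uint32_t ring_size; /* power of two */
        uint32_t ptr_mask;  /* ring_size - 1, as set in amdgpu_ih_ring_init() */
        uint32_t rptr;
    };

    /* Mirrors the shape of the *_ih_get_wptr() hits above: wrap the
     * hardware write pointer with ptr_mask, and on overflow resynchronise
     * rptr just past it (the +16 steps rptr past the overwritten entry). */
    static uint32_t ih_get_wptr(struct ih_ring *ih, uint32_t hw_wptr, bool overflow)
    {
        if (overflow)
            ih->rptr = (hw_wptr + 16) & ih->ptr_mask;
        return hw_wptr & ih->ptr_mask;
    }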
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/

radeon_ring.c
     89  ring->ring_free_dw &= ring->ptr_mask;  in radeon_ring_free_size()
    315  size &= ring->ptr_mask;  in radeon_ring_backup()
    329  ptr &= ring->ptr_mask;  in radeon_ring_backup()
    415  ring->ptr_mask = (ring->ring_size / 4) - 1;  in radeon_ring_init()
    509  i = (rptr + ring->ptr_mask + 1 - 32) & ring->ptr_mask;  in radeon_debugfs_ring_info()
    517  i = (i + 1) & ring->ptr_mask;  in radeon_debugfs_ring_info()
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/

amdgpu_ih.c
     51  ih->ptr_mask = ih->ring_size - 1;  in amdgpu_ih_ring_init()
    165  ih->rptr &= ih->ptr_mask;  in amdgpu_ih_process()

iceland_ih.c
    211  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in iceland_ih_get_wptr()
    212  ih->rptr = (wptr + 16) & ih->ptr_mask;  in iceland_ih_get_wptr()
    219  return (wptr & ih->ptr_mask);  in iceland_ih_get_wptr()

cz_ih.c
    212  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in cz_ih_get_wptr()
    213  ih->rptr = (wptr + 16) & ih->ptr_mask;  in cz_ih_get_wptr()
    220  return (wptr & ih->ptr_mask);  in cz_ih_get_wptr()

cik_ih.c
    201  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in cik_ih_get_wptr()
    202  ih->rptr = (wptr + 16) & ih->ptr_mask;  in cik_ih_get_wptr()
    207  return (wptr & ih->ptr_mask);  in cik_ih_get_wptr()

tonga_ih.c
    215  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in tonga_ih_get_wptr()
    216  ih->rptr = (wptr + 16) & ih->ptr_mask;  in tonga_ih_get_wptr()
    222  return (wptr & ih->ptr_mask);  in tonga_ih_get_wptr()

si_ih.c
    117  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);  in si_ih_get_wptr()
    118  ih->rptr = (wptr + 16) & ih->ptr_mask;  in si_ih_get_wptr()
    123  return (wptr & ih->ptr_mask);  in si_ih_get_wptr()