
Searched refs:rbi (Results 1 - 15 of 15) sorted by relevance

/kernel/linux/linux-5.10/drivers/hv/
ring_buffer.c
52 struct hv_ring_buffer_info *rbi = &channel->outbound; in hv_signal_on_write() local
55 if (READ_ONCE(rbi->ring_buffer->interrupt_mask)) in hv_signal_on_write()
64 if (old_write == READ_ONCE(rbi->ring_buffer->read_index)) { in hv_signal_on_write()
140 hv_get_ringbuffer_availbytes(const struct hv_ring_buffer_info *rbi, in hv_get_ringbuffer_availbytes() argument
146 read_loc = READ_ONCE(rbi->ring_buffer->read_index); in hv_get_ringbuffer_availbytes()
147 write_loc = READ_ONCE(rbi->ring_buffer->write_index); in hv_get_ringbuffer_availbytes()
148 dsize = rbi->ring_datasize; in hv_get_ringbuffer_availbytes()
391 static u32 hv_pkt_iter_avail(const struct hv_ring_buffer_info *rbi) in hv_pkt_iter_avail() argument
393 u32 priv_read_loc = rbi->priv_read_index; in hv_pkt_iter_avail()
403 write_loc = virt_load_acquire(&rbi in hv_pkt_iter_avail()
418 struct hv_ring_buffer_info *rbi = &channel->inbound; hv_pkt_iter_first() local
443 struct hv_ring_buffer_info *rbi = &channel->inbound; __hv_pkt_iter_next() local
459 hv_pkt_iter_bytes_read(const struct hv_ring_buffer_info *rbi, u32 start_read_index) hv_pkt_iter_bytes_read() argument
491 struct hv_ring_buffer_info *rbi = &channel->inbound; hv_pkt_iter_close() local
[all...]
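The hv_signal_on_write() hits above capture the usual "only wake the consumer when it could have seen an empty ring" rule: skip the signal when the reader has masked interrupts, and signal only when the reader's index had caught up to where this write began. A minimal user-space model of that decision (plain volatile loads stand in for READ_ONCE(); all names and types here are illustrative, not the kernel's):

#include <stdbool.h>
#include <stdint.h>

/* Illustrative stand-in for the outbound hv_ring_buffer state. */
struct out_ring {
        volatile uint32_t interrupt_mask;  /* reader sets this to suppress signals */
        volatile uint32_t read_index;      /* advanced by the reader               */
};

/*
 * Decide whether the producer must signal the consumer after publishing data
 * that started at old_write: no signal if the reader masked interrupts, and a
 * signal only when the reader had already consumed everything up to old_write
 * (so it may have seen an empty ring and gone idle). The in-tree version also
 * puts a full memory barrier between publishing the write and this re-read.
 */
static bool need_signal_on_write(const struct out_ring *rb, uint32_t old_write)
{
        if (rb->interrupt_mask)
                return false;

        return rb->read_index == old_write;
}

int main(void)
{
        struct out_ring rb = { .interrupt_mask = 0, .read_index = 64 };

        /* Reader was caught up to where we began writing: it needs a kick. */
        return need_signal_on_write(&rb, 64) ? 0 : 1;
}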
vmbus_drv.c
1639 struct hv_ring_buffer_info *rbi = &channel->outbound; in out_mask_show() local
1642 mutex_lock(&rbi->ring_buffer_mutex); in out_mask_show()
1643 if (!rbi->ring_buffer) { in out_mask_show()
1644 mutex_unlock(&rbi->ring_buffer_mutex); in out_mask_show()
1648 ret = sprintf(buf, "%u\n", rbi->ring_buffer->interrupt_mask); in out_mask_show()
1649 mutex_unlock(&rbi->ring_buffer_mutex); in out_mask_show()
1656 struct hv_ring_buffer_info *rbi = &channel->inbound; in in_mask_show() local
1659 mutex_lock(&rbi->ring_buffer_mutex); in in_mask_show()
1660 if (!rbi->ring_buffer) { in in_mask_show()
1661 mutex_unlock(&rbi in in_mask_show()
1673 struct hv_ring_buffer_info *rbi = &channel->inbound; read_avail_show() local
1690 struct hv_ring_buffer_info *rbi = &channel->outbound; write_avail_show() local
[all...]
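The vmbus_drv.c hits show the sysfs attribute pattern shared by the *_mask_show() and *_avail_show() readers: take ring_buffer_mutex, bail out with an error if the ring buffer has already been torn down, otherwise format the value while still holding the lock. A user-space analogue of that shape (a pthread mutex in place of the kernel mutex; struct names are simplified stand-ins):

#include <errno.h>
#include <pthread.h>
#include <stdio.h>

/* Illustrative stand-ins for hv_ring_buffer / hv_ring_buffer_info. */
struct ring_data {
        unsigned int interrupt_mask;
};

struct ring_info {
        pthread_mutex_t ring_buffer_mutex;
        struct ring_data *ring_buffer;   /* NULL once the channel is torn down */
};

/* Shape of out_mask_show()/in_mask_show(): the mutex covers both the NULL
 * check and the dereference, so teardown cannot free the buffer mid-read. */
static int mask_show(struct ring_info *rbi, char *buf, size_t len)
{
        int ret;

        pthread_mutex_lock(&rbi->ring_buffer_mutex);
        if (!rbi->ring_buffer) {
                pthread_mutex_unlock(&rbi->ring_buffer_mutex);
                return -EINVAL;
        }
        ret = snprintf(buf, len, "%u\n", rbi->ring_buffer->interrupt_mask);
        pthread_mutex_unlock(&rbi->ring_buffer_mutex);
        return ret;
}

int main(void)
{
        struct ring_data rb = { .interrupt_mask = 1 };
        struct ring_info rbi = { .ring_buffer_mutex = PTHREAD_MUTEX_INITIALIZER,
                                 .ring_buffer = &rb };
        char buf[16];

        if (mask_show(&rbi, buf, sizeof(buf)) > 0)
                fputs(buf, stdout);
        return 0;
}

Build with cc -pthread; the point is only that the lock, not reference counting, is what keeps the pointer valid for the duration of the read.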
/kernel/linux/linux-6.6/drivers/hv/
ring_buffer.c
54 struct hv_ring_buffer_info *rbi = &channel->outbound; in hv_signal_on_write() local
57 if (READ_ONCE(rbi->ring_buffer->interrupt_mask)) in hv_signal_on_write()
66 if (old_write == READ_ONCE(rbi->ring_buffer->read_index)) { in hv_signal_on_write()
133 hv_get_ringbuffer_availbytes(const struct hv_ring_buffer_info *rbi, in hv_get_ringbuffer_availbytes() argument
139 read_loc = READ_ONCE(rbi->ring_buffer->read_index); in hv_get_ringbuffer_availbytes()
140 write_loc = READ_ONCE(rbi->ring_buffer->write_index); in hv_get_ringbuffer_availbytes()
141 dsize = rbi->ring_datasize; in hv_get_ringbuffer_availbytes()
438 static u32 hv_pkt_iter_avail(const struct hv_ring_buffer_info *rbi) in hv_pkt_iter_avail() argument
440 u32 priv_read_loc = rbi->priv_read_index; in hv_pkt_iter_avail()
450 write_loc = virt_load_acquire(&rbi in hv_pkt_iter_avail()
465 struct hv_ring_buffer_info *rbi = &channel->inbound; hv_pkt_iter_first() local
525 struct hv_ring_buffer_info *rbi = &channel->inbound; __hv_pkt_iter_next() local
541 hv_pkt_iter_bytes_read(const struct hv_ring_buffer_info *rbi, u32 start_read_index) hv_pkt_iter_bytes_read() argument
573 struct hv_ring_buffer_info *rbi = &channel->inbound; hv_pkt_iter_close() local
[all...]
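The 6.6 copy of ring_buffer.c shows the same helpers plus the packet iterator; the key detail in the hv_pkt_iter_avail() hits is the acquire load of the host's write index (virt_load_acquire() in the listing) before any packet bytes are parsed. A C11 sketch of that ordering, with simplified types that only stand in for the kernel's structures:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

struct pkt_ring {
        _Atomic uint32_t write_index;  /* published by the producer with release */
        uint32_t priv_read_index;      /* reader-private cursor, as in the hits  */
        uint32_t datasize;
};

static uint32_t pkt_iter_avail(struct pkt_ring *r)
{
        /* Pairs with the producer's release store of write_index, so the data
         * written before that store is visible once we compute availability. */
        uint32_t write_loc = atomic_load_explicit(&r->write_index,
                                                  memory_order_acquire);
        uint32_t read_loc = r->priv_read_index;

        return write_loc >= read_loc ? write_loc - read_loc
                                     : r->datasize - read_loc + write_loc;
}

int main(void)
{
        struct pkt_ring r = { .priv_read_index = 16, .datasize = 4096 };

        atomic_store_explicit(&r.write_index, 128, memory_order_release);
        printf("%u bytes pending\n", pkt_iter_avail(&r));
        return 0;
}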
vmbus_drv.c
1526 struct hv_ring_buffer_info *rbi = &channel->outbound; in out_mask_show() local
1529 mutex_lock(&rbi->ring_buffer_mutex); in out_mask_show()
1530 if (!rbi->ring_buffer) { in out_mask_show()
1531 mutex_unlock(&rbi->ring_buffer_mutex); in out_mask_show()
1535 ret = sprintf(buf, "%u\n", rbi->ring_buffer->interrupt_mask); in out_mask_show()
1536 mutex_unlock(&rbi->ring_buffer_mutex); in out_mask_show()
1543 struct hv_ring_buffer_info *rbi = &channel->inbound; in in_mask_show() local
1546 mutex_lock(&rbi->ring_buffer_mutex); in in_mask_show()
1547 if (!rbi->ring_buffer) { in in_mask_show()
1548 mutex_unlock(&rbi in in_mask_show()
1560 struct hv_ring_buffer_info *rbi = &channel->inbound; read_avail_show() local
1577 struct hv_ring_buffer_info *rbi = &channel->outbound; write_avail_show() local
[all...]
/kernel/linux/linux-5.10/include/linux/
hyperv.h
187 static inline u32 hv_get_bytes_to_read(const struct hv_ring_buffer_info *rbi) in hv_get_bytes_to_read() argument
191 dsize = rbi->ring_datasize; in hv_get_bytes_to_read()
192 read_loc = rbi->ring_buffer->read_index; in hv_get_bytes_to_read()
193 write_loc = READ_ONCE(rbi->ring_buffer->write_index); in hv_get_bytes_to_read()
201 static inline u32 hv_get_bytes_to_write(const struct hv_ring_buffer_info *rbi) in hv_get_bytes_to_write() argument
205 dsize = rbi->ring_datasize; in hv_get_bytes_to_write()
206 read_loc = READ_ONCE(rbi->ring_buffer->read_index); in hv_get_bytes_to_write()
207 write_loc = rbi->ring_buffer->write_index; in hv_get_bytes_to_write()
215 const struct hv_ring_buffer_info *rbi) in hv_get_avail_to_write_percent()
217 u32 avail_write = hv_get_bytes_to_write(rbi); in hv_get_avail_to_write_percent()
214 hv_get_avail_to_write_percent( const struct hv_ring_buffer_info *rbi) hv_get_avail_to_write_percent() argument
535 hv_ringbuffer_pending_size(const struct hv_ring_buffer_info *rbi) hv_ringbuffer_pending_size() argument
1580 hv_begin_read(struct hv_ring_buffer_info *rbi) hv_begin_read() argument
1591 hv_end_read(struct hv_ring_buffer_info *rbi) hv_end_read() argument
[all...]
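The hyperv.h hits define the inline bookkeeping the drivers above rely on: hv_get_bytes_to_read()/hv_get_bytes_to_write() compute the used and free spans of the ring from read_index, write_index and ring_datasize, and hv_get_avail_to_write_percent() turns the free span into a percentage. A plain-division sketch of that arithmetic (volatile loads approximate READ_ONCE(); field names mirror the hits, but the types are simplified stand-ins):

#include <stdint.h>
#include <stdio.h>

struct ring_info {
        /* In-tree, each helper uses READ_ONCE() only on the index the other
         * side advances; volatile here plays that role for both. */
        volatile uint32_t read_index;
        volatile uint32_t write_index;
        uint32_t ring_datasize;
};

static uint32_t bytes_to_read(const struct ring_info *rbi)
{
        uint32_t read_loc = rbi->read_index;
        uint32_t write_loc = rbi->write_index;
        uint32_t dsize = rbi->ring_datasize;

        return write_loc >= read_loc ? write_loc - read_loc
                                     : dsize - read_loc + write_loc;
}

static uint32_t bytes_to_write(const struct ring_info *rbi)
{
        return rbi->ring_datasize - bytes_to_read(rbi);
}

/* Plain-division version of the hv_get_avail_to_write_percent() idea. */
static uint32_t avail_to_write_percent(const struct ring_info *rbi)
{
        return bytes_to_write(rbi) * 100 / rbi->ring_datasize;
}

int main(void)
{
        struct ring_info rbi = { .read_index = 512, .write_index = 1536,
                                 .ring_datasize = 4096 };

        printf("to read %u, to write %u (%u%%)\n", bytes_to_read(&rbi),
               bytes_to_write(&rbi), avail_to_write_percent(&rbi));
        return 0;
}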
/kernel/linux/linux-6.6/drivers/net/vmxnet3/
vmxnet3_drv.c
622 struct vmxnet3_rx_buf_info *rbi; in vmxnet3_rq_alloc_rx_buf() local
625 rbi = rbi_base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
627 rbi->comp_state = VMXNET3_RXD_COMP_PENDING; in vmxnet3_rq_alloc_rx_buf()
629 if (rbi->buf_type == VMXNET3_RX_BUF_XDP) { in vmxnet3_rq_alloc_rx_buf()
631 &rbi->dma_addr, in vmxnet3_rq_alloc_rx_buf()
637 rbi->page = virt_to_page(data); in vmxnet3_rq_alloc_rx_buf()
639 } else if (rbi->buf_type == VMXNET3_RX_BUF_SKB) { in vmxnet3_rq_alloc_rx_buf()
640 if (rbi->skb == NULL) { in vmxnet3_rq_alloc_rx_buf()
641 rbi->skb = __netdev_alloc_skb_ip_align(adapter->netdev, in vmxnet3_rq_alloc_rx_buf()
642 rbi in vmxnet3_rq_alloc_rx_buf()
719 vmxnet3_append_frag(struct sk_buff *skb, struct Vmxnet3_RxCompDesc *rcd, struct vmxnet3_rx_buf_info *rbi) vmxnet3_append_frag() argument
1525 struct vmxnet3_rx_buf_info *rbi; vmxnet3_rq_rx_complete() local
1922 struct vmxnet3_rx_buf_info *rbi; vmxnet3_rq_cleanup() local
[all...]
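The vmxnet3_rq_alloc_rx_buf() hits walk the fill ring from next2fill and replenish each descriptor according to its buf_type: XDP slots always get a fresh page-pool buffer, SKB slots are refilled only when their skb was consumed. A user-space model of that per-descriptor dispatch (malloc() stands in for both the page-pool and netdev skb allocators; every name is a simplified stand-in):

#include <stdlib.h>

enum rx_buf_type { RX_BUF_XDP, RX_BUF_SKB };

struct rx_buf_info {
        enum rx_buf_type buf_type;
        void *buf;            /* page for XDP slots, skb data for SKB slots */
        size_t len;
};

struct rx_ring {
        struct rx_buf_info *rbi_base;
        unsigned int size;
        unsigned int next2fill;
};

/* Replenish up to num_to_alloc descriptors, mirroring the branch structure of
 * the vmxnet3_rq_alloc_rx_buf() hits. */
static int alloc_rx_buf(struct rx_ring *ring, unsigned int num_to_alloc)
{
        while (num_to_alloc--) {
                struct rx_buf_info *rbi = ring->rbi_base + ring->next2fill;

                if (rbi->buf_type == RX_BUF_XDP) {
                        rbi->buf = malloc(rbi->len);  /* page-pool alloc in-tree */
                } else if (rbi->buf_type == RX_BUF_SKB && !rbi->buf) {
                        rbi->buf = malloc(rbi->len);  /* netdev skb alloc in-tree */
                }
                if (!rbi->buf)
                        return -1;  /* out of memory: stop, let a later refill retry */

                ring->next2fill = (ring->next2fill + 1) % ring->size;
        }
        return 0;
}

int main(void)
{
        struct rx_buf_info slots[4] = {
                { .buf_type = RX_BUF_XDP, .len = 2048 },
                { .buf_type = RX_BUF_SKB, .len = 1536 },
                { .buf_type = RX_BUF_XDP, .len = 2048 },
                { .buf_type = RX_BUF_SKB, .len = 1536 },
        };
        struct rx_ring ring = { .rbi_base = slots, .size = 4, .next2fill = 0 };

        return alloc_rx_buf(&ring, 4) ? 1 : 0;
}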
vmxnet3_xdp.c
371 struct vmxnet3_rx_buf_info *rbi, in vmxnet3_process_xdp()
382 page = rbi->page; in vmxnet3_process_xdp()
385 rq->page_pool->p.offset, rbi->len, in vmxnet3_process_xdp()
390 rbi->len, false); in vmxnet3_process_xdp()
413 rbi->page = virt_to_page(new_data); in vmxnet3_process_xdp()
414 rbi->dma_addr = new_dma_addr; in vmxnet3_process_xdp()
415 rxd->addr = cpu_to_le64(rbi->dma_addr); in vmxnet3_process_xdp()
416 rxd->len = rbi->len; in vmxnet3_process_xdp()
368 vmxnet3_process_xdp(struct vmxnet3_adapter *adapter, struct vmxnet3_rx_queue *rq, struct Vmxnet3_RxCompDesc *rcd, struct vmxnet3_rx_buf_info *rbi, struct Vmxnet3_RxDesc *rxd, struct sk_buff **skb_xdp_pass) vmxnet3_process_xdp() argument
vmxnet3_xdp.h
32 struct vmxnet3_rx_buf_info *rbi,
/kernel/linux/linux-5.10/drivers/net/vmxnet3/
vmxnet3_drv.c
575 struct vmxnet3_rx_buf_info *rbi; in vmxnet3_rq_alloc_rx_buf() local
578 rbi = rbi_base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
581 if (rbi->buf_type == VMXNET3_RX_BUF_SKB) { in vmxnet3_rq_alloc_rx_buf()
582 if (rbi->skb == NULL) { in vmxnet3_rq_alloc_rx_buf()
583 rbi->skb = __netdev_alloc_skb_ip_align(adapter->netdev, in vmxnet3_rq_alloc_rx_buf()
584 rbi->len, in vmxnet3_rq_alloc_rx_buf()
586 if (unlikely(rbi->skb == NULL)) { in vmxnet3_rq_alloc_rx_buf()
591 rbi->dma_addr = dma_map_single( in vmxnet3_rq_alloc_rx_buf()
593 rbi->skb->data, rbi in vmxnet3_rq_alloc_rx_buf()
659 vmxnet3_append_frag(struct sk_buff *skb, struct Vmxnet3_RxCompDesc *rcd, struct vmxnet3_rx_buf_info *rbi) vmxnet3_append_frag() argument
1378 struct vmxnet3_rx_buf_info *rbi; vmxnet3_rq_rx_complete() local
[all...]
/kernel/linux/linux-6.6/include/linux/
hyperv.h
191 static inline u32 hv_get_bytes_to_read(const struct hv_ring_buffer_info *rbi) in hv_get_bytes_to_read() argument
195 dsize = rbi->ring_datasize; in hv_get_bytes_to_read()
196 read_loc = rbi->ring_buffer->read_index; in hv_get_bytes_to_read()
197 write_loc = READ_ONCE(rbi->ring_buffer->write_index); in hv_get_bytes_to_read()
205 static inline u32 hv_get_bytes_to_write(const struct hv_ring_buffer_info *rbi) in hv_get_bytes_to_write() argument
209 dsize = rbi->ring_datasize; in hv_get_bytes_to_write()
210 read_loc = READ_ONCE(rbi->ring_buffer->read_index); in hv_get_bytes_to_write()
211 write_loc = rbi->ring_buffer->write_index; in hv_get_bytes_to_write()
219 const struct hv_ring_buffer_info *rbi) in hv_get_avail_to_write_percent()
221 u32 avail_write = hv_get_bytes_to_write(rbi); in hv_get_avail_to_write_percent()
218 hv_get_avail_to_write_percent( const struct hv_ring_buffer_info *rbi) hv_get_avail_to_write_percent() argument
1664 hv_begin_read(struct hv_ring_buffer_info *rbi) hv_begin_read() argument
1675 hv_end_read(struct hv_ring_buffer_info *rbi) hv_end_read() argument
[all...]
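The 6.6 hyperv.h entry also lists the read-side batching helpers hv_begin_read()/hv_end_read(). A C11 sketch of the mask/drain/unmask-and-recheck handshake I understand these helpers to implement (atomic_thread_fence() stands in for the kernel's virt_mb()-style barriers; the structures are simplified stand-ins, not the in-tree definitions):

#include <stdatomic.h>
#include <stdint.h>

struct in_ring {
        _Atomic uint32_t interrupt_mask;  /* nonzero: host must not signal us */
        _Atomic uint32_t read_index;
        _Atomic uint32_t write_index;
        uint32_t datasize;
};

static uint32_t bytes_to_read(struct in_ring *rb)
{
        uint32_t r = atomic_load_explicit(&rb->read_index, memory_order_relaxed);
        uint32_t w = atomic_load_explicit(&rb->write_index, memory_order_acquire);

        return w >= r ? w - r : rb->datasize - r + w;
}

/* hv_begin_read() role: suppress host signals while we batch-process. */
static void begin_read(struct in_ring *rb)
{
        atomic_store_explicit(&rb->interrupt_mask, 1, memory_order_relaxed);
        atomic_thread_fence(memory_order_seq_cst);   /* mask visible before we read */
}

/* hv_end_read() role: re-enable signals, then re-check so data that arrived
 * while the mask was set (and therefore produced no signal) is not stranded. */
static uint32_t end_read(struct in_ring *rb)
{
        atomic_store_explicit(&rb->interrupt_mask, 0, memory_order_relaxed);
        atomic_thread_fence(memory_order_seq_cst);   /* unmask before the re-check */
        return bytes_to_read(rb);
}

int main(void)
{
        struct in_ring rb = { .datasize = 4096 };

        begin_read(&rb);
        /* ...drain packets here... */
        return end_read(&rb) ? 1 : 0;   /* nonzero means: process again */
}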
/kernel/linux/linux-5.10/kernel/rcu/
rcutorture.c
886 struct rcu_boost_inflight rbi = { .inflight = 0 }; in rcu_torture_boost() local
893 init_rcu_head_on_stack(&rbi.rcu); in rcu_torture_boost()
925 if (!smp_load_acquire(&rbi.inflight)) { in rcu_torture_boost()
927 smp_store_release(&rbi.inflight, 1); in rcu_torture_boost()
928 call_rcu(&rbi.rcu, rcu_torture_boost_cb); in rcu_torture_boost()
945 if (!failed && smp_load_acquire(&rbi.inflight)) in rcu_torture_boost()
971 while (!kthread_should_stop() || smp_load_acquire(&rbi.inflight)) { in rcu_torture_boost()
975 destroy_rcu_head_on_stack(&rbi.rcu); in rcu_torture_boost()
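The rcutorture.c hits show the rcu_boost_inflight bookkeeping: the boost thread posts a new call_rcu() callback only after an acquire load sees inflight == 0, sets the flag with a release store before posting, the callback clears it, and shutdown spins until the flag drains. A user-space C11 model of that single-in-flight handshake (a detached thread plays the role of the RCU callback; all names are illustrative):

#include <pthread.h>
#include <stdatomic.h>
#include <unistd.h>

/* Stand-in for struct rcu_boost_inflight: set while exactly one
 * asynchronous callback is outstanding. */
static atomic_int inflight;

/* Plays the role of rcu_torture_boost_cb(): clear the flag when done. */
static void *boost_cb(void *arg)
{
        (void)arg;
        usleep(1000);                                        /* pretend to do work */
        atomic_store_explicit(&inflight, 0, memory_order_release);
        return NULL;
}

int main(void)
{
        pthread_t cb;

        /* Post a new callback only if the previous one has retired
         * (the smp_load_acquire()/smp_store_release() pair in the hits). */
        if (!atomic_load_explicit(&inflight, memory_order_acquire)) {
                atomic_store_explicit(&inflight, 1, memory_order_release);
                pthread_create(&cb, NULL, boost_cb, NULL);   /* call_rcu() stand-in */
                pthread_detach(cb);
        }

        /* Shutdown path: wait for the in-flight callback to drain. */
        while (atomic_load_explicit(&inflight, memory_order_acquire))
                usleep(100);

        return 0;
}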
/kernel/linux/linux-5.10/drivers/md/
raid5.c
1075 struct bio *bi, *rbi; in ops_run_io() local
1098 rbi = &sh->dev[i].rreq; /* For writing to replacement */ in ops_run_io()
1241 bio_set_dev(rbi, rrdev->bdev); in ops_run_io()
1242 bio_set_op_attrs(rbi, op, op_flags); in ops_run_io()
1244 rbi->bi_end_io = raid5_end_write_request; in ops_run_io()
1245 rbi->bi_private = sh; in ops_run_io()
1250 rbi->bi_opf, i); in ops_run_io()
1255 rbi->bi_iter.bi_sector = (sh->sector in ops_run_io()
1258 rbi->bi_iter.bi_sector = (sh->sector in ops_run_io()
1263 rbi in ops_run_io()
1389 struct bio *rbi, *rbi2; ops_complete_biofill() local
1422 struct bio *rbi; ops_run_biofill() local
[all...]
/kernel/linux/linux-5.10/drivers/net/hyperv/
netvsc.c
1460 struct hv_ring_buffer_info *rbi = &channel->inbound; in netvsc_channel_cb() local
1463 prefetch(hv_get_ring_buffer(rbi) + rbi->priv_read_index); in netvsc_channel_cb()
1467 hv_begin_read(rbi); in netvsc_channel_cb()
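netvsc_channel_cb() combines two of the patterns above: it prefetches the cache line holding the next packet (hv_get_ring_buffer(rbi) + rbi->priv_read_index) and then calls hv_begin_read() to mask host signals for the batch, as in the begin_read() sketch earlier. A small sketch of that entry sequence (the __builtin_prefetch() GCC/Clang builtin stands in for the kernel's prefetch(); types are illustrative):

#include <stdint.h>

struct in_ring {
        volatile uint32_t interrupt_mask;
        uint32_t priv_read_index;
        uint8_t *data;        /* base of the ring's data area */
};

static void channel_cb(struct in_ring *rbi)
{
        /* Warm the cache line holding the next packet descriptor before the
         * poll path starts parsing it. */
        __builtin_prefetch(rbi->data + rbi->priv_read_index);

        /* hv_begin_read() role: suppress host signals while the batch drains. */
        rbi->interrupt_mask = 1;
        __atomic_thread_fence(__ATOMIC_SEQ_CST);

        /* ...schedule/drain packet processing here... */
}

int main(void)
{
        uint8_t page[4096] = { 0 };
        struct in_ring rbi = { .priv_read_index = 0, .data = page };

        channel_cb(&rbi);
        return 0;
}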
/kernel/linux/linux-6.6/drivers/md/
raid5.c
1159 struct bio *bi, *rbi; in ops_run_io() local
1183 rbi = &dev->rreq; /* For writing to replacement */ in ops_run_io()
1322 bio_init(rbi, rrdev->bdev, &dev->rvec, 1, op | op_flags); in ops_run_io()
1324 rbi->bi_end_io = raid5_end_write_request; in ops_run_io()
1325 rbi->bi_private = sh; in ops_run_io()
1330 rbi->bi_opf, i); in ops_run_io()
1335 rbi->bi_iter.bi_sector = (sh->sector in ops_run_io()
1338 rbi->bi_iter.bi_sector = (sh->sector in ops_run_io()
1343 rbi->bi_vcnt = 1; in ops_run_io()
1344 rbi in ops_run_io()
1467 struct bio *rbi, *rbi2; ops_complete_biofill() local
1500 struct bio *rbi; ops_run_biofill() local
[all...]
/kernel/linux/linux-6.6/drivers/net/hyperv/
netvsc.c
1695 struct hv_ring_buffer_info *rbi = &channel->inbound; in netvsc_channel_cb() local
1698 prefetch(hv_get_ring_buffer(rbi) + rbi->priv_read_index); in netvsc_channel_cb()
1702 hv_begin_read(rbi); in netvsc_channel_cb()

Completed in 33 milliseconds