Home
last modified time | relevance | path

Searched refs:inbound (Results 1 - 25 of 41) sorted by relevance

12

/kernel/linux/linux-5.10/drivers/soc/qcom/
H A Dsmp2p.c36 * inbound entry.
83 * @domain: irq_domain for inbound entries
114 * @in: pointer to the inbound smem item
116 * @valid_entries: already scanned inbound entries
117 * @local_pid: processor id of the inbound edge
124 * @inbound: list of inbound entries
147 struct list_head inbound; member
202 list_for_each_entry(entry, &smp2p->inbound, node) { in qcom_smp2p_intr()
213 list_for_each_entry(entry, &smp2p->inbound, node) { in qcom_smp2p_intr()
[all...]
/kernel/linux/linux-6.6/drivers/soc/qcom/
H A Dsmp2p.c37 * inbound entry.
87 * @domain: irq_domain for inbound entries
118 * @in: pointer to the inbound smem item
121 * @valid_entries: already scanned inbound entries
125 * @local_pid: processor id of the inbound edge
132 * @inbound: list of inbound entries
159 struct list_head inbound; member
233 list_for_each_entry(entry, &smp2p->inbound, node) { in qcom_smp2p_notify_in()
244 list_for_each_entry(entry, &smp2p->inbound, node) { in qcom_smp2p_notify_in()
[all...]
/kernel/linux/linux-5.10/drivers/hv/
H A Dvmbus_drv.c424 struct hv_ring_buffer_debug_info inbound; in in_intr_mask_show() local
430 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_intr_mask_show()
434 return sprintf(buf, "%d\n", inbound.current_interrupt_mask); in in_intr_mask_show()
442 struct hv_ring_buffer_debug_info inbound; in in_read_index_show() local
448 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_read_index_show()
452 return sprintf(buf, "%d\n", inbound.current_read_index); in in_read_index_show()
460 struct hv_ring_buffer_debug_info inbound; in in_write_index_show() local
466 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, in in_write_index_show()
479 struct hv_ring_buffer_debug_info inbound; in in_read_bytes_avail_show() local
498 struct hv_ring_buffer_debug_info inbound; in in_write_bytes_avail_show() local
[all...]
H A Dring_buffer.c187 mutex_init(&channel->inbound.ring_buffer_mutex); in hv_ringbuffer_pre_init()
418 struct hv_ring_buffer_info *rbi = &channel->inbound; in hv_pkt_iter_first()
443 struct hv_ring_buffer_info *rbi = &channel->inbound; in __hv_pkt_iter_next()
491 struct hv_ring_buffer_info *rbi = &channel->inbound; in hv_pkt_iter_close()
H A Dconnection.c355 if (likely(hv_end_read(&channel->inbound) == 0)) in vmbus_on_event()
358 hv_begin_read(&channel->inbound); in vmbus_on_event()
H A Dchannel.c152 hv_ringbuffer_cleanup(&channel->inbound); in vmbus_free_ring()
534 err = hv_ringbuffer_init(&newchannel->inbound, in __vmbus_open()
628 hv_ringbuffer_cleanup(&newchannel->inbound); in __vmbus_open()
734 * the former is accessing channel->inbound.ring_buffer, the latter in vmbus_reset_channel_cb()
/kernel/linux/linux-6.6/drivers/hv/
H A Dvmbus_drv.c389 struct hv_ring_buffer_debug_info inbound; in in_intr_mask_show() local
395 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_intr_mask_show()
399 return sprintf(buf, "%d\n", inbound.current_interrupt_mask); in in_intr_mask_show()
407 struct hv_ring_buffer_debug_info inbound; in in_read_index_show() local
413 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_read_index_show()
417 return sprintf(buf, "%d\n", inbound.current_read_index); in in_read_index_show()
425 struct hv_ring_buffer_debug_info inbound; in in_write_index_show() local
431 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, in in_write_index_show()
444 struct hv_ring_buffer_debug_info inbound; in in_read_bytes_avail_show() local
463 struct hv_ring_buffer_debug_info inbound; in in_write_bytes_avail_show() local
[all...]
H A Dring_buffer.c180 mutex_init(&channel->inbound.ring_buffer_mutex); in hv_ringbuffer_pre_init()
465 struct hv_ring_buffer_info *rbi = &channel->inbound; in hv_pkt_iter_first()
525 struct hv_ring_buffer_info *rbi = &channel->inbound; in __hv_pkt_iter_next()
573 struct hv_ring_buffer_info *rbi = &channel->inbound; in hv_pkt_iter_close()
H A Dconnection.c400 if (likely(hv_end_read(&channel->inbound) == 0)) in vmbus_on_event()
403 hv_begin_read(&channel->inbound); in vmbus_on_event()
/kernel/linux/linux-6.6/drivers/uio/
H A Duio_hv_generic.c84 dev->channel->inbound.ring_buffer->interrupt_mask = !irq_state; in hv_uio_irqcontrol()
91 * Callback from vmbus_event when something is in inbound ring.
99 chan->inbound.ring_buffer->interrupt_mask = 1; in hv_uio_channel_cb()
168 new_sc->inbound.ring_buffer->interrupt_mask = 1; in hv_uio_new_channel()
211 dev->channel->inbound.ring_buffer->interrupt_mask = 1; in hv_uio_open()
/kernel/linux/linux-5.10/drivers/net/ppp/
H A Dppp_async.c107 int len, int inbound);
956 int len, int inbound) in async_lcp_peek()
972 if (code == (inbound? CONFACK: CONFREQ)) { in async_lcp_peek()
981 if (!inbound) { in async_lcp_peek()
992 } else if (inbound) in async_lcp_peek()
1003 if (inbound) in async_lcp_peek()
1010 if (inbound) in async_lcp_peek()
955 async_lcp_peek(struct asyncppp *ap, unsigned char *data, int len, int inbound) async_lcp_peek() argument
/kernel/linux/linux-6.6/drivers/net/ppp/
H A Dppp_async.c107 int len, int inbound);
944 int len, int inbound) in async_lcp_peek()
960 if (code == (inbound? CONFACK: CONFREQ)) { in async_lcp_peek()
969 if (!inbound) { in async_lcp_peek()
980 } else if (inbound) in async_lcp_peek()
991 if (inbound) in async_lcp_peek()
998 if (inbound) in async_lcp_peek()
943 async_lcp_peek(struct asyncppp *ap, unsigned char *data, int len, int inbound) async_lcp_peek() argument
/kernel/linux/linux-5.10/arch/arm/common/
H A Dmcpm_entry.c77 * false: the critical section was not entered because an inbound CPU was
87 /* Warn inbound CPUs that the cluster is being torn down: */ in __mcpm_outbound_enter_critical()
91 /* Back out if the inbound cluster is already in the critical region: */ in __mcpm_outbound_enter_critical()
92 sync_cache_r(&c->inbound); in __mcpm_outbound_enter_critical()
93 if (c->inbound == INBOUND_COMING_UP) in __mcpm_outbound_enter_critical()
179 * for an outbound CPU to call power_down() after its inbound counterpart
437 mcpm_sync.clusters[i].inbound = INBOUND_NOT_COMING_UP; in mcpm_sync_init()
/kernel/linux/linux-6.6/arch/arm/common/
H A Dmcpm_entry.c77 * false: the critical section was not entered because an inbound CPU was
87 /* Warn inbound CPUs that the cluster is being torn down: */ in __mcpm_outbound_enter_critical()
91 /* Back out if the inbound cluster is already in the critical region: */ in __mcpm_outbound_enter_critical()
92 sync_cache_r(&c->inbound); in __mcpm_outbound_enter_critical()
93 if (c->inbound == INBOUND_COMING_UP) in __mcpm_outbound_enter_critical()
179 * for an outbound CPU to call power_down() after its inbound counterpart
437 mcpm_sync.clusters[i].inbound = INBOUND_NOT_COMING_UP; in mcpm_sync_init()
/kernel/linux/linux-5.10/drivers/uio/
H A Duio_hv_generic.c84 dev->channel->inbound.ring_buffer->interrupt_mask = !irq_state; in hv_uio_irqcontrol()
91 * Callback from vmbus_event when something is in inbound ring.
99 chan->inbound.ring_buffer->interrupt_mask = 1; in hv_uio_channel_cb()
169 new_sc->inbound.ring_buffer->interrupt_mask = 1; in hv_uio_new_channel()
214 dev->channel->inbound.ring_buffer->interrupt_mask = 1; in hv_uio_open()
/kernel/linux/linux-5.10/arch/arm/include/asm/
H A Dmcpm.h293 /* inbound-side state */
294 s8 inbound __aligned(__CACHE_WRITEBACK_GRANULE);
/kernel/linux/linux-6.6/arch/arm/include/asm/
H A Dmcpm.h293 /* inbound-side state */
294 s8 inbound __aligned(__CACHE_WRITEBACK_GRANULE);
/kernel/linux/linux-5.10/include/net/sctp/
H A Dulpevent.h79 __u16 inbound,
/kernel/linux/linux-6.6/include/net/sctp/
H A Dulpevent.h79 __u16 inbound,
/kernel/linux/linux-6.6/include/linux/netfilter/
H A Dnf_conntrack_pptp.h309 int (*inbound)(struct sk_buff *skb, member
/kernel/linux/linux-5.10/net/vmw_vsock/
H A Dhyperv_transport.c184 u32 readable = hv_get_bytes_to_read(&chan->inbound); in hvs_channel_readable()
192 u32 readable = hv_get_bytes_to_read(&chan->inbound); in hvs_channel_readable_payload()
/kernel/linux/linux-6.6/net/vmw_vsock/
H A Dhyperv_transport.c187 u32 readable = hv_get_bytes_to_read(&chan->inbound); in hvs_channel_readable()
195 u32 readable = hv_get_bytes_to_read(&chan->inbound); in hvs_channel_readable_payload()
/kernel/linux/linux-5.10/drivers/perf/
H A Dxgene_pmu.c324 XGENE_PMU_EVENT_ATTR(csw-inbound-dirty, 0x16),
468 XGENE_PMU_EVENT_ATTR(cswlf-inbound-snoop-fifo-backpressure, 0x2a),
470 XGENE_PMU_EVENT_ATTR(cswlf-inbound-gack-fifo-backpressure, 0x2c),
472 XGENE_PMU_EVENT_ATTR(cswlf-inbound-data-fifo-backpressure, 0x2e),
473 XGENE_PMU_EVENT_ATTR(cswlf-inbound-req-backpressure, 0x2f),
/kernel/linux/linux-6.6/drivers/perf/
H A Dxgene_pmu.c321 XGENE_PMU_EVENT_ATTR(csw-inbound-dirty, 0x16),
465 XGENE_PMU_EVENT_ATTR(cswlf-inbound-snoop-fifo-backpressure, 0x2a),
467 XGENE_PMU_EVENT_ATTR(cswlf-inbound-gack-fifo-backpressure, 0x2c),
469 XGENE_PMU_EVENT_ATTR(cswlf-inbound-data-fifo-backpressure, 0x2e),
470 XGENE_PMU_EVENT_ATTR(cswlf-inbound-req-backpressure, 0x2f),
/kernel/linux/linux-6.6/net/ipv4/netfilter/
H A Dnf_nat_pptp.c229 /* inbound packets == from PAC to PNS */
270 pr_debug("unknown inbound packet %s\n", pptp_msg_name(msg)); in pptp_inbound_pkt()
300 .inbound = pptp_inbound_pkt,

Completed in 25 milliseconds

12