Searched refs: src_list (Results 1 - 25 of 25) sorted by relevance

/kernel/linux/linux-5.10/crypto/async_tx/
async_xor.c
35 dma_addr_t *src_list = unmap->addr; in do_async_xor() local
62 tmp = src_list[0]; in do_async_xor()
63 if (src_list > unmap->addr) in do_async_xor()
64 src_list[0] = dma_dest; in do_async_xor()
65 tx = dma->device_prep_dma_xor(chan, dma_dest, src_list, in do_async_xor()
76 src_list, in do_async_xor()
80 src_list[0] = tmp; in do_async_xor()
91 src_list += xor_src_cnt - 1; in do_async_xor()
101 struct page **src_list, unsigned int *src_offs, in do_sync_xor_offs()
113 srcs = (void **) src_list; in do_sync_xor_offs()
100 do_sync_xor_offs(struct page *dest, unsigned int offset, struct page **src_list, unsigned int *src_offs, int src_cnt, size_t len, struct async_submit_ctl *submit) do_sync_xor_offs() argument
182 async_xor_offs(struct page *dest, unsigned int offset, struct page **src_list, unsigned int *src_offs, int src_cnt, size_t len, struct async_submit_ctl *submit) async_xor_offs() argument
273 async_xor(struct page *dest, struct page **src_list, unsigned int offset, int src_cnt, size_t len, struct async_submit_ctl *submit) async_xor() argument
287 xor_val_chan(struct async_submit_ctl *submit, struct page *dest, struct page **src_list, int src_cnt, size_t len) xor_val_chan() argument
315 async_xor_val_offs(struct page *dest, unsigned int offset, struct page **src_list, unsigned int *src_offs, int src_cnt, size_t len, enum sum_check_flags *result, struct async_submit_ctl *submit) async_xor_val_offs() argument
409 async_xor_val(struct page *dest, struct page **src_list, unsigned int offset, int src_cnt, size_t len, enum sum_check_flags *result, struct async_submit_ctl *submit) async_xor_val() argument
[all...]
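The do_async_xor() hits above show the chunking trick used when a transfer has more sources than one DMA operation allows: source slot 0 is temporarily replaced by the destination of the previous chunk (and restored right after), and the list is advanced by xor_src_cnt - 1 so the running result is carried into the next operation. A minimal userspace sketch of the same idea follows; MAX_XOR_SRCS, xor_op() and xor_chunked() are made-up stand-ins for dma->max_xor and device_prep_dma_xor().

#include <stddef.h>
#include <stdint.h>

#define MAX_XOR_SRCS 4	/* hypothetical per-operation limit; the kernel uses dma->max_xor */

/* one "hardware" operation: dest = srcs[0] ^ srcs[1] ^ ... ^ srcs[cnt - 1] */
static void xor_op(uint8_t *dest, uint8_t **srcs, int cnt, size_t len)
{
	for (size_t i = 0; i < len; i++) {
		uint8_t v = srcs[0][i];
		for (int s = 1; s < cnt; s++)
			v ^= srcs[s][i];
		dest[i] = v;
	}
}

/* chunked driver mirroring the fold-back pattern seen in do_async_xor() */
static void xor_chunked(uint8_t *dest, uint8_t **src_list, int src_cnt, size_t len)
{
	uint8_t **first = src_list;

	while (src_cnt) {
		int xor_src_cnt = src_cnt < MAX_XOR_SRCS ? src_cnt : MAX_XOR_SRCS;
		uint8_t *tmp = src_list[0];

		/* continuation chunks reuse the running result as source 0 */
		if (src_list > first)
			src_list[0] = dest;

		xor_op(dest, src_list, xor_src_cnt, len);

		src_list[0] = tmp;		/* undo the temporary substitution */

		if (src_cnt <= xor_src_cnt)
			break;
		src_cnt -= xor_src_cnt - 1;	/* one slot is now the fed-back dest */
		src_list += xor_src_cnt - 1;
	}
}

With ten sources and a limit of four this runs three operations over sources 0-3, 4-6 and 7-9, each continuation reading the previous result back through slot 0.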
/kernel/linux/linux-6.6/crypto/async_tx/
async_xor.c
35 dma_addr_t *src_list = unmap->addr; in do_async_xor() local
62 tmp = src_list[0]; in do_async_xor()
63 if (src_list > unmap->addr) in do_async_xor()
64 src_list[0] = dma_dest; in do_async_xor()
65 tx = dma->device_prep_dma_xor(chan, dma_dest, src_list, in do_async_xor()
76 src_list, in do_async_xor()
80 src_list[0] = tmp; in do_async_xor()
91 src_list += xor_src_cnt - 1; in do_async_xor()
101 struct page **src_list, unsigned int *src_offs, in do_sync_xor_offs()
113 srcs = (void **) src_list; in do_sync_xor_offs()
100 do_sync_xor_offs(struct page *dest, unsigned int offset, struct page **src_list, unsigned int *src_offs, int src_cnt, size_t len, struct async_submit_ctl *submit) do_sync_xor_offs() argument
182 async_xor_offs(struct page *dest, unsigned int offset, struct page **src_list, unsigned int *src_offs, int src_cnt, size_t len, struct async_submit_ctl *submit) async_xor_offs() argument
273 async_xor(struct page *dest, struct page **src_list, unsigned int offset, int src_cnt, size_t len, struct async_submit_ctl *submit) async_xor() argument
287 xor_val_chan(struct async_submit_ctl *submit, struct page *dest, struct page **src_list, int src_cnt, size_t len) xor_val_chan() argument
315 async_xor_val_offs(struct page *dest, unsigned int offset, struct page **src_list, unsigned int *src_offs, int src_cnt, size_t len, enum sum_check_flags *result, struct async_submit_ctl *submit) async_xor_val_offs() argument
409 async_xor_val(struct page *dest, struct page **src_list, unsigned int offset, int src_cnt, size_t len, enum sum_check_flags *result, struct async_submit_ctl *submit) async_xor_val() argument
[all...]
/kernel/linux/linux-5.10/drivers/dma-buf/
dma-resv.c
348 struct dma_resv_list *src_list, *dst_list; in dma_resv_copy_fences() local
355 src_list = rcu_dereference(src->fence); in dma_resv_copy_fences()
358 if (src_list) { in dma_resv_copy_fences()
359 unsigned shared_count = src_list->shared_count; in dma_resv_copy_fences()
368 src_list = rcu_dereference(src->fence); in dma_resv_copy_fences()
369 if (!src_list || src_list->shared_count > shared_count) { in dma_resv_copy_fences()
375 for (i = 0; i < src_list->shared_count; ++i) { in dma_resv_copy_fences()
378 fence = rcu_dereference(src_list->shared[i]); in dma_resv_copy_fences()
385 src_list in dma_resv_copy_fences()
[all...]
/kernel/linux/linux-6.6/drivers/net/vxlan/
vxlan_mdb.c
40 struct hlist_head src_list; member
66 struct list_head src_list; member
123 if (hlist_empty(&remote->src_list)) in vxlan_mdb_entry_info_fill_srcs()
130 hlist_for_each_entry(ent, &remote->src_list, node) { in vxlan_mdb_entry_info_fill_srcs()
435 list_add_tail(&src->node, &cfg->src_list); in vxlan_mdb_config_src_entry_init()
453 const struct nlattr *src_list, in vxlan_mdb_config_src_list_init()
460 nla_for_each_nested(src_entry, src_list, rem) { in vxlan_mdb_config_src_list_init()
470 list_for_each_entry_safe_reverse(src, tmp, &cfg->src_list, node) in vxlan_mdb_config_src_list_init()
479 list_for_each_entry_safe_reverse(src, tmp, &cfg->src_list, node) in vxlan_mdb_config_src_list_fini()
550 if (vxlan_mdb_is_star_g(&cfg->group) && list_empty(&cfg->src_list) in vxlan_mdb_config_attrs_init()
451 vxlan_mdb_config_src_list_init(struct vxlan_mdb_config *cfg, __be16 proto, const struct nlattr *src_list, struct netlink_ext_ack *extack) vxlan_mdb_config_src_list_init() argument
[all...]
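The vxlan_mdb.c hits sketch the usual shape of netlink source-list parsing: walk the nested attribute with nla_for_each_nested(), allocate one entry per element, queue it on cfg->src_list with list_add_tail(), and unwind in reverse on error. A rough kernel-style sketch of that shape, with struct mdb_src and the address handling left as hypothetical placeholders (it only builds in-tree):

#include <linux/list.h>
#include <linux/slab.h>
#include <net/netlink.h>

struct mdb_src {			/* hypothetical per-source entry */
	struct list_head node;
	__be32 addr;			/* placeholder for the source address */
};

static int src_list_init(struct list_head *head, const struct nlattr *src_list)
{
	struct mdb_src *src, *tmp;
	struct nlattr *src_entry;
	int rem;

	nla_for_each_nested(src_entry, src_list, rem) {
		src = kzalloc(sizeof(*src), GFP_KERNEL);
		if (!src)
			goto err;
		/* real code validates the entry and copies the address here */
		list_add_tail(&src->node, head);
	}
	return 0;

err:
	/* tear down in reverse, as the list_for_each_entry_safe_reverse() hits do */
	list_for_each_entry_safe_reverse(src, tmp, head, node) {
		list_del(&src->node);
		kfree(src);
	}
	return -ENOMEM;
}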
/kernel/linux/linux-5.10/include/linux/
async_tx.h
162 async_xor(struct page *dest, struct page **src_list, unsigned int offset,
167 struct page **src_list, unsigned int *src_offset,
171 async_xor_val(struct page *dest, struct page **src_list, unsigned int offset,
177 struct page **src_list, unsigned int *src_offset,
/kernel/linux/linux-6.6/include/linux/
async_tx.h
162 async_xor(struct page *dest, struct page **src_list, unsigned int offset,
167 struct page **src_list, unsigned int *src_offset,
171 async_xor_val(struct page *dest, struct page **src_list, unsigned int offset,
177 struct page **src_list, unsigned int *src_offset,
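Both async_tx.h copies declare the same page-based entry points (async_xor(), async_xor_offs(), async_xor_val()). A hedged usage sketch in the style of the raid5/raid6 callers; xor_pages() is a made-up wrapper, while init_async_submit() and the flags come from the same header:

#include <linux/async_tx.h>

static struct dma_async_tx_descriptor *
xor_pages(struct page *dest, struct page **src_list, int src_cnt, size_t len,
	  addr_conv_t *scribble)
{
	struct async_submit_ctl submit;

	/* ASYNC_TX_XOR_ZERO_DST: dest is not a source, result is the plain XOR of src_list */
	init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST | ASYNC_TX_ACK,
			  NULL, NULL, NULL, scribble);
	return async_xor(dest, src_list, 0, src_cnt, len, &submit);
}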
/kernel/linux/linux-6.6/net/bridge/
br_mdb.c
180 if (hlist_empty(&p->src_list)) in __mdb_fill_srcs()
187 hlist_for_each_entry_rcu(ent, &p->src_list, node, in __mdb_fill_srcs()
495 if (!hlist_empty(&pg->src_list)) in rtnl_mdb_nlmsg_size()
498 hlist_for_each_entry(ent, &pg->src_list, node) { in rtnl_mdb_nlmsg_size()
904 hlist_for_each_entry(ent, &pg->src_list, node) in br_mdb_replace_group_srcs()
911 hlist_for_each_entry_safe(ent, tmp, &pg->src_list, node) { in br_mdb_replace_group_srcs()
919 hlist_for_each_entry(ent, &pg->src_list, node) in br_mdb_replace_group_srcs()
1091 static int br_mdb_config_src_list_init(struct nlattr *src_list, in br_mdb_config_src_list_init() argument
1099 nla_for_each_nested(src_entry, src_list, rem) in br_mdb_config_src_list_init()
1113 nla_for_each_nested(src_entry, src_list, rem) in br_mdb_config_src_list_init()
[all...]
br_multicast.c
372 hlist_for_each_entry(src_ent, &pg_lst->src_list, node) { in br_multicast_star_g_handle_mode()
423 hlist_for_each_entry(src_ent, &pg->src_list, node) { in br_multicast_star_g_host_state()
796 WARN_ON(!hlist_empty(&pg->src_list)); in br_multicast_destroy_port_group()
814 hlist_for_each_entry_safe(ent, tmp, &pg->src_list, node) in br_multicast_del_pg()
871 hlist_for_each_entry_safe(src_ent, tmp, &pg->src_list, node) { in br_multicast_port_group_expired()
878 if (hlist_empty(&pg->src_list)) { in br_multicast_port_group_expired()
953 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip4_multicast_alloc_query()
1041 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip4_multicast_alloc_query()
1103 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip6_multicast_alloc_query()
1204 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip6_multicast_alloc_query()
[all...]
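The br_multicast.c hits all revolve around the same hlist idiom: iterate a group's src_list (using the _safe variant when entries may be deleted) and check hlist_empty() afterwards to decide whether the whole group can go. A stripped-down kernel-style sketch of the expiry path; struct grp_src and expire_sources() are hypothetical:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/jiffies.h>

struct grp_src {			/* hypothetical source entry */
	struct hlist_node node;
	unsigned long expires;		/* jiffies */
};

/* drop timed-out sources from a group's src_list, as the *_expired() hits do */
static bool expire_sources(struct hlist_head *src_list)
{
	struct grp_src *ent;
	struct hlist_node *tmp;

	hlist_for_each_entry_safe(ent, tmp, src_list, node) {
		if (time_after(jiffies, ent->expires)) {
			hlist_del(&ent->node);
			kfree(ent);
		}
	}
	return hlist_empty(src_list);	/* caller may tear the group down */
}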
br_multicast_eht.c
630 hlist_for_each_entry_safe(src_ent, tmp, &pg->src_list, node) { in __eht_inc_exc()
br_private.h
352 struct hlist_head src_list; member
/kernel/linux/linux-5.10/net/bridge/
br_multicast.c
273 hlist_for_each_entry(src_ent, &pg_lst->src_list, node) { in br_multicast_star_g_handle_mode()
324 hlist_for_each_entry(src_ent, &pg->src_list, node) { in br_multicast_star_g_host_state()
578 WARN_ON(!hlist_empty(&pg->src_list)); in br_multicast_destroy_port_group()
595 hlist_for_each_entry_safe(ent, tmp, &pg->src_list, node) in br_multicast_del_pg()
651 hlist_for_each_entry_safe(src_ent, tmp, &pg->src_list, node) { in br_multicast_port_group_expired()
658 if (hlist_empty(&pg->src_list)) { in br_multicast_port_group_expired()
712 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip4_multicast_alloc_query()
799 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip4_multicast_alloc_query()
860 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip6_multicast_alloc_query()
960 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip6_multicast_alloc_query()
[all...]
br_mdb.c
100 if (hlist_empty(&p->src_list)) in __mdb_fill_srcs()
107 hlist_for_each_entry_rcu(ent, &p->src_list, node, in __mdb_fill_srcs()
455 if (!hlist_empty(&pg->src_list)) in rtnl_mdb_nlmsg_size()
458 hlist_for_each_entry(ent, &pg->src_list, node) { in rtnl_mdb_nlmsg_size()
br_private.h
258 struct hlist_head src_list; member
/kernel/linux/linux-5.10/sound/core/seq/
seq_ports.c
198 return list_entry(p, struct snd_seq_subscribers, src_list); in get_subscriber()
504 list_add_tail(&subs->src_list, &grp->list_head); in check_and_subscribe_port()
528 list = is_src ? &subs->src_list : &subs->dest_list; in __delete_and_unsubscribe_port()
571 INIT_LIST_HEAD(&subs->src_list); in snd_seq_port_connect()
643 list_for_each_entry(s, &src_grp->list_head, src_list) { in snd_seq_port_get_subscription()
seq_ports.h
30 struct list_head src_list; /* link of sources */ member
seq_clientmgr.c
682 list_for_each_entry(subs, &grp->list_head, src_list) { in deliver_to_subscribers()
1983 s = list_entry(p, struct snd_seq_subscribers, src_list); in snd_seq_ioctl_query_subs()
2399 s = list_entry(p, struct snd_seq_subscribers, src_list); in snd_seq_info_dump_subscribers()
/kernel/linux/linux-6.6/sound/core/seq/
seq_ports.c
210 return list_entry(p, struct snd_seq_subscribers, src_list); in get_subscriber()
534 list_add_tail(&subs->src_list, &grp->list_head); in check_and_subscribe_port()
558 list = is_src ? &subs->src_list : &subs->dest_list; in __delete_and_unsubscribe_port()
601 INIT_LIST_HEAD(&subs->src_list); in snd_seq_port_connect()
673 list_for_each_entry(s, &src_grp->list_head, src_list) { in snd_seq_port_get_subscription()
seq_ports.h
30 struct list_head src_list; /* link of sources */ member
seq_clientmgr.c
735 list_for_each_entry(subs, &grp->list_head, src_list) { in __deliver_to_subscribers()
2016 s = list_entry(p, struct snd_seq_subscribers, src_list); in snd_seq_ioctl_query_subs()
2589 s = list_entry(p, struct snd_seq_subscribers, src_list); in snd_seq_info_dump_subscribers()
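The sequencer hits show one object threaded onto two lists at once: struct snd_seq_subscribers carries both a src_list and a dest_list link, each queued on the owning port group's list_head, and list_entry() recovers the container from either link. A stripped-down kernel-style sketch of that layout; struct subscriber, struct port_group and connect_ports() are hypothetical:

#include <linux/list.h>

struct subscriber {
	struct list_head src_list;	/* link in the source port's group */
	struct list_head dest_list;	/* link in the destination port's group */
};

struct port_group {
	struct list_head list_head;	/* all subscriptions for this port */
};

static void connect_ports(struct port_group *src_grp, struct port_group *dest_grp,
			  struct subscriber *subs)
{
	INIT_LIST_HEAD(&subs->src_list);
	INIT_LIST_HEAD(&subs->dest_list);
	list_add_tail(&subs->src_list, &src_grp->list_head);
	list_add_tail(&subs->dest_list, &dest_grp->list_head);
}

/* recover the subscriber from a position in a source group, as get_subscriber() does */
static struct subscriber *from_src_pos(struct list_head *p)
{
	return list_entry(p, struct subscriber, src_list);
}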
/kernel/linux/linux-6.6/drivers/crypto/aspeed/
aspeed-hace-hash.c
169 struct aspeed_sg_list *src_list; in aspeed_ahash_dma_prepare_sg() local
189 src_list = (struct aspeed_sg_list *)hash_engine->ahash_src_addr; in aspeed_ahash_dma_prepare_sg()
221 src_list[0].phy_addr = cpu_to_le32(phy_addr); in aspeed_ahash_dma_prepare_sg()
222 src_list[0].len = cpu_to_le32(len); in aspeed_ahash_dma_prepare_sg()
223 src_list++; in aspeed_ahash_dma_prepare_sg()
240 src_list[i].phy_addr = cpu_to_le32(phy_addr); in aspeed_ahash_dma_prepare_sg()
241 src_list[i].len = cpu_to_le32(len); in aspeed_ahash_dma_prepare_sg()
aspeed-hace-crypto.c
222 struct aspeed_sg_list *src_list, *dst_list; in aspeed_sk_start_sg() local
267 src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr; in aspeed_sk_start_sg()
284 src_list[i].phy_addr = cpu_to_le32(phy_addr); in aspeed_sk_start_sg()
285 src_list[i].len = cpu_to_le32(len); in aspeed_sk_start_sg()
294 dst_list = src_list; in aspeed_sk_start_sg()
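Both aspeed-hace files fill a hardware descriptor table with little-endian {phy_addr, len} pairs taken from a DMA-mapped scatterlist; the hash side also fills an extra entry at src_list[0] and advances the pointer before the loop, and the real driver tags the last entry with a length flag that is omitted here. A hedged kernel-style sketch of the fill loop; struct hw_sg_desc stands in for the driver's struct aspeed_sg_list:

#include <linux/scatterlist.h>
#include <linux/types.h>

struct hw_sg_desc {			/* stands in for struct aspeed_sg_list */
	__le32 phy_addr;
	__le32 len;
};

/* copy a DMA-mapped scatterlist into the engine's descriptor table */
static void fill_sg_descs(struct hw_sg_desc *src_list,
			  struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		src_list[i].phy_addr = cpu_to_le32(sg_dma_address(sg));
		src_list[i].len = cpu_to_le32(sg_dma_len(sg));
	}
}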
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
fs_counters.c
626 mlx5_fc_pool_acquire_from_list(struct list_head *src_list, in mlx5_fc_pool_acquire_from_list() argument
633 if (list_empty(src_list)) in mlx5_fc_pool_acquire_from_list()
636 bulk = list_first_entry(src_list, struct mlx5_fc_bulk, pool_list); in mlx5_fc_pool_acquire_from_list()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/
fs_counters.c
694 mlx5_fc_pool_acquire_from_list(struct list_head *src_list, in mlx5_fc_pool_acquire_from_list() argument
701 if (list_empty(src_list)) in mlx5_fc_pool_acquire_from_list()
704 bulk = list_first_entry(src_list, struct mlx5_fc_bulk, pool_list); in mlx5_fc_pool_acquire_from_list()
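In both fs_counters.c versions the src_list argument is just one of the pool's bulk lists, and acquiring from it is the plain list_empty()/list_first_entry() pattern visible in the hits. A minimal kernel-style sketch of that peek; struct fc_bulk stands in for struct mlx5_fc_bulk, and the real function goes on to allocate a counter from the bulk and move it between lists:

#include <linux/list.h>

struct fc_bulk {			/* stands in for struct mlx5_fc_bulk */
	struct list_head pool_list;
};

static struct fc_bulk *first_bulk(struct list_head *src_list)
{
	if (list_empty(src_list))
		return NULL;
	return list_first_entry(src_list, struct fc_bulk, pool_list);
}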
/kernel/linux/linux-6.6/tools/testing/selftests/net/forwarding/
bridge_mdb.sh
276 local src_list
280 src_list=${src_list},${src_prefix}${i}
283 echo $src_list | cut -c 2-
940 local src_list
955 src_list=$valid_src
957 src_list=$invalid_src
976 filter_mode $filter_mode source_list $src_list
/kernel/linux/linux-6.6/tools/testing/selftests/drivers/net/dsa/
bridge_mdb.sh
276 local src_list
280 src_list=${src_list},${src_prefix}${i}
283 echo $src_list | cut -c 2-
940 local src_list
955 src_list=$valid_src
957 src_list=$invalid_src
976 filter_mode $filter_mode source_list $src_list
