Home
last modified time | relevance | path

Searched refs:group_list (Results 1 - 25 of 31) sorted by relevance

12

/kernel/linux/linux-5.10/virt/kvm/
H A Dvfio.c30 struct list_head group_list; member
165 list_for_each_entry(kvg, &kv->group_list, node) { in kvm_vfio_update_coherency()
211 list_for_each_entry(kvg, &kv->group_list, node) { in kvm_vfio_set_group()
226 list_add_tail(&kvg->node, &kv->group_list); in kvm_vfio_set_group()
251 list_for_each_entry(kvg, &kv->group_list, node) { in kvm_vfio_set_group()
310 list_for_each_entry(kvg, &kv->group_list, node) { in kvm_vfio_set_group()
368 list_for_each_entry_safe(kvg, tmp, &kv->group_list, node) { in kvm_vfio_destroy()
409 INIT_LIST_HEAD(&kv->group_list); in kvm_vfio_create()
/kernel/linux/linux-6.6/drivers/vfio/
H A Dcontainer.c20 struct list_head group_list; member
209 if (!list_empty(&container->group_list) && in vfio_container_ioctl_check_extension()
242 list_for_each_entry(group, &container->group_list, container_next) { in __vfio_container_attach_groups()
252 list_for_each_entry_continue_reverse(group, &container->group_list, in __vfio_container_attach_groups()
276 if (list_empty(&container->group_list) || container->iommu_driver) { in vfio_ioctl_set_iommu()
367 INIT_LIST_HEAD(&container->group_list); in vfio_fops_open()
430 if (!list_empty(&container->group_list) && in vfio_container_attach_group()
458 list_add(&group->container_next, &container->group_list); in vfio_container_attach_group()
491 if (driver && list_empty(&container->group_list)) { in vfio_group_detach_container()
H A Dvfio_iommu_spapr_tce.c71 struct list_head group_list; member
200 return !list_empty(&container->group_list); in tce_groups_attached()
280 tcegrp = list_first_entry(&container->group_list, in tce_iommu_enable()
330 INIT_LIST_HEAD_RCU(&container->group_list); in tce_iommu_open()
352 tcegrp = list_first_entry(&container->group_list, in tce_iommu_release()
658 tcegrp = list_first_entry(&container->group_list, in tce_iommu_create_window()
684 list_for_each_entry(tcegrp, &container->group_list, next) { in tce_iommu_create_window()
700 list_for_each_entry(tcegrp, &container->group_list, next) { in tce_iommu_create_window()
724 list_for_each_entry(tcegrp, &container->group_list, next) { in tce_iommu_remove_window()
761 tcegrp = list_first_entry(&container->group_list, in tce_iommu_create_default_window()
[all...]
H A Dgroup.c20 struct list_head group_list; member
21 struct mutex group_lock; /* locks group_list */
516 * group->iommu_group from the vfio.group_list cannot be NULL in vfio_group_find_from_iommu()
519 list_for_each_entry(group, &vfio.group_list, vfio_next) { in vfio_group_find_from_iommu()
602 list_add(&group->vfio_next, &vfio.group_list); in vfio_create_group()
923 INIT_LIST_HEAD(&vfio.group_list); in vfio_group_init()
953 WARN_ON(!list_empty(&vfio.group_list)); in vfio_group_cleanup()
H A Dvfio_iommu_type1.c83 struct list_head group_list; member
1868 list_for_each_entry(g, &domain->group_list, next) { in find_iommu_group()
2258 INIT_LIST_HEAD(&domain->group_list); in vfio_iommu_type1_attach_group()
2259 list_add(&group->next, &domain->group_list); in vfio_iommu_type1_attach_group()
2293 list_add(&group->next, &d->group_list); in vfio_iommu_type1_attach_group()
2434 list_for_each_entry(g, &d->group_list, next) { in vfio_iommu_resv_refresh()
2509 if (list_empty(&domain->group_list)) { in vfio_iommu_type1_detach_group()
2586 &domain->group_list, next) { in vfio_release_domain()
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
H A Dmcg.c134 struct list_head group_list; member
550 req = list_first_entry(&group->pending_list, struct mcast_req, group_list); in mlx4_ib_mcg_timeout_handler()
551 list_del(&req->group_list); in mlx4_ib_mcg_timeout_handler()
599 list_del(&req->group_list); in handle_leave_req()
621 list_del(&req->group_list); in handle_join_req()
630 list_del(&req->group_list); in handle_join_req()
675 struct mcast_req, group_list); in mlx4_ib_mcg_work_handler()
680 list_del(&req->group_list); in mlx4_ib_mcg_work_handler()
711 group_list); in mlx4_ib_mcg_work_handler()
767 struct mcast_req, group_list); in search_relocate_mgid0_group()
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx4/
H A Dmcg.c134 struct list_head group_list; member
550 req = list_first_entry(&group->pending_list, struct mcast_req, group_list); in mlx4_ib_mcg_timeout_handler()
551 list_del(&req->group_list); in mlx4_ib_mcg_timeout_handler()
599 list_del(&req->group_list); in handle_leave_req()
621 list_del(&req->group_list); in handle_join_req()
630 list_del(&req->group_list); in handle_join_req()
675 struct mcast_req, group_list); in mlx4_ib_mcg_work_handler()
680 list_del(&req->group_list); in mlx4_ib_mcg_work_handler()
711 group_list); in mlx4_ib_mcg_work_handler()
767 struct mcast_req, group_list); in search_relocate_mgid0_group()
[all...]
/kernel/linux/linux-5.10/drivers/vfio/
H A Dvfio_iommu_spapr_tce.c69 struct list_head group_list; member
198 return !list_empty(&container->group_list); in tce_groups_attached()
278 tcegrp = list_first_entry(&container->group_list, in tce_iommu_enable()
328 INIT_LIST_HEAD_RCU(&container->group_list); in tce_iommu_open()
350 tcegrp = list_first_entry(&container->group_list, in tce_iommu_release()
657 tcegrp = list_first_entry(&container->group_list, in tce_iommu_create_window()
683 list_for_each_entry(tcegrp, &container->group_list, next) { in tce_iommu_create_window()
699 list_for_each_entry(tcegrp, &container->group_list, next) { in tce_iommu_create_window()
723 list_for_each_entry(tcegrp, &container->group_list, next) { in tce_iommu_remove_window()
760 tcegrp = list_first_entry(&container->group_list, in tce_iommu_create_default_window()
[all...]
H A Dvfio.c44 struct list_head group_list; member
58 struct list_head group_list; member
305 * that the group is no longer in vfio.group_list. in vfio_group_unlock_and_free()
344 * do anything unless it can find the group in vfio.group_list, so in vfio_create_group()
356 list_for_each_entry(tmp, &vfio.group_list, vfio_next) { in vfio_create_group()
383 list_add(&group->vfio_next, &vfio.group_list); in vfio_create_group()
461 list_for_each_entry(group, &vfio.group_list, vfio_next) { in vfio_group_try_get()
479 list_for_each_entry(group, &vfio.group_list, vfio_next) { in vfio_group_get_from_iommu()
1005 if (!list_empty(&container->group_list) && in vfio_ioctl_check_extension()
1040 list_for_each_entry(group, &container->group_list, container_next) { in __vfio_container_attach_groups()
[all...]
H A Dvfio_iommu_type1.c83 struct list_head group_list; member
1681 list_for_each_entry(g, &domain->group_list, next) { in find_iommu_group()
1713 list_for_each_entry(group, &domain->group_list, next) { in update_pinned_page_dirty_scope()
1723 list_for_each_entry(group, &domain->group_list, next) { in update_pinned_page_dirty_scope()
2125 INIT_LIST_HEAD(&domain->group_list); in vfio_iommu_type1_attach_group()
2133 &iommu->external_domain->group_list); in vfio_iommu_type1_attach_group()
2209 INIT_LIST_HEAD(&domain->group_list); in vfio_iommu_type1_attach_group()
2210 list_add(&group->next, &domain->group_list); in vfio_iommu_type1_attach_group()
2237 list_add(&group->next, &d->group_list); in vfio_iommu_type1_attach_group()
2376 list_for_each_entry(g, &d->group_list, next) { in vfio_iommu_resv_refresh()
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/netronome/nfp/flower/
H A Dlag_conf.c136 list_add_tail(&group->list, &lag->group_list); in nfp_fl_lag_group_create()
150 list_for_each_entry(entry, &lag->group_list, list) in nfp_fl_lag_find_group_for_master_with_lag()
278 list_for_each_entry_safe(entry, storage, &lag->group_list, list) { in nfp_fl_lag_do_work()
452 list_for_each_entry(group_entry, &priv->nfp_lag.group_list, in nfp_flower_lag_unprocessed_msg()
666 INIT_LIST_HEAD(&lag->group_list); in nfp_flower_lag_init()
686 list_for_each_entry_safe(entry, storage, &lag->group_list, list) { in nfp_flower_lag_cleanup()
H A Dmain.h126 * @group_list: List of all master/slave groups offloaded
138 struct list_head group_list; member
/kernel/linux/linux-5.10/drivers/dma/ppc4xx/
H A Dadma.c336 list_for_each_entry(iter, &desc->group_list, chain_node) { in ppc440spe_desc_init_dma01pq()
341 &desc->group_list))) { in ppc440spe_desc_init_dma01pq()
367 iter = list_first_entry(&desc->group_list, in ppc440spe_desc_init_dma01pq()
387 list_for_each_entry_from(iter, &desc->group_list, chain_node) { in ppc440spe_desc_init_dma01pq()
398 iter = list_first_entry(&desc->group_list, in ppc440spe_desc_init_dma01pq()
417 list_for_each_entry_from(iter, &desc->group_list, in ppc440spe_desc_init_dma01pq()
445 iter = list_first_entry(&desc->group_list, in ppc440spe_desc_init_dma01pqzero_sum()
455 list_for_each_entry_from(iter, &desc->group_list, chain_node) { in ppc440spe_desc_init_dma01pqzero_sum()
476 &desc->group_list))) { in ppc440spe_desc_init_dma01pqzero_sum()
1434 list_for_each_entry(iter, &tdesc->group_list, chain_node) { in ppc440spe_get_group_entry()
[all...]
H A Dadma.h129 * @group_list: list of slots that make up a multi-descriptor transaction
153 struct list_head group_list; /* list */ member
/kernel/linux/linux-6.6/drivers/dma/ppc4xx/
H A Dadma.c336 list_for_each_entry(iter, &desc->group_list, chain_node) { in ppc440spe_desc_init_dma01pq()
341 &desc->group_list))) { in ppc440spe_desc_init_dma01pq()
367 iter = list_first_entry(&desc->group_list, in ppc440spe_desc_init_dma01pq()
387 list_for_each_entry_from(iter, &desc->group_list, chain_node) { in ppc440spe_desc_init_dma01pq()
398 iter = list_first_entry(&desc->group_list, in ppc440spe_desc_init_dma01pq()
417 list_for_each_entry_from(iter, &desc->group_list, in ppc440spe_desc_init_dma01pq()
445 iter = list_first_entry(&desc->group_list, in ppc440spe_desc_init_dma01pqzero_sum()
455 list_for_each_entry_from(iter, &desc->group_list, chain_node) { in ppc440spe_desc_init_dma01pqzero_sum()
476 &desc->group_list))) { in ppc440spe_desc_init_dma01pqzero_sum()
1432 list_for_each_entry(iter, &tdesc->group_list, chain_node) { in ppc440spe_get_group_entry()
[all...]
H A Dadma.h126 * @group_list: list of slots that make up a multi-descriptor transaction
150 struct list_head group_list; /* list */ member
/kernel/linux/linux-5.10/drivers/lightnvm/
H A Dpblk-gc.c372 struct list_head *group_list) in pblk_gc_get_victim_line()
377 victim = list_first_entry(group_list, struct pblk_line, list); in pblk_gc_get_victim_line()
379 list_for_each_entry(line, group_list, list) { in pblk_gc_get_victim_line()
449 struct list_head *group_list; in pblk_gc_run() local
460 group_list = l_mg->gc_lists[gc_group++]; in pblk_gc_run()
465 line = pblk_gc_get_victim_line(pblk, group_list); in pblk_gc_run()
371 pblk_gc_get_victim_line(struct pblk *pblk, struct list_head *group_list) pblk_gc_get_victim_line() argument
/kernel/linux/linux-6.6/drivers/net/ethernet/netronome/nfp/flower/
H A Dlag_conf.c136 list_add_tail(&group->list, &lag->group_list); in nfp_fl_lag_group_create()
150 list_for_each_entry(entry, &lag->group_list, list) in nfp_fl_lag_find_group_for_master_with_lag()
308 list_for_each_entry_safe(entry, storage, &lag->group_list, list) { in nfp_fl_lag_do_work()
487 list_for_each_entry(group_entry, &priv->nfp_lag.group_list, in nfp_flower_lag_unprocessed_msg()
701 INIT_LIST_HEAD(&lag->group_list); in nfp_flower_lag_init()
721 list_for_each_entry_safe(entry, storage, &lag->group_list, list) { in nfp_flower_lag_cleanup()
H A Dmain.h224 * @group_list: List of all master/slave groups offloaded
236 struct list_head group_list; member
/kernel/linux/linux-5.10/security/tomoyo/
H A Dmemory.c111 list = &param->ns->group_list[idx]; in tomoyo_get_group()
H A Dgc.c563 struct list_head *list = &ns->group_list[i]; in tomoyo_collect_entry()
/kernel/linux/linux-6.6/security/tomoyo/
H A Dmemory.c111 list = &param->ns->group_list[idx]; in tomoyo_get_group()
H A Dgc.c563 struct list_head *list = &ns->group_list[i]; in tomoyo_collect_entry()
/kernel/linux/linux-5.10/drivers/iommu/
H A Diommu.c198 static int __iommu_probe_device(struct device *dev, struct list_head *group_list) in __iommu_probe_device() argument
231 if (group_list && !group->default_domain && list_empty(&group->entry)) in __iommu_probe_device()
232 list_add_tail(&group->entry, group_list); in __iommu_probe_device()
1574 struct list_head *group_list = data; in probe_iommu_group() local
1585 ret = __iommu_probe_device(dev, group_list); in probe_iommu_group()
1752 LIST_HEAD(group_list); in bus_iommu_probe()
1760 ret = bus_for_each_dev(bus, NULL, &group_list, probe_iommu_group); in bus_iommu_probe()
1764 list_for_each_entry_safe(group, next, &group_list, entry) { in bus_iommu_probe()
/kernel/linux/linux-6.6/drivers/iommu/
H A Diommu.c484 static int __iommu_probe_device(struct device *dev, struct list_head *group_list) in __iommu_probe_device() argument
530 } else if (!group->default_domain && !group_list) { in __iommu_probe_device()
536 * With a group_list argument we defer the default_domain setup in __iommu_probe_device()
541 list_add_tail(&group->entry, group_list); in __iommu_probe_device()
1781 struct list_head *group_list = data; in probe_iommu_group() local
1785 ret = __iommu_probe_device(dev, group_list); in probe_iommu_group()
1858 LIST_HEAD(group_list); in bus_iommu_probe()
1861 ret = bus_for_each_dev(bus, NULL, &group_list, probe_iommu_group); in bus_iommu_probe()
1865 list_for_each_entry_safe(group, next, &group_list, entry) { in bus_iommu_probe()

Completed in 40 milliseconds

12