
Searched refs: sched_group (Results 1 - 10 of 10) sorted by relevance

/kernel/linux/linux-5.10/include/linux/sched/
topology.h
71 struct sched_group;
83 struct sched_group *groups; /* the balancing groups of the domain */
182 struct sched_group *__percpu *sg;
/kernel/linux/linux-6.6/include/linux/sched/
topology.h
78 struct sched_group;
91 struct sched_group *groups; /* the balancing groups of the domain */
191 struct sched_group *__percpu *sg;
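
The two header hits above capture the core relationship: a sched_domain reaches its balancing groups through a single groups pointer, and the groups themselves are chained into a circular singly linked list (see the "Must be a circular list" comment in the sched.h hits below). A minimal userspace C model of that shape — the struct names mirror the kernel's but the fields are pared down, so treat this as a sketch rather than the kernel definitions:

#include <stdio.h>

/* Pared-down models; the real structures live in
 * include/linux/sched/topology.h and kernel/sched/sched.h. */
struct sched_group {
    struct sched_group *next;   /* circular: last->next == first */
    int id;                     /* stand-in for the real per-group state */
};

struct sched_domain {
    struct sched_group *groups; /* first balancing group of the domain */
};

int main(void)
{
    struct sched_group a = { .id = 0 }, b = { .id = 1 }, c = { .id = 2 };
    struct sched_domain sd = { .groups = &a };

    a.next = &b;
    b.next = &c;
    c.next = &a;                /* close the ring */

    /* The canonical walk: start at sd->groups, stop when we wrap. */
    struct sched_group *sg = sd.groups;
    do {
        printf("group %d\n", sg->id);
        sg = sg->next;
    } while (sg != sd.groups);

    return 0;
}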
/kernel/linux/linux-6.6/kernel/sched/
topology.c
38 struct sched_group *group = sd->groups; in sched_domain_debug_one()
610 static void free_sched_groups(struct sched_group *sg, int free_sgc) in free_sched_groups()
612 struct sched_group *tmp, *first; in free_sched_groups()
749 struct sched_group *sg = sd->groups; in cpu_attach_domain()
796 int group_balance_cpu(struct sched_group *sg) in group_balance_cpu()
908 build_balance_mask(struct sched_domain *sd, struct sched_group *sg, struct cpumask *mask) in build_balance_mask()
944 static struct sched_group *
947 struct sched_group *sg; in build_group_from_child_sched_domain()
950 sg = kzalloc_node(sizeof(struct sched_group) + cpumask_size(), in build_group_from_child_sched_domain()
969 struct sched_group *sg) in init_overlap_sched_group()
[all...]
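
The free_sched_groups() hit at line 610 shows the delicate part of that ring: teardown has to remember where it started and save each node's next pointer before freeing it, stopping once it wraps back around. A userspace sketch of the same pattern, with malloc/free standing in for the kernel allocators:

#include <stdlib.h>

struct sched_group {
    struct sched_group *next;
};

/* Sketch of the free_sched_groups() pattern: walk the circular list,
 * grabbing ->next before the current node disappears, and stop once
 * the walk is back at the saved starting node. */
static void free_group_ring(struct sched_group *sg)
{
    struct sched_group *first = sg, *tmp;

    if (!sg)
        return;

    do {
        tmp = sg->next;   /* save the link before freeing the node */
        free(sg);
        sg = tmp;
    } while (sg != first); /* compares against the saved pointer, as the kernel loop does */
}

int main(void)
{
    struct sched_group *a = malloc(sizeof(*a));
    struct sched_group *b = malloc(sizeof(*b));

    if (!a || !b)
        return 1;
    a->next = b;
    b->next = a;          /* two-node ring */
    free_group_ring(a);
    return 0;
}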
sched.h
1350 struct sched_group;
1352 static inline struct cpumask *sched_group_span(struct sched_group *sg);
1430 struct sched_group *group) in sched_group_cookie_match()
1490 struct sched_group *group) in sched_group_cookie_match()
2044 struct sched_group {
2045 struct sched_group *next; /* Must be a circular list */
2064 static inline struct cpumask *sched_group_span(struct sched_group *sg) in sched_group_span()
2072 static inline struct cpumask *group_balance_mask(struct sched_group *sg) in group_balance_mask()
2078 * group_first_cpu - Returns the first CPU in the cpumask of a sched_group.
2081 static inline unsigned int group_first_cpu(struct sched_group *group) in group_first_cpu()
[all...]
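
These sched.h hits also explain the kzalloc_node(sizeof(struct sched_group) + cpumask_size(), ...) call seen in topology.c above: the group's CPU mask lives in a flexible array member at the end of the struct, and sched_group_span() simply returns a pointer to it. A simplified userspace model of that layout, using a raw unsigned long bitmap where the kernel uses struct cpumask:

#include <stdio.h>
#include <stdlib.h>

/* Simplified model: the real struct sched_group ends in a flexible
 * array holding the group's cpumask, so one allocation covers both
 * the struct and its mask. */
struct sched_group {
    struct sched_group *next;
    unsigned int group_weight;
    unsigned long cpumask[];   /* flexible array member */
};

static unsigned long *sched_group_span(struct sched_group *sg)
{
    return sg->cpumask;        /* the kernel wraps this in to_cpumask() */
}

int main(void)
{
    size_t mask_bytes = sizeof(unsigned long);  /* enough for 64 CPUs */
    struct sched_group *sg = calloc(1, sizeof(*sg) + mask_bytes);

    if (!sg)
        return 1;
    *sched_group_span(sg) = 0xfUL;              /* CPUs 0-3 in the span */
    printf("span bits: %#lx, weight %u\n",
           *sched_group_span(sg), sg->group_weight);
    free(sg);
    return 0;
}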
fair.c
7175 static struct sched_group *
7182 find_idlest_group_cpu(struct sched_group *group, struct task_struct *p, int this_cpu) in find_idlest_group_cpu()
7257 struct sched_group *group; in find_idlest_cpu()
9153 * our sched_group. We may want to revisit it if we couldn't in can_migrate_task()
9684 * sg_lb_stats - stats of a sched_group required for load_balancing
9711 struct sched_group *busiest; /* Busiest group in this sd */
9712 struct sched_group *local; /* Local group in this sd */
9776 struct sched_group *sdg = sd->groups; in update_cpu_capacity()
9794 struct sched_group *group, *sdg = sd->groups; in update_group_capacity()
9908 static inline int sg_imbalanced(struct sched_group *group) in sg_imbalanced()
[all...]
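
The fair.c hits center on group selection for load balancing: find_idlest_group_cpu() (line 7182) scans the CPUs in one group's span, returning an idle CPU immediately and otherwise remembering the least-loaded candidate. A hedged userspace sketch of that shape — cpu_in_span, cpu_idle, and cpu_load are hypothetical stand-ins for sched_group_span(), available_idle_cpu(), and the kernel's load metrics:

#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

#define NR_CPUS 8

/* Hypothetical stand-ins for the kernel's per-CPU state. */
static bool cpu_in_span[NR_CPUS] = { [4] = true, [5] = true, [6] = true };
static bool cpu_idle[NR_CPUS];    /* no idle CPU in this example */
static int  cpu_load[NR_CPUS]  = { [4] = 300, [5] = 100, [6] = 200 };

/* Sketch of the find_idlest_group_cpu() shape: prefer an idle CPU in
 * the group's span; otherwise fall back to the least-loaded one. */
static int find_idlest_group_cpu_sketch(void)
{
    int least_loaded = -1, min_load = INT_MAX;

    for (int cpu = 0; cpu < NR_CPUS; cpu++) {
        if (!cpu_in_span[cpu])
            continue;              /* only CPUs in this group's span */
        if (cpu_idle[cpu])
            return cpu;            /* an idle CPU wins immediately */
        if (cpu_load[cpu] < min_load) {
            min_load = cpu_load[cpu];
            least_loaded = cpu;
        }
    }
    return least_loaded;
}

int main(void)
{
    printf("picked CPU %d\n", find_idlest_group_cpu_sketch());  /* prints 5 */
    return 0;
}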
rt.c
1906 struct sched_group *sg = NULL; in find_cas_cpu()
1907 struct sched_group *sg_target = NULL; in find_cas_cpu()
1908 struct sched_group *sg_backup = NULL; in find_cas_cpu()
/kernel/linux/linux-5.10/kernel/sched/
topology.c
37 struct sched_group *group = sd->groups; in sched_domain_debug_one()
574 static void free_sched_groups(struct sched_group *sg, int free_sgc) in free_sched_groups()
576 struct sched_group *tmp, *first; in free_sched_groups()
748 int group_balance_cpu(struct sched_group *sg) in group_balance_cpu()
860 build_balance_mask(struct sched_domain *sd, struct sched_group *sg, struct cpumask *mask) in build_balance_mask()
896 static struct sched_group *
899 struct sched_group *sg; in build_group_from_child_sched_domain()
902 sg = kzalloc_node(sizeof(struct sched_group) + cpumask_size(), in build_group_from_child_sched_domain()
919 struct sched_group *sg) in init_overlap_sched_group()
974 struct sched_group *first in build_overlap_sched_groups()
[all...]
sched.h
1615 struct sched_group {
1616 struct sched_group *next; /* Must be a circular list */
1633 static inline struct cpumask *sched_group_span(struct sched_group *sg) in sched_group_span()
1641 static inline struct cpumask *group_balance_mask(struct sched_group *sg) in group_balance_mask()
1647 * group_first_cpu - Returns the first CPU in the cpumask of a sched_group.
1650 static inline unsigned int group_first_cpu(struct sched_group *group) in group_first_cpu()
1655 extern int group_balance_cpu(struct sched_group *sg);
3159 extern int group_balance_cpu_not_isolated(struct sched_group *sg);
3161 static inline int group_balance_cpu_not_isolated(struct sched_group *sg) in group_balance_cpu_not_isolated()
fair.c
6219 static struct sched_group *
6226 find_idlest_group_cpu(struct sched_group *group, struct task_struct *p, int this_cpu) in find_idlest_group_cpu()
6297 struct sched_group *group; in find_idlest_cpu()
8021 * our sched_group. We may want to revisit it if we couldn't in can_migrate_task()
8543 * sg_lb_stats - stats of a sched_group required for load_balancing
8569 struct sched_group *busiest; /* Busiest group in this sd */
8570 struct sched_group *local; /* Local group in this sd */
8635 struct sched_group *sdg = sd->groups; in update_cpu_capacity()
8718 struct sched_group *group, *sdg = sd->groups; in update_group_capacity()
8832 static inline int sg_imbalanced(struct sched_group *group) in sg_imbalanced()
[all...]
rt.c
1732 struct sched_group *sg = NULL; in find_cas_cpu()
1733 struct sched_group *sg_target = NULL; in find_cas_cpu()
1734 struct sched_group *sg_backup = NULL; in find_cas_cpu()
