Searched refs:sched_group (Results 1 - 5 of 5) sorted by relevance
/device/soc/rockchip/common/sdk_linux/kernel/sched/
topology.c
    44   struct sched_group *group = sd->groups;  in sched_domain_debug_one()
   604   static void free_sched_groups(struct sched_group *sg, int free_sgc)  in free_sched_groups()
   606   struct sched_group *tmp, *first;  in free_sched_groups()
   788   int group_balance_cpu(struct sched_group *sg)  in group_balance_cpu()
   897   static void build_balance_mask(struct sched_domain *sd, struct sched_group *sg, struct cpumask *mask)  in build_balance_mask()
   935   static struct sched_group *build_group_from_child_sched_domain(struct sched_domain *sd, int cpu)  in build_group_from_child_sched_domain()
   937   struct sched_group *sg;  in build_group_from_child_sched_domain()
   940   sg = kzalloc_node(sizeof(struct sched_group) + cpumask_size(), GFP_KERNEL, cpu_to_node(cpu));  in build_group_from_child_sched_domain()
   956   static void init_overlap_sched_group(struct sched_domain *sd, struct sched_group *sg)  in init_overlap_sched_group()
  1008   struct sched_group *first ...  in build_overlap_sched_groups()
         (more matches in this file not shown)
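Note on the hit at topology.c:940 above: the group's CPU mask lives in a flexible array at the end of struct sched_group, so a single kzalloc_node() call covers the struct and the mask storage together. Below is a minimal user-space sketch of that allocation pattern; the toy_group/TOY_NR_CPUS names are invented for illustration and this is not the kernel code itself.

/*
 * Minimal user-space sketch (not kernel code) of the allocation pattern at
 * topology.c:940: the per-group cpumask is a flexible array member, so one
 * zeroed allocation covers the struct plus the mask bytes.
 * toy_group and TOY_NR_CPUS are invented names for this illustration.
 */
#include <stdio.h>
#include <stdlib.h>

#define TOY_NR_CPUS    8
#define TOY_MASK_BYTES ((TOY_NR_CPUS + 7) / 8)

struct toy_group {
    struct toy_group *next;      /* circular list, like sched_group->next  */
    unsigned int group_weight;   /* number of CPUs set in the mask          */
    unsigned char cpumask[];     /* storage allocated together with struct  */
};

static struct toy_group *toy_group_alloc(void)
{
    /* One zeroed allocation for header + mask, mirroring
     * kzalloc_node(sizeof(struct sched_group) + cpumask_size(), ...). */
    return calloc(1, sizeof(struct toy_group) + TOY_MASK_BYTES);
}

int main(void)
{
    struct toy_group *sg = toy_group_alloc();

    if (!sg)
        return 1;

    sg->next = sg;               /* single-entry circular list */
    sg->cpumask[0] = 0x0F;       /* CPUs 0-3 belong to this group */
    sg->group_weight = 4;

    printf("group weight: %u\n", sg->group_weight);
    free(sg);
    return 0;
}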
sched.h
  1571   struct sched_group {
  1572   struct sched_group *next;  /* Must be a circular list */
  1589   static inline struct cpumask *sched_group_span(struct sched_group *sg)  in sched_group_span()
  1597   static inline struct cpumask *group_balance_mask(struct sched_group *sg)  in group_balance_mask()
  1603   * group_first_cpu - Returns the first CPU in the cpumask of a sched_group.
  1606   static inline unsigned int group_first_cpu(struct sched_group *group)  in group_first_cpu()
  1611   extern int group_balance_cpu(struct sched_group *sg);
  3045   extern int group_balance_cpu_not_isolated(struct sched_group *sg);
  3047   static inline int group_balance_cpu_not_isolated(struct sched_group *sg)  in group_balance_cpu_not_isolated()
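The sched.h hits above are the struct definition plus its small accessors (sched_group_span(), group_balance_mask(), group_first_cpu()). The user-space analogue below only illustrates the shape of the span/first-CPU pair; it assumes a one-word mask and invented toy_* names, not the kernel implementation.

/*
 * Simplified analogue (assumption: not the kernel helpers) of the accessors
 * listed above: toy_group_span() exposes the group's cpumask and
 * toy_group_first_cpu() returns the lowest set CPU in that mask.
 */
#include <stdio.h>

#define TOY_NR_CPUS 8

struct toy_group {
    struct toy_group *next;      /* circular list */
    unsigned long cpumask;       /* one word is enough for 8 toy CPUs */
};

/* Analogue of sched_group_span(): hand back the span of CPUs in the group. */
static unsigned long *toy_group_span(struct toy_group *sg)
{
    return &sg->cpumask;
}

/* Analogue of group_first_cpu(): index of the first CPU in the span. */
static unsigned int toy_group_first_cpu(struct toy_group *sg)
{
    unsigned long mask = *toy_group_span(sg);
    unsigned int cpu;

    for (cpu = 0; cpu < TOY_NR_CPUS; cpu++)
        if (mask & (1UL << cpu))
            return cpu;
    return TOY_NR_CPUS;          /* empty span */
}

int main(void)
{
    struct toy_group g = { .next = &g, .cpumask = 0x30 };  /* CPUs 4 and 5 */

    printf("first cpu: %u\n", toy_group_first_cpu(&g));    /* prints 4 */
    return 0;
}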
fair.c
  6213   static struct sched_group *find_idlest_group(struct sched_domain *sd, struct task_struct *p, int this_cpu);
  6218   static int find_idlest_group_cpu(struct sched_group *group, struct task_struct *p, int this_cpu)  in find_idlest_group_cpu()
  6293   struct sched_group *group;  in find_idlest_cpu()
  8023   * our sched_group. We may want to revisit it if we couldn't  in can_migrate_task()
  8582   * sg_lb_stats - stats of a sched_group required for load_balancing
  8608   struct sched_group *busiest;  /* Busiest group in this sd */
  8609   struct sched_group *local;    /* Local group in this sd */
  8674   struct sched_group *sdg = sd->groups;  in update_cpu_capacity()
  8693   struct sched_group *group, *sdg = sd->groups;  in update_group_capacity()
  8802   static inline int sg_imbalanced(struct sched_group *group) ...
         (more matches in this file not shown)
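The fair.c hits revolve around picking a group during wakeup placement and load balancing (find_idlest_group(), sg_lb_stats). The sketch below shows only the underlying pattern, walking the circular sched_group list and keeping the least-loaded entry; the toy_* names and the single load field are assumptions for illustration, not the fair.c algorithm.

/*
 * Sketch (assumed simplification, not fair.c itself) of the pattern behind
 * find_idlest_group(): walk the circular group list starting at the first
 * entry and remember the group with the lowest load.
 */
#include <stdio.h>

struct toy_group {
    struct toy_group *next;      /* circular list */
    unsigned long load;          /* stand-in for the group load statistics */
    int id;
};

static struct toy_group *toy_find_idlest_group(struct toy_group *first)
{
    struct toy_group *g = first, *idlest = first;

    do {
        if (g->load < idlest->load)
            idlest = g;
        g = g->next;
    } while (g != first);        /* stop once the walk wraps around */

    return idlest;
}

int main(void)
{
    struct toy_group a, b, c;

    a = (struct toy_group){ .next = &b, .load = 70, .id = 0 };
    b = (struct toy_group){ .next = &c, .load = 10, .id = 1 };
    c = (struct toy_group){ .next = &a, .load = 40, .id = 2 };

    printf("idlest group: %d\n", toy_find_idlest_group(&a)->id);  /* prints 1 */
    return 0;
}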
rt.c
  1813   struct sched_group *sg = NULL;  in find_cas_cpu()
  1814   struct sched_group *sg_target = NULL;  in find_cas_cpu()
  1815   struct sched_group *sg_backup = NULL;  in find_cas_cpu()
/device/soc/rockchip/rk3588/kernel/include/trace/hooks/ |
sched.h
    68   struct sched_group;
    70   TP_PROTO(struct sched_group *busiest, struct rq *dst_rq, int *out_balance),
    97   TP_PROTO(int dst_cpu, struct sched_group *group,
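The hook header forward-declares struct sched_group at line 68 because its TP_PROTO() prototypes only pass pointers, so the full struct layout is never needed there. The self-contained sketch below illustrates that pattern with an invented function-pointer hook (all toy_* names are assumptions); it is not the real trace-hook registration machinery.

/*
 * Sketch of why a forward declaration is enough: code that only passes
 * pointers to a struct can work with an incomplete type; only the hook
 * implementation needs the full definition.
 */
#include <stdio.h>

/* --- what the hook header needs: an incomplete type is enough --- */
struct toy_group;                              /* forward declaration only */
typedef void (*toy_hook_fn)(struct toy_group *busiest, int *out_balance);

static toy_hook_fn registered_hook;

static void toy_call_hook(struct toy_group *busiest, int *out_balance)
{
    if (registered_hook)
        registered_hook(busiest, out_balance);
}

/* --- only the hook implementation sees the full definition --- */
struct toy_group {
    int id;
};

static void my_hook(struct toy_group *busiest, int *out_balance)
{
    printf("hook saw group %d\n", busiest->id);
    *out_balance = 0;                          /* tell the caller we handled it */
}

int main(void)
{
    struct toy_group g = { .id = 7 };
    int balance = 1;

    registered_hook = my_hook;
    toy_call_hook(&g, &balance);
    printf("out_balance = %d\n", balance);
    return 0;
}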
Completed in 23 milliseconds