
Searched refs: task_on_rq_queued (Results 1 - 14 of 14) sorted by relevance

/kernel/linux/linux-6.6/kernel/sched/
deadline.c  318 if (task_on_rq_queued(p)) in dl_change_utilization()
1153 if (!task_on_rq_queued(p)) { in dl_task_timer()
2244 !task_on_rq_queued(task))) { in find_lock_later_rq()
2280 WARN_ON_ONCE(!task_on_rq_queued(p)); in pick_next_pushable_dl_task()
2429 WARN_ON(!task_on_rq_queued(p)); in pull_dl_task()
2592 if (task_on_rq_queued(p) && p->dl.dl_runtime) in switched_from_dl()
2601 if (!task_on_rq_queued(p)) { in switched_from_dl()
2626 if (!task_on_rq_queued(p) || rq->dl.dl_nr_running) in switched_from_dl()
2648 if (!task_on_rq_queued(p)) { in switched_to_dl()
2675 if (!task_on_rq_queued(p)) in prio_changed_dl()
[all...]
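
The deadline.c hits above share one pattern: timer and class-transition paths only touch per-rq deadline state for a task that is actually enqueued. Below is a minimal, hedged sketch of that guard; it assumes the usual kernel/sched context (struct rq, struct task_struct) and uses a made-up function name, so it is illustrative rather than verbatim kernel code.

    /* Illustrative only: gate per-rq deadline bookkeeping on the queued check,
     * as the switched_from_dl()/switched_to_dl() hits above do. */
    static void example_dl_class_change(struct rq *rq, struct task_struct *p)
    {
            if (!task_on_rq_queued(p))
                    return;         /* blocked or mid-migration: no rq state to fix up here */

            /* ... adjust rq->dl bandwidth/overload state for the queued task ... */
    }
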
core_sched.c  79 if (cookie && task_on_rq_queued(p)) in sched_core_update_cookie()
rt.c  2208 !task_on_rq_queued(task))) { in find_lock_lowest_rq()
2242 BUG_ON(!task_on_rq_queued(p)); in pick_next_pushable_task()
2617 WARN_ON(!task_on_rq_queued(p)); in pull_rt_task()
2714 if (!task_on_rq_queued(p) || rq->rt.rt_nr_running || in switched_from_rt()
2753 if (task_on_rq_queued(p)) { in switched_to_rt()
2770 if (!task_on_rq_queued(p)) in prio_changed_rt()
2885 if (!task_on_rq_queued(next_task) || in rt_active_load_balance_cpu_stop()
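
In rt.c the helper is mostly an assertion in the push/pull machinery: a task found on the pushable list must still be queued on its runqueue, otherwise the data structures are inconsistent. A rough sketch of that invariant, loosely modeled on the pick_next_pushable_task() hit above (the candidate-selection details are omitted and the function name is hypothetical):

    /* Sketch: a push candidate that is not queued on this rq indicates a bug. */
    static struct task_struct *example_check_pushable(struct rq *rq,
                                                      struct task_struct *candidate)
    {
            if (!candidate)
                    return NULL;

            BUG_ON(!task_on_rq_queued(candidate));
            return candidate;
    }
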
core.c  1208 if (!task_on_rq_queued(p)) in __need_bw_check()
2118 return task_on_rq_queued(p); in sched_task_on_rq()
2281 if (task_on_rq_queued(rq->curr) && test_tsk_need_resched(rq->curr)) in check_preempt_curr()
2375 queued = task_on_rq_queued(p); in wait_task_inactive()
2684 if (task_on_rq_queued(p)) { in migration_cpu_stop()
2830 queued = task_on_rq_queued(p); in __do_set_cpus_allowed()
3134 if (task_on_rq_queued(p))
3484 if (task_on_rq_queued(p)) { in __migrate_swap_task()
3976 if (task_on_rq_queued(p)) { in ttwu_runnable()
5735 if (!p->on_cpu || !task_on_rq_queued(p)) in task_sched_runtime()
[all...]
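
core.c also exports the check to the rest of the kernel: the hit at line 2118 is the body of sched_task_on_rq(). A sketch of that thin wrapper, assuming the 6.6 layout indexed here (surrounding lines may differ between releases):

    /* Sketch of the wrapper behind the core.c:2118 hit. */
    bool sched_task_on_rq(struct task_struct *p)
    {
            return task_on_rq_queued(p);
    }
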
fair.c  7803 * Despite the task_on_rq_queued(@p) check there is still a in cpu_util()
7821 else if (p && unlikely(task_on_rq_queued(p) || current == p)) in cpu_util()
10433 if (task_on_rq_queued(p)) in task_running_on_cpu()
11924 if (task_on_rq_queued(push_task) && in active_load_balance_cpu_stop()
13122 if (!task_on_rq_queued(p)) in prio_changed_fair()
13230 if (task_on_rq_queued(p)) { in switched_to_fair()
13253 if (task_on_rq_queued(p)) { in set_next_task_fair()
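
In fair.c the predicate mostly decides whether a task still contributes to a CPU's load, e.g. the task_running_on_cpu() hit at line 10433. A hedged approximation of that kind of check is below; the real function also consults PELT state, so treat this as a sketch rather than the exact source.

    /* Sketch: count a task toward a CPU only if it is queued there. */
    static unsigned int example_task_running_on_cpu(int cpu, struct task_struct *p)
    {
            if (cpu != task_cpu(p))
                    return 0;

            return task_on_rq_queued(p) ? 1 : 0;
    }
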
sched.h  2297 static inline int task_on_rq_queued(struct task_struct *p) in task_on_rq_queued() function
2503 return rq->stop && task_on_rq_queued(rq->stop); in sched_stop_runnable()
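
The sched.h hits are the definition itself: task_on_rq_queued() is a one-line predicate on p->on_rq. Roughly as it appears in kernel/sched/sched.h (TASK_ON_RQ_QUEUED is one of the on_rq states; dequeued and migrating tasks carry other values):

    static inline int task_on_rq_queued(struct task_struct *p)
    {
            /* p->on_rq is TASK_ON_RQ_QUEUED only while the task sits on a runqueue */
            return p->on_rq == TASK_ON_RQ_QUEUED;
    }
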
walt.c  679 if (task_on_rq_queued(p) in update_history()
/kernel/linux/linux-5.10/kernel/sched/
deadline.c  225 if (task_on_rq_queued(p)) in dl_change_utilization()
1089 if (!task_on_rq_queued(p)) { in dl_task_timer()
2062 BUG_ON(!task_on_rq_queued(p)); in pick_next_pushable_dl_task()
2278 WARN_ON(!task_on_rq_queued(p)); in pull_dl_task()
2426 if (task_on_rq_queued(p) && p->dl.dl_runtime) in switched_from_dl()
2435 if (!task_on_rq_queued(p)) { in switched_from_dl()
2460 if (!task_on_rq_queued(p) || rq->dl.dl_nr_running) in switched_from_dl()
2482 if (!task_on_rq_queued(p)) { in switched_to_dl()
2509 if (task_on_rq_queued(p) || rq->curr == p) { in prio_changed_dl()
core.c  1758 if (task_on_rq_queued(rq->curr) && test_tsk_need_resched(rq->curr)) in check_preempt_curr()
1885 if (task_on_rq_queued(p)) in migration_cpu_stop()
1914 queued = task_on_rq_queued(p); in do_set_cpus_allowed()
2032 } else if (task_on_rq_queued(p)) { in __set_cpus_allowed_ptr()
2107 if (task_on_rq_queued(p)) { in __migrate_swap_task()
2278 queued = task_on_rq_queued(p); in wait_task_inactive()
2684 if (task_on_rq_queued(p)) { in ttwu_runnable()
4155 if (!p->on_cpu || !task_on_rq_queued(p)) in task_sched_runtime()
4165 if (task_current(rq, p) && task_on_rq_queued(p)) { in task_sched_runtime()
4738 psi_sched_switch(prev, next, !task_on_rq_queued(prev)); in __schedule()
[all...]
rt.c  2007 BUG_ON(!task_on_rq_queued(p)); in pick_next_pushable_task()
2393 WARN_ON(!task_on_rq_queued(p)); in pull_rt_task()
2478 if (!task_on_rq_queued(p) || rq->rt.rt_nr_running || in switched_from_rt()
2517 if (task_on_rq_queued(p)) { in switched_to_rt()
2534 if (!task_on_rq_queued(p)) in prio_changed_rt()
2649 if (!task_on_rq_queued(next_task) || in rt_active_load_balance_cpu_stop()
sched.h  1845 static inline int task_on_rq_queued(struct task_struct *p) in task_on_rq_queued() function
2019 return rq->stop && task_on_rq_queued(rq->stop); in sched_stop_runnable()
fair.c  6832 if (unlikely(task_on_rq_queued(p) || current == p)) in cpu_util_without()
9214 if (task_on_rq_queued(p)) in task_running_on_cpu()
10540 if (task_on_rq_queued(push_task) && in active_load_balance_cpu_stop()
11568 if (!task_on_rq_queued(p)) in prio_changed_fair()
11712 if (task_on_rq_queued(p)) { in switched_to_fair()
11735 if (task_on_rq_queued(p)) { in set_next_task_fair()
psi.c  987 if (task_on_rq_queued(task)) { in cgroup_move_task()
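
The psi.c hit only appears in the 5.10 list: when cgroup_move_task() migrates a task's pressure-stall accounting between cgroups, whether the task is queued decides whether the runnable state moves with it. A loose sketch of that decision, using the real TSK_IOWAIT/TSK_RUNNING PSI flags but a hypothetical helper name and simplified logic:

    /* Sketch: pick the PSI flags that should follow a task across cgroups. */
    static unsigned int example_psi_move_flags(struct task_struct *task)
    {
            if (task->in_iowait)
                    return TSK_IOWAIT;
            if (task_on_rq_queued(task))
                    return TSK_RUNNING;
            return 0;
    }
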
walt.c  678 if (task_on_rq_queued(p) in update_history()

Completed in 80 milliseconds