/kernel/linux/linux-5.10/include/trace/events/
  rtg.h
     43: TP_PROTO(unsigned int id, unsigned int nr_running, struct task_struct *task),
     45: TP_ARGS(id, nr_running, task),
     49: __field(unsigned int, nr_running)
     62: __entry->nr_running = nr_running;
     92: TP_PROTO(unsigned int id, unsigned int nr_running,
     95: TP_ARGS(id, nr_running, rtg_cpus, valid),
     99: __field(unsigned int, nr_running)
    106: __entry->nr_running = nr_running;
    [all ...]
/kernel/linux/linux-6.6/include/trace/events/
  rtg.h
     44: TP_PROTO(unsigned int id, unsigned int nr_running, struct task_struct *task),
     46: TP_ARGS(id, nr_running, task),
     50: __field(unsigned int, nr_running)
     63: __entry->nr_running = nr_running;
     93: TP_PROTO(unsigned int id, unsigned int nr_running,
     96: TP_ARGS(id, nr_running, rtg_cpus, valid),
    100: __field(unsigned int, nr_running)
    107: __entry->nr_running = nr_running;
    [all ...]
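Both rtg.h variants follow the standard TRACE_EVENT() pattern: TP_PROTO()/TP_ARGS() declare the tracepoint signature, __field() reserves a slot in the ring-buffer record, and the TP_fast_assign() block copies nr_running into __entry. A minimal sketch of that shape, with a hypothetical event name and a reduced field set rather than the exact rtg events:

  TRACE_EVENT(rtg_nr_running_sample,      /* hypothetical event name */

          TP_PROTO(unsigned int id, unsigned int nr_running),

          TP_ARGS(id, nr_running),

          TP_STRUCT__entry(
                  __field(unsigned int, id)
                  __field(unsigned int, nr_running)
          ),

          TP_fast_assign(
                  __entry->id         = id;
                  __entry->nr_running = nr_running;
          ),

          TP_printk("id=%u nr_running=%u", __entry->id, __entry->nr_running)
  );

The scheduler side would then call trace_rtg_nr_running_sample(id, nr_running) wherever the group counter changes; the tracepoint costs only a static branch while the event is disabled.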
/kernel/linux/linux-5.10/kernel/sched/
  sched_avg.c
     34: * @return: Average nr_running, iowait and nr_big_tasks value since last poll.
     38: * Obtains the average nr_running value since the last poll.
     51: /* read and reset nr_running counts */  (in sched_get_nr_running_avg())
    124: * Update average with latest nr_running value for CPU
    130: unsigned long flags, nr_running;  (local, in sched_update_nr_prod())
    133: nr_running = per_cpu(nr, cpu);  (in sched_update_nr_prod())
    138: per_cpu(nr, cpu) = nr_running + (inc ? delta : -delta);  (in sched_update_nr_prod())
    145: update_last_busy_time(cpu, !inc, nr_running, curr_time);  (in sched_update_nr_prod())
    147: per_cpu(nr_prod_sum, cpu) += nr_running * diff;  (in sched_update_nr_prod())
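sched_avg.c (identical in both trees) maintains a time-weighted running average: each enqueue/dequeue calls sched_update_nr_prod(), which credits the old per-CPU nr_running for the interval it was in effect by adding nr_running * diff to nr_prod_sum, then applies the delta; sched_get_nr_running_avg() later divides the accumulated sums by the polling window and resets them. A simplified sketch of the update step, with illustrative per-CPU variable names (the real code also tracks iowait and big-task sums and serialises with a per-CPU spinlock):

  static DEFINE_PER_CPU(u64, nr_cur);        /* current nr_running on this CPU */
  static DEFINE_PER_CPU(u64, nr_prod_sum);   /* sum of nr_running * time-in-state */
  static DEFINE_PER_CPU(u64, last_time);     /* timestamp of the previous update */

  void sched_update_nr_prod(int cpu, long delta, bool inc)
  {
          u64 curr_time  = sched_clock();
          u64 nr_running = per_cpu(nr_cur, cpu);
          u64 diff       = curr_time - per_cpu(last_time, cpu);

          /* credit the old nr_running for the time it was in effect ... */
          per_cpu(nr_prod_sum, cpu) += nr_running * diff;

          /* ... then apply the enqueue (+delta) or dequeue (-delta) */
          per_cpu(nr_cur, cpu)    = nr_running + (inc ? delta : -delta);
          per_cpu(last_time, cpu) = curr_time;
  }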
  fair.c
     716: static u64 __sched_period(unsigned long nr_running)  (argument, in __sched_period())
     718: if (unlikely(nr_running > sched_nr_latency))  (in __sched_period())
     719: return nr_running * sysctl_sched_min_granularity;  (in __sched_period())
     732: unsigned int nr_running = cfs_rq->nr_running;  (local, in sched_slice())
     736: nr_running = rq_of(cfs_rq)->cfs.h_nr_running;  (in sched_slice())
     738: slice = __sched_period(nr_running + !se->on_rq);  (in sched_slice())
    1549: unsigned int nr_running;  (member)
    1590: static inline long adjust_numa_imbalance(int imbalance, int nr_running);
    1596: if ((ns->nr_running > n ...  (truncated, in numa_classify())
    8968: int i, nr_running, local_group;  (local, in update_sg_lb_stats())
    9260: int i, nr_running;  (local, in update_sg_wakeup_stats())
    9604: adjust_numa_imbalance(int imbalance, int nr_running)  (argument, in adjust_numa_imbalance())
    9946: unsigned int nr_running;  (local, in find_busiest_queue())
    [all ...]
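In 5.10 these fair.c hits are the classic CFS period/slice computation: once more than sched_nr_latency tasks are runnable, the scheduling period stops being the fixed latency target and grows linearly instead, so every task still receives at least the minimum granularity. Reconstructed from the hits above (this matches the stock 5.10 logic; the sysctl names are the real ones):

  static u64 __sched_period(unsigned long nr_running)
  {
          if (unlikely(nr_running > sched_nr_latency))
                  /* too many tasks: grow the period instead of shrinking slices */
                  return nr_running * sysctl_sched_min_granularity;
          else
                  return sysctl_sched_latency;
  }

sched_slice() then calls __sched_period(nr_running + !se->on_rq), counting the entity being placed even when it is not yet on the runqueue.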
  loadavg.c
     17: * The global load average is an exponentially decaying average of nr_running +
     24: * nr_active += cpu_of(cpu)->nr_running + cpu_of(cpu)->nr_uninterruptible;
     83: nr_active = this_rq->nr_running - adjust;  (in calc_load_fold_active())
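The loadavg.c hits document the global load average: each runqueue periodically folds nr_running + nr_uninterruptible into a global active-task count, and that count drives the exponentially decaying avenrun[] values. The folding step around line 83 looks roughly like this (reconstructed from the upstream code; the decay itself lives elsewhere in the file):

  long calc_load_fold_active(struct rq *this_rq, long adjust)
  {
          long nr_active, delta = 0;

          nr_active  = this_rq->nr_running - adjust;
          /* signed: per-CPU nr_uninterruptible can go transiently negative when
           * a task sleeps on one CPU and is woken on another */
          nr_active += (long)this_rq->nr_uninterruptible;

          if (nr_active != this_rq->calc_load_active) {
                  delta = nr_active - this_rq->calc_load_active;
                  this_rq->calc_load_active = nr_active;
          }

          return delta;   /* contribution to the global calc_load_tasks */
  }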
/kernel/linux/linux-6.6/kernel/sched/
  sched_avg.c
     34: * @return: Average nr_running, iowait and nr_big_tasks value since last poll.
     38: * Obtains the average nr_running value since the last poll.
     51: /* read and reset nr_running counts */  (in sched_get_nr_running_avg())
    124: * Update average with latest nr_running value for CPU
    130: unsigned long flags, nr_running;  (local, in sched_update_nr_prod())
    133: nr_running = per_cpu(nr, cpu);  (in sched_update_nr_prod())
    138: per_cpu(nr, cpu) = nr_running + (inc ? delta : -delta);  (in sched_update_nr_prod())
    145: update_last_busy_time(cpu, !inc, nr_running, curr_time);  (in sched_update_nr_prod())
    147: per_cpu(nr_prod_sum, cpu) += nr_running * diff;  (in sched_update_nr_prod())
  fair.c
     1006: * (Or nr_running == 0 and both are NULL)  (in __pick_eevdf())
     1108: if (cfs_rq->nr_running > 1) {  (in update_deadline())
     2037: unsigned int nr_running;  (member)
     2067: if ((ns->nr_running > ns->weight) &&  (in numa_classify())
     2072: if ((ns->nr_running < ns->weight) ||  (in numa_classify())
     2127: ns->nr_running += rq->cfs.h_nr_running;  (in update_numa_stats())
     2130: if (find_idle && idle_core < 0 && !rq->nr_running && idle_cpu(cpu)) {  (in update_numa_stats())
     2447: src_running = env->src_stats.nr_running - 1;  (in task_numa_find_cpu())
     2448: dst_running = env->dst_stats.nr_running + 1;  (in task_numa_find_cpu())
     3606: cfs_rq->nr_running ...  (truncated, in account_entity_enqueue())
    10145: int i, nr_running, local_group;  (local, in update_sg_lb_stats())
    10479: int i, nr_running;  (local, in update_sg_wakeup_stats())
    11272: unsigned int nr_running;  (local, in find_busiest_queue())
    [all ...]
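The 6.6 fair.c no longer has the period/slice code (EEVDF replaced it); the remaining uses are mostly guards and statistics. update_deadline() only forces a reschedule when cfs_rq->nr_running > 1, i.e. there is actually another entity to run, and the NUMA-balancing code classifies a node by comparing its runnable count against its CPU count. The classification, reconstructed to match the upstream numa_classify() that the hits at lines 2067/2072 belong to (this tree may differ slightly):

  static inline enum numa_type numa_classify(unsigned int imbalance_pct,
                                             struct numa_stats *ns)
  {
          if ((ns->nr_running > ns->weight) &&
              (((ns->compute_capacity * 100) < (ns->util * imbalance_pct)) ||
               ((ns->compute_capacity * imbalance_pct) < (ns->runnable * 100))))
                  return node_overloaded;

          if ((ns->nr_running < ns->weight) ||
              (((ns->compute_capacity * 100) > (ns->util * imbalance_pct)) &&
               ((ns->compute_capacity * imbalance_pct) > (ns->runnable * 100))))
                  return node_has_spare;

          return node_fully_busy;
  }

ns->weight is the number of CPUs on the node, so "more runnable tasks than CPUs" plus failing capacity checks marks the node overloaded.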
  loadavg.c
     16: * The global load average is an exponentially decaying average of nr_running +
     23: * nr_active += cpu_of(cpu)->nr_running + cpu_of(cpu)->nr_uninterruptible;
     82: nr_active = this_rq->nr_running - adjust;  (in calc_load_fold_active())
/kernel/linux/linux-5.10/kernel/sched/rtg/
  rtg.c
    218: grp->nr_running--;  (in remove_task_from_group())
    220: if ((int)grp->nr_running < 0) {  (in remove_task_from_group())
    222: grp->nr_running = 0;  (in remove_task_from_group())
    271: grp->nr_running++;  (in add_task_to_group())
    272: if (grp->nr_running == 1)  (in add_task_to_group())
    379: grp->nr_running++;  (in update_group_nr_running())
    381: grp->nr_running--;  (in update_group_nr_running())
    383: if ((int)grp->nr_running < 0) {  (in update_group_nr_running())
    385: grp->nr_running = 0;  (in update_group_nr_running())
    553: if (grp->nr_running ...  (truncated, in update_group_demand())
    [all ...]
/kernel/linux/linux-6.6/kernel/sched/rtg/
  rtg.c
    219: grp->nr_running--;  (in remove_task_from_group())
    221: if ((int)grp->nr_running < 0) {  (in remove_task_from_group())
    223: grp->nr_running = 0;  (in remove_task_from_group())
    272: grp->nr_running++;  (in add_task_to_group())
    273: if (grp->nr_running == 1)  (in add_task_to_group())
    380: grp->nr_running++;  (in update_group_nr_running())
    382: grp->nr_running--;  (in update_group_nr_running())
    384: if ((int)grp->nr_running < 0) {  (in update_group_nr_running())
    386: grp->nr_running = 0;  (in update_group_nr_running())
    554: if (grp->nr_running ...  (truncated, in update_group_demand())
    [all ...]
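Both rtg.c versions keep a per-group runnable counter next to the per-CPU ones: the add/remove and update paths increment and decrement grp->nr_running and clamp it back to zero if it ever underflows (the (int) cast plus reset seen at lines 220-223 and 383-386). A sketch of the decrement side, with a hypothetical minimal group struct (the real related thread group carries much more state and its own locking):

  struct rtg_sketch {                     /* illustrative, not the real struct */
          unsigned int   nr_running;
          raw_spinlock_t lock;
  };

  static void rtg_dec_nr_running(struct rtg_sketch *grp)
  {
          lockdep_assert_held(&grp->lock);

          grp->nr_running--;
          if ((int)grp->nr_running < 0) {
                  /* enqueue/dequeue got unbalanced: warn and clamp instead of wrapping */
                  WARN_ON_ONCE(1);
                  grp->nr_running = 0;
          }
  }

The increment side mirrors this, and add_task_to_group() treats the 0 -> 1 transition (grp->nr_running == 1) as a special case.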
/kernel/linux/linux-5.10/io_uring/
  io-wq.c
     76: atomic_t nr_running;  (member)
    133: int nr_running;  (member)
    195: atomic_dec(&acct->nr_running);  (in io_worker_cancel_cb())
    309: atomic_inc(&acct->nr_running);  (in io_wqe_create_worker())
    318: atomic_inc(&acct->nr_running);  (in io_wqe_inc_running())
    342: atomic_dec(&acct->nr_running);  (in create_worker_cb())
    391: atomic_dec(&acct->nr_running);  (in io_queue_worker_create())
    405: if (atomic_dec_and_test(&acct->nr_running) && io_acct_run_queue(acct)) {
    406: atomic_inc(&acct->nr_running);
    772: atomic_dec(&acct->nr_running);  (in create_worker_cont())
    [all ...]
/kernel/linux/linux-6.6/io_uring/
  io-wq.c
     81: atomic_t nr_running;  (member)
    131: int nr_running;  (member)
    192: atomic_dec(&acct->nr_running);  (in io_worker_cancel_cb())
    322: atomic_inc(&acct->nr_running);  (in io_wq_create_worker())
    331: atomic_inc(&acct->nr_running);  (in io_wq_inc_running())
    355: atomic_dec(&acct->nr_running);  (in create_worker_cb())
    403: atomic_dec(&acct->nr_running);  (in io_queue_worker_create())
    416: if (!atomic_dec_and_test(&acct->nr_running))  (in io_wq_dec_running())
    422: atomic_inc(&acct->nr_running);  (in io_wq_dec_running())
    782: atomic_dec(&acct->nr_running);  (in create_worker_cont())
    [all ...]
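io-wq (both trees) accounts running workers per accounting class with an atomic_t, since workers block and wake outside the scheduler's locks: the counter is bumped around every point where a worker starts or stops running, and when the last running worker of a class is about to sleep while work is still queued, a replacement is spawned. A simplified sketch of that dec-and-maybe-spawn step, using the helper names visible in the hits (the real io_wq_dec_running() also checks worker flags and takes a reference on the wq):

  static void io_wq_dec_running(struct io_worker *worker)
  {
          struct io_wq_acct *acct = io_wq_get_acct(worker);

          /* not the last running worker in this class: nothing to do */
          if (!atomic_dec_and_test(&acct->nr_running))
                  return;

          /* last one, but the queue is empty: fine to sleep */
          if (!io_acct_run_queue(acct))
                  return;

          /* last running worker is blocking with work pending: spawn another */
          atomic_inc(&acct->nr_running);
          io_queue_worker_create(worker, acct, create_worker_cb);
  }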
/kernel/linux/linux-5.10/arch/s390/appldata/
  appldata_os.c
     64: u32 nr_running; /* number of runnable threads */  (member)
    102: os_data->nr_running = nr_running();  (in appldata_get_os_data())
/kernel/linux/linux-6.6/arch/s390/appldata/
  appldata_os.c
     64: u32 nr_running; /* number of runnable threads */  (member)
    102: os_data->nr_running = nr_running();  (in appldata_get_os_data())
/kernel/linux/linux-5.10/include/linux/sched/
  stat.h
     19: extern unsigned long nr_running(void);
  rtg.h
     35: unsigned int nr_running;  (member)
/kernel/linux/linux-6.6/include/linux/sched/
  stat.h
     20: extern unsigned int nr_running(void);
  rtg.h
     35: unsigned int nr_running;  (member)
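stat.h declares the system-wide accessor, and the two trees show its return type narrowing from unsigned long (5.10) to unsigned int (6.6). The accessor itself just sums the per-runqueue counters; roughly, in its upstream kernel/sched/core.c form (not part of the hits above):

  unsigned int nr_running(void)
  {
          unsigned int i, sum = 0;

          for_each_online_cpu(i)
                  sum += cpu_rq(i)->nr_running;

          return sum;
  }

Callers such as appldata_os.c above already store the result in a u32, so the 6.6 narrowing is transparent to them; the per-group counter in sched/rtg.h is separate bookkeeping that merely shares the field name.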
/kernel/linux/linux-5.10/include/uapi/linux/
  cgroupstats.h
     34: __u64 nr_running; /* Number of tasks running */  (member)
/kernel/linux/linux-6.6/include/uapi/linux/
  cgroupstats.h
     32: __u64 nr_running; /* Number of tasks running */  (member)
/kernel/linux/patches/linux-5.10/prebuilts/usr/include/linux/
  cgroupstats.h
     25: __u64 nr_running;  (member)
/kernel/linux/patches/linux-6.6/prebuilts/usr/include/linux/
  cgroupstats.h
     25: __u64 nr_running;  (member)
/kernel/linux/patches/linux-4.19/prebuilts/usr/include/linux/
  cgroupstats.h
     12: __u64 nr_running;  (member)
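All of the cgroupstats.h copies (UAPI plus the prebuilt sysroot headers) expose the same __u64 nr_running field of struct cgroupstats, which userspace fetches over the taskstats netlink interface. The kernel fills it by walking the cgroup's tasks and bucketing them by state; a sketch of that loop, assuming the upstream cgroupstats_build() shape (task-state access differs slightly between 5.10 and 6.6):

  /* inside cgroupstats_build(), after css_task_iter_start(&cgrp->self, 0, &it): */
  while ((tsk = css_task_iter_next(&it))) {
          switch (READ_ONCE(tsk->__state)) {     /* tsk->state on 5.10 */
          case TASK_RUNNING:
                  stats->nr_running++;
                  break;
          case TASK_INTERRUPTIBLE:
                  stats->nr_sleeping++;
                  break;
          case TASK_UNINTERRUPTIBLE:
                  stats->nr_uninterruptible++;
                  break;
          case TASK_STOPPED:
                  stats->nr_stopped++;
                  break;
          default:
                  if (tsk->in_iowait)
                          stats->nr_io_wait++;
          }
  }
  css_task_iter_end(&it);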
/kernel/linux/linux-5.10/fs/proc/
  loadavg.c
     23: nr_running(), nr_threads,  (in loadavg_proc_show())
/kernel/linux/linux-6.6/fs/proc/
  loadavg.c
     24: nr_running(), nr_threads,  (in loadavg_proc_show())
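fs/proc/loadavg.c is where the accessor becomes user-visible: the fourth field of /proc/loadavg is nr_running()/nr_threads, runnable tasks over total tasks. The show routine is essentially the following (reconstructed; this matches the 6.6 shape, where nr_running() returns unsigned int):

  static int loadavg_proc_show(struct seq_file *m, void *v)
  {
          unsigned long avnrun[3];

          get_avenrun(avnrun, FIXED_1/200, 0);

          seq_printf(m, "%lu.%02lu %lu.%02lu %lu.%02lu %u/%d %d\n",
                     LOAD_INT(avnrun[0]), LOAD_FRAC(avnrun[0]),
                     LOAD_INT(avnrun[1]), LOAD_FRAC(avnrun[1]),
                     LOAD_INT(avnrun[2]), LOAD_FRAC(avnrun[2]),
                     nr_running(), nr_threads,
                     idr_get_cursor(&task_active_pid_ns(current)->idr) - 1);
          return 0;
  }

get_avenrun() hands back the decayed averages maintained by kernel/sched/loadavg.c above, so the three load figures and the runnable count come from the same nr_running bookkeeping.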