Searched refs:MEMCG_CHARGE_BATCH (Results 1 - 4 of 4) sorted by relevance
/kernel/linux/linux-6.6/mm/
memcontrol.c
   593   * (MEMCG_CHARGE_BATCH * nr_cpus) update events. Though this optimization
   594   * will let stats be out of sync by at most (MEMCG_CHARGE_BATCH * nr_cpus) but
   638  if (x > MEMCG_CHARGE_BATCH) {  in memcg_rstat_updated()
   646  atomic_add(x / MEMCG_CHARGE_BATCH, &stats_flush_threshold);  in memcg_rstat_updated()
  2269  if (nr_pages > MEMCG_CHARGE_BATCH)  in consume_stock()
  2345  if (stock->nr_pages > MEMCG_CHARGE_BATCH)  in __refill_stock()
  2444  reclaim_high(memcg, MEMCG_CHARGE_BATCH, GFP_KERNEL);  in high_work_func()
  2578   * MEMCG_CHARGE_BATCH pages is nominal, so work out how much smaller or  in calculate_high_delay()
  2581  return penalty_jiffies * nr_pages / MEMCG_CHARGE_BATCH;  in calculate_high_delay()
  2670  unsigned int batch = max(MEMCG_CHARGE_BATCH, nr_pages);  in try_charge_memcg()
[all...]
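The linux-6.6 hits above share one pattern: charges go to the shared page counter in MEMCG_CHARGE_BATCH-sized chunks, with the surplus parked in a per-CPU stock (consume_stock()/__refill_stock()), so most single-page charges never touch the atomic counter at all. Below is a minimal, single-threaded user-space sketch of that flow; the plain globals standing in for the page counter and the per-CPU stock, and the simplified try_charge(), are assumptions for illustration, not the kernel code itself.

/*
 * Minimal user-space model of the per-CPU charge stock; single-threaded,
 * with plain globals standing in for the page counter and per-CPU state.
 */
#include <stdbool.h>
#include <stdio.h>

#define MEMCG_CHARGE_BATCH 64U          /* the linux-6.6 value */

static unsigned long charge_counter;    /* stands in for the shared page_counter */
static unsigned int stock_nr_pages;     /* pre-charged pages cached "per CPU" */

/* Serve small requests from the local cache, as consume_stock() does. */
static bool consume_stock(unsigned int nr_pages)
{
        if (nr_pages > MEMCG_CHARGE_BATCH)
                return false;           /* oversized request: use the counter */
        if (stock_nr_pages < nr_pages)
                return false;           /* cache exhausted: recharge below */
        stock_nr_pages -= nr_pages;
        return true;
}

/* Charge the shared counter a whole batch at a time and park the surplus
 * in the stock, mirroring batch = max(MEMCG_CHARGE_BATCH, nr_pages). */
static void try_charge(unsigned int nr_pages)
{
        unsigned int batch = nr_pages > MEMCG_CHARGE_BATCH ?
                             nr_pages : MEMCG_CHARGE_BATCH;

        if (consume_stock(nr_pages))
                return;
        charge_counter += batch;                /* one shared update */
        stock_nr_pages += batch - nr_pages;     /* surplus feeds later hits */
}

int main(void)
{
        for (int i = 0; i < 200; i++)
                try_charge(1);
        /* 200 one-page charges need only ceil(200/64) = 4 counter updates */
        printf("counter=%lu stock=%u\n", charge_counter, stock_nr_pages);
        return 0;
}

Running the model, 200 one-page charges cost only four updates to the shared counter, which is the contention the batch exists to avoid.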
/kernel/linux/linux-5.10/mm/
memcontrol.c
   783  long x, threshold = MEMCG_CHARGE_BATCH;  in __mod_memcg_state()
   823  long x, threshold = MEMCG_CHARGE_BATCH;  in __mod_memcg_lruvec_state()
   929  if (unlikely(x > MEMCG_CHARGE_BATCH)) {  in __count_memcg_events()
  2280  if (nr_pages > MEMCG_CHARGE_BATCH)  in consume_stock()
  2355  if (stock->nr_pages > MEMCG_CHARGE_BATCH)  in refill_stock()
  2483  reclaim_high(memcg, MEMCG_CHARGE_BATCH, GFP_KERNEL);  in high_work_func()
  2617   * MEMCG_CHARGE_BATCH pages is nominal, so work out how much smaller or  in calculate_high_delay()
  2620  return penalty_jiffies * nr_pages / MEMCG_CHARGE_BATCH;  in calculate_high_delay()
  2709  unsigned int batch = max(MEMCG_CHARGE_BATCH, nr_pages);  in try_charge()
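In the 5.10 tree the same constant also serves as the stats flush threshold: __mod_memcg_state() and friends accumulate deltas in a per-CPU scratch value and fold them into the shared counter only once the pending magnitude exceeds MEMCG_CHARGE_BATCH, which is why readers may lag by up to the batch times nr_cpus. A rough single-threaded model follows; the plain variables standing in for this_cpu state and the atomic counter are illustrative assumptions.

/*
 * Model of the 5.10-style batched stat update: deltas accumulate in a
 * per-CPU scratch value and reach the shared counter only once their
 * magnitude crosses MEMCG_CHARGE_BATCH.
 */
#include <stdio.h>
#include <stdlib.h>

#define MEMCG_CHARGE_BATCH 32U          /* the linux-5.10 value */

static long shared_stat;                /* stands in for the memcg-wide atomic */
static long percpu_stat;                /* stands in for this CPU's pending delta */

static void mod_memcg_state(long delta)
{
        long x = percpu_stat + delta;

        if (labs(x) > (long)MEMCG_CHARGE_BATCH) {
                shared_stat += x;       /* one shared update per ~batch deltas */
                x = 0;
        }
        percpu_stat = x;                /* pending error stays within the batch */
}

int main(void)
{
        for (int i = 0; i < 100; i++)
                mod_memcg_state(1);
        /* shared lags the true total by at most the batch per CPU */
        printf("shared=%ld pending=%ld\n", shared_stat, percpu_stat);
        return 0;
}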
/kernel/linux/linux-5.10/include/linux/
memcontrol.h
   356  #define MEMCG_CHARGE_BATCH 32U
/kernel/linux/linux-6.6/include/linux/
memcontrol.h
   358  #define MEMCG_CHARGE_BATCH 64U
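Comparing the two header hits: the batch doubled from 32U in linux-5.10 to 64U in linux-6.6. Assuming the common 4 KiB page size, each CPU's stock can then cache up to 64 * 4 KiB = 256 KiB of pre-charged memory instead of 128 KiB, and the worst-case per-CPU stats drift scales the same way.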