Lines Matching defs:swap_lock
53 DEFINE_SPINLOCK(swap_lock);
62 /* protected with swap_lock. reading in vm_swap_full() doesn't need lock */
73 * protected with swap_lock, and ordered by priority.
83 * This uses its own lock instead of swap_lock because when a
86 * is held and the locking order requires swap_lock to be taken
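The hits above fall in the declaration block around the lock; since the listing shows only the lines that mention swap_lock, the comments read as fragments. A minimal sketch of the layout they describe, assuming the surrounding definitions (total_swap_pages, swap_active_head, swap_avail_lock and its list) look roughly as they do in mainline mm/swapfile.c:

#include <linux/spinlock.h>
#include <linux/plist.h>

/* Serialises updates to the global swap tables. */
static DEFINE_SPINLOCK(swap_lock);

/* Written under swap_lock; vm_swap_full() reads it without the lock. */
long total_swap_pages;

/* All active swap areas, ordered by priority, protected by swap_lock. */
static PLIST_HEAD(swap_active_head);

/*
 * The available-areas list gets its own lock because a device is added
 * to or removed from it while its swap_info_struct->lock is already
 * held, and the documented lock ordering does not allow taking
 * swap_lock at that point.
 */
static DEFINE_SPINLOCK(swap_avail_lock);
static PLIST_HEAD(swap_avail_head);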
1839 spin_lock(&swap_lock);
1850 spin_unlock(&swap_lock);
1855 spin_unlock(&swap_lock);
1863 spin_lock(&swap_lock);
1870 spin_unlock(&swap_lock);
1873 spin_unlock(&swap_lock);
1901 spin_lock(&swap_lock);
1913 spin_unlock(&swap_lock);
2162 * No need for swap_lock here: we're just looking
2165 * allocations from this area (while holding swap_lock).
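Line 62 and lines 2162-2165 make the same point from two sides: writers update the global state under swap_lock, while readers that can tolerate a slightly stale value (the comment at line 62 names vm_swap_full()) skip the lock entirely. A hedged sketch of that split; the function names here are illustrative, not taken from the file:

#include <linux/types.h>
#include <linux/compiler.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(swap_lock);
static long nr_swap_pages;	/* free swap slots */
static long total_swap_pages;	/* configured swap, written under swap_lock */

/* Writer side: all updates are serialised by swap_lock. */
static void account_new_swap_area(long pages)
{
	spin_lock(&swap_lock);
	nr_swap_pages += pages;
	total_swap_pages += pages;
	spin_unlock(&swap_lock);
}

/*
 * Reader side: a heuristic comparison can act on a stale snapshot,
 * so it takes no lock and just reads each counter once.
 */
static bool swap_nearly_full(void)
{
	return READ_ONCE(nr_swap_pages) * 2 < READ_ONCE(total_swap_pages);
}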
2296 * we can empty the mmlist. swap_lock must be held on entry and exit.
2297 * Note that mmlist_lock nests inside swap_lock, and an mm must be
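Lines 2296-2297 spell out a lock-ordering rule: the helper that empties the mmlist runs with swap_lock already held, and mmlist_lock nests inside it. A sketch of that shape, loosely modelled on drain_mmlist(); the list here stands in for the real per-mm list:

#include <linux/spinlock.h>
#include <linux/list.h>

static DEFINE_SPINLOCK(swap_lock);
static DEFINE_SPINLOCK(mmlist_lock);
static LIST_HEAD(mmlist);

/* Caller must hold swap_lock; mmlist_lock is always taken inside it. */
static void drain_mmlist_sketch(void)
{
	struct list_head *p, *next;

	spin_lock(&mmlist_lock);
	list_for_each_safe(p, next, &mmlist)
		list_del_init(p);
	spin_unlock(&mmlist_lock);
}

Keeping the order fixed (swap_lock first, then mmlist_lock) is what makes the nesting safe; acquiring them in the opposite order anywhere else could deadlock.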
2519 assert_spin_locked(&swap_lock);
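The assert_spin_locked() at line 2519 is the executable form of the "must be held on entry" comments: a helper that depends on the caller's locking states that dependency and checks it at runtime. A minimal, hypothetical illustration (the helper name is not from the file):

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(swap_lock);
static unsigned int nr_swapfiles;

/* Caller must hold swap_lock. */
static unsigned int swap_type_count(void)
{
	assert_spin_locked(&swap_lock);	/* enforce the locking contract */
	return nr_swapfiles;
}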
2540 spin_lock(&swap_lock);
2544 spin_unlock(&swap_lock);
2550 spin_lock(&swap_lock);
2554 spin_unlock(&swap_lock);
2559 spin_lock(&swap_lock);
2564 spin_unlock(&swap_lock);
2571 spin_lock(&swap_lock);
2574 spin_unlock(&swap_lock);
2606 spin_lock(&swap_lock);
2617 spin_unlock(&swap_lock);
2624 spin_unlock(&swap_lock);
2648 spin_unlock(&swap_lock);
2665 spin_lock(&swap_lock);
2669 spin_unlock(&swap_lock);
2686 spin_lock(&swap_lock);
2694 spin_unlock(&swap_lock);
2696 spin_lock(&swap_lock);
2710 spin_unlock(&swap_lock);
2744 spin_lock(&swap_lock);
2746 spin_unlock(&swap_lock);
2906 spin_lock(&swap_lock);
2912 spin_unlock(&swap_lock);
2939 spin_unlock(&swap_lock);
3416 spin_lock(&swap_lock);
3419 spin_unlock(&swap_lock);
3446 spin_lock(&swap_lock);
3455 spin_unlock(&swap_lock);
3465 spin_lock(&swap_lock);
3473 spin_unlock(&swap_lock);
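The remaining hits, from roughly line 2540 onward, are mostly the plain lock/unlock bracketing that protects walks over the global swap tables, such as device activation and deactivation, the /proc/swaps iterator and statistics reporting. A hedged sketch of that recurring shape; the structure and function names are illustrative, not the file's own:

#include <linux/types.h>
#include <linux/spinlock.h>

#define SKETCH_MAX_AREAS 32	/* illustrative bound */

struct sketch_swap_area {
	unsigned long pages;
	bool used;
};

static DEFINE_SPINLOCK(swap_lock);
static struct sketch_swap_area areas[SKETCH_MAX_AREAS];
static unsigned int nr_areas;

/* Snapshot the configured total; the table cannot change under swap_lock. */
static unsigned long total_configured_swap(void)
{
	unsigned long total = 0;
	unsigned int type;

	spin_lock(&swap_lock);
	for (type = 0; type < nr_areas; type++)
		if (areas[type].used)
			total += areas[type].pages;
	spin_unlock(&swap_lock);

	return total;
}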