Lines matching refs: freelist (mm/slub.c, the SLUB allocator)

72  *	A. slab->freelist	-> List of free objects in a slab
82 * processors may put objects onto the freelist but the processor that
84 * slab's freelist.
113 * taken but it still utilizes the freelist for the common operations.
160 * freelist that allows lockless access to
161 * free objects in addition to the regular freelist
364 * freeptr_t represents a SLUB freelist pointer, which might be encoded
370 * Returns freelist pointer (ptr). With hardening, this is obfuscated
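
Lines 364/370 refer to the hardened pointer encoding: with CONFIG_SLAB_FREELIST_HARDENED, the next-free link stored inside an object is obfuscated rather than kept as a raw pointer, so a plain heap overwrite cannot forge a usable link. A minimal userspace sketch of the same XOR scheme, assuming a 64-bit build; `cache_random` is an assumed stand-in for the per-cache `s->random` cookie:

    #include <stdint.h>

    /*
     * Sketch: the stored link is the real pointer XORed with a per-cache
     * random cookie and the byte-swapped address it is stored at. Without
     * knowing the cookie, an attacker cannot construct a valid link.
     */
    static uintptr_t cache_random = 0x9e3779b97f4a7c15ULL;  /* assumed cookie */

    static void *encode_freeptr(void *ptr, uintptr_t ptr_addr)
    {
            return (void *)((uintptr_t)ptr ^ cache_random ^
                            (uintptr_t)__builtin_bswap64(ptr_addr));
    }

    /* Decoding is the same XOR, so encode_freeptr(encode_freeptr(p, a), a)
     * round-trips back to p. */
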
421 * memory chunk in the freelist. In that case this_cpu_cmpxchg_double() in
534 freelist_aba_t old = { .freelist = freelist_old, .counter = counters_old };
535 freelist_aba_t new = { .freelist = freelist_new, .counter = counters_new };
551 if (slab->freelist == freelist_old &&
553 slab->freelist = freelist_new;
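
Lines 534-553 are the locked fallback for the freelist update: both the freelist head and the packed counters word must still match before the new values are installed, and comparing the counters alongside the head is what defeats ABA on the head pointer. A single-threaded schematic; the real code takes a bit spinlock on the slab and disables interrupts around the compare:

    #include <stdbool.h>

    struct slab_like {
            void *freelist;
            unsigned long counters;     /* inuse/objects/frozen packed */
    };

    /* Fallback double-word "compare and exchange": only update when both
     * the freelist head and the counters hold their expected values. */
    static bool update_freelist_slow(struct slab_like *slab,
                                     void *free_old, unsigned long ctr_old,
                                     void *free_new, unsigned long ctr_new)
    {
            bool ok = false;

            /* real code: slab_lock(slab) + local irq off */
            if (slab->freelist == free_old && slab->counters == ctr_old) {
                    slab->freelist = free_new;
                    slab->counters = ctr_new;
                    ok = true;
            }
            /* real code: slab_unlock(slab) + local irq restore */
            return ok;
    }
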
642 for (p = slab->freelist; p; p = get_freepointer(s, p))
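
The walk at line 642 works because each free object stores the next-free link inside itself, at a per-cache offset. A sketch with illustrative names:

    #include <stddef.h>

    struct cache_like { size_t offset; };  /* where the link lives in an object */

    static void *get_freepointer_sketch(struct cache_like *s, void *object)
    {
            return *(void **)((char *)object + s->offset);
    }

    /* Counting free objects, mirroring the loop at line 642:
     *   for (p = freelist; p; p = get_freepointer_sketch(s, p))
     *           nr_free++;
     */
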
859 slab, slab->objects, slab->inuse, slab->freelist,
996 void **freelist, void *nextfree)
999 !check_valid_pointer(s, slab, nextfree) && freelist) {
1000 object_err(s, slab, *freelist, "Freechain corrupt");
1001 *freelist = NULL;
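
Lines 996-1001 are the corruption guard: if a next-free link fails validation, the rest of the chain is reported and cut off so the slab stays usable (losing a few objects beats following a corrupt pointer). A sketch, with a hypothetical bounds check standing in for check_valid_pointer():

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Hypothetical bounds check: is p inside the slab's memory at all? */
    static bool valid_pointer(void *slab_base, size_t slab_size, void *p)
    {
            return (char *)p >= (char *)slab_base &&
                   (char *)p < (char *)slab_base + slab_size;
    }

    /* If the next-free link is bogus, zap the remainder of the freelist
     * instead of chasing it. */
    static bool freelist_corrupted_sketch(void *slab_base, size_t slab_size,
                                          void **freelist, void *nextfree)
    {
            if (nextfree && !valid_pointer(slab_base, slab_size, nextfree) &&
                freelist) {
                    fprintf(stderr, "Freechain corrupt, truncating\n");
                    *freelist = NULL;
                    return true;
            }
            return false;
    }
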
1284 * Determine if a certain object in a slab is on the freelist. Must hold the
1294 fp = slab->freelist;
1305 slab->freelist = NULL;
1344 slab->freelist);
1465 slab->freelist = NULL;
1755 void **freelist, void *nextfree)
1817 /* Head and tail of the reconstructed freelist */
1827 /* Move object to the new freelist */
1834 * Adjust the reconstructed freelist depth
1915 /* Initialize each random sequence freelist per cache */
1928 /* Get the next entry from the pre-computed randomized freelist */
1950 /* Shuffle the singly linked freelist based on a pre-computed random sequence */
1967 /* First entry is used as the base of the freelist */
1971 slab->freelist = cur;
2050 slab->freelist = start;
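
Lines 1915-1971 build the initial freelist in a pre-computed random order instead of address order, so allocation order reveals nothing about object layout. A sketch, assuming `seq[]` already holds a random permutation of 0..n-1 (the kernel precomputes one such sequence per cache):

    #include <stddef.h>

    /* Link n equally sized objects into a freelist in the order given by
     * the precomputed permutation seq[]. Returns the new freelist head. */
    static void *shuffle_freelist_sketch(char *slab_base, size_t obj_size,
                                         const unsigned int *seq, size_t n)
    {
            void *head = slab_base + (size_t)seq[0] * obj_size;  /* base entry */
            void *cur = head;

            for (size_t i = 1; i < n; i++) {
                    void *next = slab_base + (size_t)seq[i] * obj_size;

                    *(void **)cur = next;       /* in-object next-free link */
                    cur = next;
            }
            *(void **)cur = NULL;               /* terminate the chain */
            return head;                        /* becomes slab->freelist */
    }
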
2160 object = slab->freelist;
2161 slab->freelist = get_freepointer(s, object);
2179 * allocated slab. Allocate a single object instead of the whole freelist
2191 object = slab->freelist;
2192 slab->freelist = get_freepointer(s, object);
2218 * return the pointer to the freelist.
2226 void *freelist;
2233 * Zap the freelist and set the frozen bit.
2234 * The old freelist is the list of objects for the
2237 freelist = slab->freelist;
2242 new.freelist = NULL;
2244 new.freelist = freelist;
2251 freelist, counters,
2252 new.freelist, new.counters,
2257 WARN_ON(!freelist);
2258 return freelist;
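
Lines 2226-2258 show the freeze: the slab's own freelist is zapped and the frozen bit set in one atomic double-word update, after which the taken list belongs exclusively to the acquiring cpu. A C (not C++) sketch of the retry loop, with a single-threaded stand-in replacing the atomic update and a field layout mirroring the packed counters:

    #include <stdbool.h>
    #include <stddef.h>

    struct slab_sketch {
            void *freelist;
            union {
                    unsigned long counters;     /* all fields as one word */
                    struct {
                            unsigned inuse:16;
                            unsigned objects:15;
                            unsigned frozen:1;
                    };
            };
    };

    /* Single-threaded stand-in for the atomic double-word update. */
    static bool update_freelist_sketch(struct slab_sketch *slab,
                                       void *fl_old, unsigned long c_old,
                                       void *fl_new, unsigned long c_new)
    {
            if (slab->freelist == fl_old && slab->counters == c_old) {
                    slab->freelist = fl_new;
                    slab->counters = c_new;
                    return true;
            }
            return false;
    }

    /* Take the slab's whole freelist for one cpu: mark every object in
     * use, set the frozen bit, zap the slab's own list, and hand the old
     * list back. Retries if the slab changed underneath us. */
    static void *freeze_and_take_freelist(struct slab_sketch *slab)
    {
            void *freelist;
            unsigned long counters;
            struct slab_sketch new;

            do {
                    freelist = slab->freelist;
                    counters = slab->counters;
                    new.counters = counters;
                    new.inuse = new.objects;    /* all objects handed out */
                    new.frozen = 1;             /* now a cpu slab */
                    new.freelist = NULL;        /* zap the slab's list */
            } while (!update_freelist_sketch(slab, freelist, counters,
                                             new.freelist, new.counters));

            return freelist;
    }
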
2487 * Finishes removing the cpu slab. Merges cpu's freelist with slab's freelist,
2493 void *freelist)
2505 if (slab->freelist) {
2511 * Stage one: Count the objects on cpu's freelist as free_delta and
2515 freelist_iter = freelist;
2535 * freelist to the head of slab's freelist.
2547 old.freelist = READ_ONCE(slab->freelist);
2555 set_freepointer(s, freelist_tail, old.freelist);
2556 new.freelist = freelist;
2558 new.freelist = old.freelist;
2564 } else if (new.freelist) {
2577 old.freelist, old.counters,
2578 new.freelist, new.counters,
2624 old.freelist = slab->freelist;
2629 new.freelist = old.freelist;
2634 old.freelist, old.counters,
2635 new.freelist, new.counters,
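
deactivate_slab() (lines 2487-2578) is the reverse operation: the cpu's leftover freelist is spliced back into the slab's in two stages, first counting the cpu list and finding its tail, then hooking that tail onto the slab's current head. A sketch with a minimal stand-in type; the kernel performs stage two with the atomic double-word update in a retry loop:

    #include <stddef.h>

    struct slab_view {                  /* minimal stand-in for struct slab */
            void *freelist;
            unsigned int inuse;
            unsigned int frozen;
    };

    static void deactivate_sketch(struct slab_view *slab, void *cpu_freelist)
    {
            size_t free_delta = 0;
            void *tail = NULL;

            /* Stage one: walk the cpu freelist, find its tail, count it. */
            for (void *p = cpu_freelist; p; p = *(void **)p) {
                    tail = p;
                    free_delta++;
            }

            /* Stage two: tail of the cpu list points at the slab's old
             * head, and the cpu list becomes the new head. */
            if (tail) {
                    *(void **)tail = slab->freelist;
                    slab->freelist = cpu_freelist;
                    slab->inuse -= (unsigned int)free_delta;
            }
            slab->frozen = 0;           /* no longer a cpu slab */
    }
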
2748 void *freelist;
2753 freelist = c->freelist;
2756 c->freelist = NULL;
2762 deactivate_slab(s, slab, freelist);
2770 void *freelist = c->freelist;
2774 c->freelist = NULL;
2778 deactivate_slab(s, slab, freelist);
2906 /* Supports checking bulk free of a constructed freelist */
2942 /* Reached end of constructed freelist yet? */
3035 freelist_aba_t old = { .freelist = freelist_old, .counter = tid };
3036 freelist_aba_t new = { .freelist = freelist_new, .counter = next_tid(tid) };
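
Lines 3035-3036 pair the per-cpu freelist head with a transaction id so the allocation and free fastpaths can swap both as one unit: even if the head is freed and recycled to the same address, the tid will have moved on and the swap fails (the classic ABA defense). A userspace sketch with illustrative names, assuming GCC or Clang on x86-64 built with -mcx16 so a 16-byte compare-exchange is available:

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    typedef struct {
            void *freelist;     /* per-cpu freelist head */
            uint64_t tid;       /* monotonically advancing transaction id */
    } freelist_aba_sketch;

    static bool update_cpu_freelist_fast(__int128 *slot,
                                         freelist_aba_sketch old_v,
                                         freelist_aba_sketch new_v)
    {
            __int128 expected, desired;

            memcpy(&expected, &old_v, sizeof(expected));
            memcpy(&desired, &new_v, sizeof(desired));
            /* Both words must match for the swap to go through. */
            return __atomic_compare_exchange_n(slot, &expected, desired,
                                               false, __ATOMIC_ACQUIRE,
                                               __ATOMIC_RELAXED);
    }
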
3043 * Check the slab->freelist and either transfer the freelist to the
3044 * per cpu freelist or deactivate the slab.
3054 void *freelist;
3059 freelist = slab->freelist;
3066 new.frozen = freelist != NULL;
3069 freelist, counters,
3073 return freelist;
3077 * Slow path. The lockless freelist is empty or we need to perform
3081 * regular freelist. In that case we simply take over the regular freelist
3082 * as the lockless freelist and zap the regular freelist.
3085 * first element of the freelist as the object to allocate now and move the
3086 * rest of the freelist to the lockless freelist.
3098 void *freelist;
3147 freelist = c->freelist;
3148 if (freelist)
3151 freelist = get_freelist(s, slab);
3153 if (!freelist) {
3168 * freelist is pointing to the list of objects to be used.
3173 c->freelist = get_freepointer(s, freelist);
3176 return freelist;
3185 freelist = c->freelist;
3187 c->freelist = NULL;
3190 deactivate_slab(s, slab, freelist);
3218 freelist = get_partial(s, node, &pc);
3219 if (freelist)
3234 freelist = alloc_single_from_new_slab(s, slab, orig_size);
3236 if (unlikely(!freelist))
3240 set_track(s, freelist, TRACK_ALLOC, addr);
3242 return freelist;
3249 freelist = slab->freelist;
3250 slab->freelist = NULL;
3265 set_track(s, freelist, TRACK_ALLOC, addr);
3267 return freelist;
3272 * For !pfmemalloc_match() case we don't load freelist so that
3275 deactivate_slab(s, slab, get_freepointer(s, freelist));
3276 return freelist;
3283 void *flush_freelist = c->freelist;
3287 c->freelist = NULL;
3370 object = c->freelist;
3386 * 2. Verify that tid and freelist have not been changed
3387 * 3. If they were not changed replace tid and freelist
3433 * zeroing out the freelist pointer.
3448 * The fastpath works by first checking if the lockless freelist can be used.
3548 void *prior = slab->freelist;
3553 slab->freelist = head;
3627 prior = slab->freelist;
3730 * Bulk free of a freelist with several objects (all pointing to the
3741 void **freelist;
3762 freelist = READ_ONCE(c->freelist);
3764 set_freepointer(s, tail_obj, freelist);
3766 if (unlikely(!__update_cpu_freelist_fast(s, freelist, head, tid))) {
3779 freelist = c->freelist;
3781 set_freepointer(s, tail_obj, freelist);
3782 c->freelist = head;
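
Bulk free (lines 3730-3782) retires a whole pre-linked chain with one pointer swing: hook the chain's tail onto the current cpu freelist, then make the chain's head the new cpu freelist. In the kernel the swing is either the tid-protected fast compare-exchange above or done under the cpu-slab lock. Schematically:

    /* Free a pre-linked chain head..tail_obj in one step. */
    static void bulk_free_sketch(void **cpu_freelist_slot,
                                 void *head, void *tail_obj)
    {
            *(void **)tail_obj = *cpu_freelist_slot;    /* set_freepointer() */
            *cpu_freelist_slot = head;
    }
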
3806 * With KASAN enabled slab_free_freelist_hook modifies the freelist
3838 void *freelist;
3846 * slab. It builds a detached freelist directly within the given
3849 * The freelist is built up as a singly linked list in the objects.
3850 * The idea is that this detached freelist can then be bulk
3851 * transferred to the real freelist(s), but only requiring a single
3881 /* Start new detached freelist */
3883 df->freelist = object;
3896 /* Opportunistically build the freelist */
3897 set_freepointer(df->s, object, df->freelist);
3898 df->freelist = object;
3927 slab_free(df.s, df.slab, df.freelist, df.tail, &p[size], df.cnt,
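
Lines 3838-3898 describe the detached freelist: scan the array of objects to free, chain together the ones living on the same slab, and hand the whole chain over with a single synchronization primitive. An illustrative sketch; the kernel limits its look-ahead where this scans the whole array, and slab_of() is a toy stand-in for the real object-to-slab lookup:

    #include <stddef.h>
    #include <stdint.h>

    struct detached_sketch {
            void *slab;         /* slab shared by all chained objects */
            void *freelist;     /* head of the chain being built */
            size_t cnt;
    };

    /* Toy stand-in: pretend each 4 KiB page is one slab. */
    static void *slab_of(void *object)
    {
            return (void *)((uintptr_t)object & ~(uintptr_t)0xfff);
    }

    /* Scan p[] from the end and chain every object sharing the last
     * object's slab into df, NULL-ing its slot; objects on other slabs
     * stay behind for a later pass. Assumes p[size - 1] is non-NULL. */
    static size_t build_detached_sketch(void **p, size_t size,
                                        struct detached_sketch *df)
    {
            df->slab = slab_of(p[size - 1]);
            df->freelist = NULL;
            df->cnt = 0;

            for (size_t i = size; i-- > 0; ) {
                    if (!p[i] || slab_of(p[i]) != df->slab)
                            continue;               /* different slab */
                    *(void **)p[i] = df->freelist;  /* link in front */
                    df->freelist = p[i];
                    df->cnt++;
                    p[i] = NULL;                    /* consumed */
            }
            return df->cnt;
    }
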
3957 object = c->freelist;
3960 * We may have removed an object from c->freelist using
3972 * of re-populating per CPU c->freelist
3986 c->freelist = get_freepointer(s, object);
4270 n = slab->freelist;
4277 slab->freelist = get_freepointer(kmem_cache_node, n);
4435 * Store freelist pointer near middle of object to keep
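
Line 4435 refers to the hardened placement of the free pointer: near the middle of the object rather than at an edge, to keep it away from small over/underflows spilling in from neighbouring allocations. The placement amounts to an aligned halfway offset, sketched here:

    #include <stddef.h>

    /* Put the in-object free pointer at the aligned midpoint. */
    static size_t freeptr_offset(size_t object_size)
    {
            size_t mid = object_size / 2;

            return mid - (mid % sizeof(void *));    /* ALIGN_DOWN */
    }
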
4551 /* Initialize the pre-computed randomized freelist if slab is up */
5155 /* Now we know that a valid freelist exists */