Lines matching refs:object in mm/kasan/common.c (KASAN slab hooks)
136 void __kasan_unpoison_object_data(struct kmem_cache *cache, void *object)
138 kasan_unpoison(object, cache->object_size, false);
141 void __kasan_poison_object_data(struct kmem_cache *cache, void *object)
143 kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE),
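Both hooks above poison and unpoison in whole KASAN granules: the poisoned span is the object size rounded up to KASAN_GRANULE_SIZE. A minimal userspace sketch of that rounding, assuming the generic-mode granule size of 8 bytes and mirroring the result of the kernel's power-of-two round_up():

#include <stdio.h>

#define KASAN_GRANULE_SIZE 8	/* assumed: generic KASAN granule size */
/* Same result as the kernel macro for power-of-two alignment. */
#define round_up(x, y) ((((x) - 1) | ((y) - 1)) + 1)

int main(void)
{
	unsigned long sizes[] = { 1, 8, 13, 24, 100 };
	unsigned long i;

	for (i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++)
		printf("object_size %3lu -> poisoned span %3lu\n",
		       sizes[i], round_up(sizes[i], KASAN_GRANULE_SIZE));
	return 0;
}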
148 * This function assigns a tag to an object considering the following:
150 * object somewhere (e.g. in the object itself). We preassign a tag for
151 * each object in caches with constructors during slab creation and reuse
152 * the same tag each time a particular object is allocated.
162 const void *object, bool init)
169 * set, assign a tag when the object is being allocated (init == false).
176 /* For SLAB assign tags based on the object index in the freelist. */
177 return (u8)obj_to_index(cache, virt_to_slab(object), (void *)object);
183 return init ? kasan_random_tag() : get_tag(object);
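Pieced together from the fragments above, assign_tag() implements a three-way policy: caches with neither a constructor nor SLAB_TYPESAFE_BY_RCU get a tag at allocation time; with SLAB, stable tags are derived from the object's freelist index; with SLUB, a random tag is picked at slab creation and reused afterwards. A compilable sketch of that decision tree; the parameter names and the KASAN_TAG_KERNEL value are stand-ins for the kernel's checks, not the verbatim function:

typedef unsigned char u8;

#define KASAN_TAG_KERNEL 0xff	/* assumed: tag used for native kernel pointers */

/*
 * needs_stable_tag stands for "cache->ctor != NULL or the cache is
 * SLAB_TYPESAFE_BY_RCU"; random_tag/existing_tag replace the kernel's
 * kasan_random_tag() and get_tag(object) helpers.
 */
static u8 sketch_assign_tag(int needs_stable_tag, int is_slab,
			    u8 obj_index_tag, int init,
			    u8 random_tag, u8 existing_tag)
{
	if (!needs_stable_tag)
		/* Tag is assigned when the object is allocated (init == false). */
		return init ? KASAN_TAG_KERNEL : random_tag;
	if (is_slab)
		/* SLAB: the tag follows the object index in the freelist. */
		return obj_index_tag;
	/* SLUB: random tag during slab creation, reused on later allocations. */
	return init ? random_tag : existing_tag;
}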
188 const void *object)
190 /* Initialize per-object metadata if it is present. */
192 kasan_init_object_meta(cache, object);
195 object = set_tag(object, assign_tag(cache, object, true));
197 return (void *)object;
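set_tag() embeds the chosen tag in the pointer itself; with software tag-based KASAN the tag lives in the top byte of the address (arm64 Top Byte Ignore). The shift below assumes that arm64 layout and a 64-bit host:

#include <stdint.h>
#include <stdio.h>

#define TAG_SHIFT 56	/* assumed: tag occupies pointer bits 63:56 */

static void *sketch_set_tag(void *addr, uint8_t tag)
{
	/* Clear the old top byte, then splice in the new tag. */
	uintptr_t p = (uintptr_t)addr & ~((uintptr_t)0xff << TAG_SHIFT);

	return (void *)(p | ((uintptr_t)tag << TAG_SHIFT));
}

static uint8_t sketch_get_tag(const void *addr)
{
	return (uint8_t)((uintptr_t)addr >> TAG_SHIFT);
}

int main(void)
{
	void *object = (void *)0xffff000012345678ULL;
	void *tagged = sketch_set_tag(object, 0xab);

	printf("tagged=%p tag=%#x\n", tagged, sketch_get_tag(tagged));
	return 0;
}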
200 static inline bool ____kasan_slab_free(struct kmem_cache *cache, void *object,
208 tagged_object = object;
209 object = kasan_reset_tag(object);
211 if (is_kfence_address(object))
214 if (unlikely(nearest_obj(cache, virt_to_slab(object), object) !=
215 object)) {
229 kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE),
238 return kasan_quarantine_put(cache, object);
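The free path above is a chain of guards before the actual poisoning: KFENCE-managed objects are skipped, a pointer that is not the start of its nearest object is reported as an invalid free, and only then is the object poisoned and handed to the quarantine. A simplified sketch of that ordering with stand-in helpers (the kernel uses is_kfence_address(), nearest_obj(), kasan_poison() and kasan_quarantine_put()):

#include <stdbool.h>

static bool stub_is_kfence(void *obj) { (void)obj; return false; }
static void *stub_nearest_obj(void *obj) { return obj; }
static void stub_poison(void *obj) { (void)obj; }
static bool stub_quarantine_put(void *obj) { (void)obj; return true; }
static void stub_report_invalid_free(void *obj) { (void)obj; }

/* Returns true when the slab allocator must NOT free the object yet. */
static bool sketch_slab_free_hook(void *object)
{
	if (stub_is_kfence(object))
		return false;	/* KFENCE manages its own objects */

	if (stub_nearest_obj(object) != object) {
		stub_report_invalid_free(object);
		return true;	/* bad pointer: keep it off the freelist */
	}

	stub_poison(object);
	/* Generic KASAN defers the real free through the quarantine. */
	return stub_quarantine_put(object);
}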
241 bool __kasan_slab_free(struct kmem_cache *cache, void *object,
244 return ____kasan_slab_free(cache, object, ip, true, init);
263 * The object will be poisoned by kasan_poison_pages() or
299 void *object, gfp_t flags, bool init)
307 if (unlikely(object == NULL))
310 if (is_kfence_address(object))
311 return (void *)object;
317 tag = assign_tag(cache, object, false);
318 tagged_object = set_tag(object, tag);
321 * Unpoison the whole object.
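On allocation the hook mirrors the free path's guards and then makes the object accessible: NULL and KFENCE objects pass through untouched, a tag is assigned (or reused) with init == false, the pointer is retagged, and the whole object is unpoisoned. A compilable sketch with stand-in helpers:

typedef unsigned char u8;

static int stub_is_kfence_obj(const void *o) { (void)o; return 0; }
static u8 stub_pick_tag(const void *o) { (void)o; return 0x2a; }
static void *stub_tag_ptr(void *o, u8 tag) { (void)tag; return o; }
static void stub_unpoison(void *o, unsigned long n) { (void)o; (void)n; }

static void *sketch_slab_alloc_hook(void *object, unsigned long object_size)
{
	void *tagged;

	if (object == NULL)		/* failed allocation: nothing to do */
		return NULL;
	if (stub_is_kfence_obj(object))	/* KFENCE handles its own redzones */
		return object;

	/* init == false: the tag is chosen or reused at allocation time. */
	tagged = stub_tag_ptr(object, stub_pick_tag(object));

	/* Unpoison the whole object and return the tagged pointer. */
	stub_unpoison(tagged, object_size);
	return tagged;
}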
334 const void *object, size_t size, gfp_t flags)
342 if (unlikely(object == NULL))
345 if (is_kfence_address(kasan_reset_tag(object)))
346 return (void *)object;
349 * The object has already been unpoisoned by kasan_slab_alloc() for
355 * Partially poison the last object granule to cover the unaligned
359 kasan_poison_last_granule((void *)object, size);
362 redzone_start = round_up((unsigned long)(object + size),
364 redzone_end = round_up((unsigned long)(object + cache->object_size),
374 kasan_save_alloc_info(cache, (void *)object, flags);
377 return (void *)object;
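The redzone bounds follow directly from the two round_up() calls above: everything from the granule-aligned end of the requested size up to the granule-aligned end of cache->object_size becomes redzone. A small arithmetic check with assumed sample values (granule size 8):

#include <stdio.h>

#define KASAN_GRANULE_SIZE 8
#define round_up(x, y) ((((x) - 1) | ((y) - 1)) + 1)

int main(void)
{
	unsigned long object = 0x1000;		/* hypothetical object address */
	unsigned long size = 13;		/* requested kmalloc() size */
	unsigned long object_size = 32;		/* cache->object_size */
	unsigned long redzone_start, redzone_end;

	redzone_start = round_up(object + size, KASAN_GRANULE_SIZE);
	redzone_end = round_up(object + object_size, KASAN_GRANULE_SIZE);

	printf("redzone: [%#lx, %#lx)\n", redzone_start, redzone_end);
	/* prints: redzone: [0x1010, 0x1020) */
	return 0;
}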
380 void * __must_check __kasan_kmalloc(struct kmem_cache *cache, const void *object,
383 return ____kasan_kmalloc(cache, object, size, flags);
400 * The object has already been unpoisoned by kasan_unpoison_pages() for
406 * Partially poison the last object granule to cover the unaligned
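Under generic KASAN the unaligned tail is handled by kasan_poison_last_granule(): the shadow byte covering the final granule records how many of its leading bytes remain accessible. A sketch of that encoding, assuming the generic-mode shadow layout of one shadow byte per 8-byte granule:

#include <stdio.h>

#define KASAN_GRANULE_SIZE 8
#define KASAN_GRANULE_MASK (KASAN_GRANULE_SIZE - 1)

/* 0 = whole granule accessible; 1..7 = only the first N bytes are. */
static unsigned char shadow_for_last_granule(unsigned long size)
{
	return (unsigned char)(size & KASAN_GRANULE_MASK);
}

int main(void)
{
	/* A 13-byte object leaves 5 accessible bytes in its last granule. */
	printf("size 13 -> last-granule shadow %u\n",
	       shadow_for_last_granule(13));
	return 0;
}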
422 void * __must_check __kasan_krealloc(const void *object, size_t size, gfp_t flags)
426 if (unlikely(object == ZERO_SIZE_PTR))
427 return (void *)object;
430 * Unpoison the object's data.
434 kasan_unpoison(object, size, false);
436 slab = virt_to_slab(object);
440 return __kasan_kmalloc_large(object, size, flags);
442 return ____kasan_kmalloc(slab->slab_cache, object, size, flags);
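__kasan_krealloc() dispatches on the buffer's origin: virt_to_slab() yields NULL for page_alloc-backed (large) buffers, which take the __kasan_kmalloc_large() path, while slab-backed buffers go through ____kasan_kmalloc() with their owning cache. A compilable sketch of that dispatch with stand-in helpers:

#include <stddef.h>
#include <stdio.h>

struct sketch_slab { const char *cache_name; };

/* Stand-in for virt_to_slab(): NULL means not slab-backed. */
static struct sketch_slab *stub_virt_to_slab(const void *a) { (void)a; return NULL; }

static void *stub_large_path(const void *obj, size_t size)
{
	printf("large path: %zu bytes\n", size);
	return (void *)obj;
}

static void *stub_slab_path(struct sketch_slab *s, const void *obj, size_t size)
{
	printf("slab path (%s): %zu bytes\n", s->cache_name, size);
	return (void *)obj;
}

static void *sketch_krealloc_hook(const void *object, size_t size)
{
	struct sketch_slab *slab;

	if (!object)	/* the kernel returns ZERO_SIZE_PTR unchanged here */
		return NULL;

	slab = stub_virt_to_slab(object);
	if (!slab)	/* not slab-backed: a large, page_alloc-backed buffer */
		return stub_large_path(object, size);

	return stub_slab_path(slab, object, size);
}

int main(void)
{
	char buf[16];

	sketch_krealloc_hook(buf, sizeof(buf));
	return 0;
}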