
Searched refs:gfp_mask (Results 1 - 20 of 20) sorted by relevance

/device/soc/rockchip/common/sdk_linux/include/linux/
cpuset.h
68 extern bool _cpuset_node_allowed(int node, gfp_t gfp_mask);
70 static inline bool cpuset_node_allowed(int node, gfp_t gfp_mask) in cpuset_node_allowed() argument
73 return _cpuset_node_allowed(node, gfp_mask); in cpuset_node_allowed()
78 static inline bool __cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in __cpuset_zone_allowed() argument
80 return _cpuset_node_allowed(zone_to_nid(z), gfp_mask); in __cpuset_zone_allowed()
83 static inline bool cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in cpuset_zone_allowed() argument
86 return __cpuset_zone_allowed(z, gfp_mask); in cpuset_zone_allowed()
228 static inline bool cpuset_node_allowed(int node, gfp_t gfp_mask) in cpuset_node_allowed() argument
233 static inline bool __cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in __cpuset_zone_allowed() argument
238 static inline bool cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in cpuset_zone_allowed() argument
[all...]
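Note: the cpuset.h hits above are the node/zone admission checks consulted on the page-allocation path; with CONFIG_CPUSETS enabled they reduce to _cpuset_node_allowed(), otherwise to stubs that always allow. A minimal sketch (not code from this tree; pick_first_allowed_zone() is a hypothetical helper) of a zonelist walk filtering candidate zones against the caller's gfp_mask:

#include <linux/cpuset.h>
#include <linux/gfp.h>
#include <linux/mmzone.h>

/* Sketch only: the real iteration lives in mm/page_alloc.c.
 * cpuset_zone_allowed() itself honours __GFP_HARDWALL in gfp_mask, i.e. a
 * hardwall request is confined to the task's own cpuset. */
static struct zone *pick_first_allowed_zone(struct zonelist *zonelist,
					    enum zone_type highest_zoneidx,
					    gfp_t gfp_mask)
{
	struct zoneref *z;
	struct zone *zone;

	for_each_zone_zonelist(zone, z, zonelist, highest_zoneidx) {
		if (cpusets_enabled() && !cpuset_zone_allowed(zone, gfp_mask))
			continue;	/* current task's cpuset forbids this zone */
		return zone;
	}
	return NULL;
}
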
memory_group_manager.h
57 * @gfp_mask: Bitmask of Get Free Page flags affecting allocator
64 struct page *(*mgm_alloc_page)(struct memory_group_manager_device *mgm_dev, int group_id, gfp_t gfp_mask,
mm.h
536 * MM layer fills up gfp_mask for page allocations but fault handler might
544 gfp_t gfp_mask; /* gfp mask to be used for allocations */ member
1844 extern int try_to_release_page(struct page *page, gfp_t gfp_mask);
2515 extern __printf(3, 4) void warn_alloc(gfp_t gfp_mask, nodemask_t *nodemask, const char *fmt, ...);
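Note: the mm.h hits include the gfp_mask field of struct vm_fault, which the MM core fills in so a fault handler allocates with flags consistent with the faulting context. A minimal hedged sketch of a driver .fault callback honouring that mask (my_fault and my_vm_ops are hypothetical names):

#include <linux/mm.h>
#include <linux/gfp.h>

/* Allocate the backing page with the mask prepared in vmf->gfp_mask rather
 * than a hard-coded GFP_KERNEL, so constraints from the faulting context
 * (e.g. a cleared __GFP_FS) are preserved. */
static vm_fault_t my_fault(struct vm_fault *vmf)
{
	struct page *page = alloc_page(vmf->gfp_mask | __GFP_ZERO);

	if (!page)
		return VM_FAULT_OOM;

	vmf->page = page;	/* the core maps vmf->page into the faulting address */
	return 0;
}

static const struct vm_operations_struct my_vm_ops = {
	.fault = my_fault,
};
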
/device/soc/rockchip/common/vendor/drivers/dma-buf/heaps/
page_pool.c
26 return alloc_pages(pool->gfp_mask, pool->order); in dmabuf_page_pool_alloc_pages()
116 struct dmabuf_page_pool *dmabuf_page_pool_create(gfp_t gfp_mask, unsigned int order) in dmabuf_page_pool_create() argument
129 pool->gfp_mask = gfp_mask | __GFP_COMP; in dmabuf_page_pool_create()
162 static int dmabuf_page_pool_do_shrink(struct dmabuf_page_pool *pool, gfp_t gfp_mask, int nr_to_scan) in dmabuf_page_pool_do_shrink() argument
170 high = !!(gfp_mask & __GFP_HIGHMEM); in dmabuf_page_pool_do_shrink()
197 static int dmabuf_page_pool_shrink(gfp_t gfp_mask, int nr_to_scan) in dmabuf_page_pool_shrink() argument
212 nr_total += dmabuf_page_pool_do_shrink(pool, gfp_mask, nr_to_scan); in dmabuf_page_pool_shrink()
214 nr_freed = dmabuf_page_pool_do_shrink(pool, gfp_mask, nr_to_scan); in dmabuf_page_pool_shrink()
229 return dmabuf_page_pool_shrink(sc->gfp_mask, in dmabuf_page_pool_shrink_count()
[all...]
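Note: in this pool the constructor stores the caller's mask with __GFP_COMP OR-ed in, every refill goes through alloc_pages(pool->gfp_mask, pool->order), and the shrink path tests __GFP_HIGHMEM in the incoming gfp_mask to pick a free list. A stripped-down sketch of the allocation side (struct my_pool and the my_pool_* helpers are hypothetical stand-ins, not the dmabuf heap API):

#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/slab.h>

struct my_pool {
	gfp_t gfp_mask;		/* flags used for every pool allocation */
	unsigned int order;	/* page order of each pool entry */
};

static struct my_pool *my_pool_create(gfp_t gfp_mask, unsigned int order)
{
	struct my_pool *pool = kzalloc(sizeof(*pool), GFP_KERNEL);

	if (!pool)
		return NULL;
	/* higher-order entries are treated as compound pages, as in
	 * dmabuf_page_pool_create() above */
	pool->gfp_mask = gfp_mask | __GFP_COMP;
	pool->order = order;
	return pool;
}

static struct page *my_pool_alloc_pages(struct my_pool *pool)
{
	return alloc_pages(pool->gfp_mask, pool->order);
}
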
page_pool.h
34 * @gfp_mask: gfp_mask to use from alloc
44 gfp_t gfp_mask; member
49 struct dmabuf_page_pool *dmabuf_page_pool_create(gfp_t gfp_mask, unsigned int order);
/device/soc/rockchip/rk3588/kernel/drivers/dma-buf/heaps/
page_pool.c
26 return alloc_pages(pool->gfp_mask, pool->order); in dmabuf_page_pool_alloc_pages()
114 struct dmabuf_page_pool *dmabuf_page_pool_create(gfp_t gfp_mask, unsigned int order) in dmabuf_page_pool_create() argument
126 pool->gfp_mask = gfp_mask | __GFP_COMP; in dmabuf_page_pool_create()
158 static int dmabuf_page_pool_do_shrink(struct dmabuf_page_pool *pool, gfp_t gfp_mask, in dmabuf_page_pool_do_shrink() argument
167 high = !!(gfp_mask & __GFP_HIGHMEM); in dmabuf_page_pool_do_shrink()
190 static int dmabuf_page_pool_shrink(gfp_t gfp_mask, int nr_to_scan) in dmabuf_page_pool_shrink() argument
204 gfp_mask, in dmabuf_page_pool_shrink()
208 gfp_mask, in dmabuf_page_pool_shrink()
224 return dmabuf_page_pool_shrink(sc->gfp_mask, in dmabuf_page_pool_shrink_count()
[all...]
page_pool.h
34 * @gfp_mask: gfp_mask to use from alloc
44 gfp_t gfp_mask; member
49 struct dmabuf_page_pool *dmabuf_page_pool_create(gfp_t gfp_mask,
/device/soc/hisilicon/common/platform/wifi/hi3881v100/driver/oal/
oal_scatterlist.h
119 static struct scatterlist *sg_kmalloc(unsigned int nents, gfp_t gfp_mask) in sg_kmalloc() argument
121 hi_unref_param(gfp_mask); in sg_kmalloc()
122 return kmalloc(nents * sizeof(struct scatterlist), gfp_mask); in sg_kmalloc()
144 int __sg_alloc_table(struct sg_table *table, unsigned int nents, unsigned int max_ents, gfp_t gfp_mask, in __sg_alloc_table() argument
169 sg = alloc_fn(alloc_size, gfp_mask); in __sg_alloc_table()
210 int sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask) in sg_alloc_table() argument
214 ret = __sg_alloc_table(table, nents, SG_MAX_SINGLE_ALLOC, gfp_mask, sg_kmalloc); in sg_alloc_table()
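Note: these hits are a local reimplementation of the scatterlist table allocator, threading the caller's gfp_mask through __sg_alloc_table() into the chunk allocator sg_kmalloc(). From the calling side the usage matches the stock kernel API; a minimal hedged example (build_sg_table() is a hypothetical caller):

#include <linux/scatterlist.h>
#include <linux/gfp.h>

/* Build an nents-entry table in process context; GFP_KERNEL flows down to
 * the per-chunk allocator. The caller releases it later with sg_free_table(). */
static int build_sg_table(struct sg_table *table, unsigned int nents)
{
	int ret = sg_alloc_table(table, nents, GFP_KERNEL);

	if (ret)
		return ret;	/* typically -ENOMEM */

	/* entries are then populated with sg_set_page()/sg_set_buf() */
	return 0;
}
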
/device/soc/rockchip/common/vendor/include/
page_pool.h
34 * @gfp_mask: gfp_mask to use from alloc
44 gfp_t gfp_mask; member
49 struct dmabuf_page_pool *dmabuf_page_pool_create(gfp_t gfp_mask, unsigned int order);
/device/soc/rockchip/rk3588/kernel/include/linux/
page_pool.h
34 * @gfp_mask: gfp_mask to use from alloc
44 gfp_t gfp_mask; member
49 struct dmabuf_page_pool *dmabuf_page_pool_create(gfp_t gfp_mask,
memory_group_manager.h
57 * @gfp_mask: Bitmask of Get Free Page flags affecting allocator
66 gfp_t gfp_mask, unsigned int order);
/device/soc/rockchip/common/sdk_linux/kernel/power/
snapshot.c
157 * @gfp_mask: GFP mask for the allocation.
168 static void *get_image_page(gfp_t gfp_mask, int safe_needed) in get_image_page() argument
172 res = (void *)get_zeroed_page(gfp_mask); in get_image_page()
178 res = (void *)get_zeroed_page(gfp_mask); in get_image_page()
188 static void *_get_safe_page(gfp_t gfp_mask) in _get_safe_page() argument
197 return get_image_page(gfp_mask, PG_SAFE); in _get_safe_page()
200 unsigned long get_safe_page(gfp_t gfp_mask) in get_safe_page() argument
202 return (unsigned long)_get_safe_page(gfp_mask); in get_safe_page()
205 static struct page *alloc_image_page(gfp_t gfp_mask) in alloc_image_page() argument
209 page = alloc_page(gfp_mask); in alloc_image_page()
275 gfp_t gfp_mask; /* mask for allocating pages */ global() member
279 chain_init(struct chain_allocator *ca, gfp_t gfp_mask, int safe_needed) chain_init() argument
414 alloc_rtree_node(gfp_t gfp_mask, int safe_needed, struct chain_allocator *ca, struct list_head *list) alloc_rtree_node() argument
441 add_rtree_block(struct mem_zone_bm_rtree *zone, gfp_t gfp_mask, int safe_needed, struct chain_allocator *ca) add_rtree_block() argument
510 create_zone_bm_rtree(gfp_t gfp_mask, int safe_needed, struct chain_allocator *ca, unsigned long start, unsigned long end) create_zone_bm_rtree() argument
593 create_mem_extents(struct list_head *list, gfp_t gfp_mask) create_mem_extents() argument
653 memory_bm_create(struct memory_bitmap *bm, gfp_t gfp_mask, int safe_needed) memory_bm_create() argument
[all...]
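Note: the snapshot code passes one gfp_mask down through its page and rtree-node allocators, so the caller chooses the allocation context in a single place. A minimal hedged sketch of that pattern (hypothetical helper names):

#include <linux/gfp.h>
#include <linux/mm.h>

/* Mirror of get_image_page(): hand the caller's gfp_mask straight to
 * get_zeroed_page(), so whether the allocation may sleep is decided by
 * the caller, not by this helper. */
static void *grab_zeroed_buffer(gfp_t gfp_mask)
{
	return (void *)get_zeroed_page(gfp_mask);
}

static void release_buffer(void *buf)
{
	free_page((unsigned long)buf);
}
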
/device/soc/rockchip/rk3588/kernel/include/trace/hooks/
vmscan.h
20 TP_PROTO(gfp_t gfp_mask, int nid, struct mem_cgroup *memcg, int priority, bool *bypass),
21 TP_ARGS(gfp_mask, nid, memcg, priority, bypass));
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_native_mgm.c
35 * @gfp_mask: Bitmask of Get Free Page flags affecting allocator behavior.
46 gfp_t gfp_mask, unsigned int order) in kbase_native_mgm_alloc()
64 return alloc_pages(gfp_mask, order); in kbase_native_mgm_alloc()
44 kbase_native_mgm_alloc( struct memory_group_manager_device *mgm_dev, int group_id, gfp_t gfp_mask, unsigned int order) kbase_native_mgm_alloc() argument
mali_kbase_mem_linux.c
644 WARN((sc->gfp_mask & __GFP_ATOMIC), in kbase_mem_evictable_reclaim_count_objects()
645 "Shrinkers cannot be called for GFP_ATOMIC allocations. Check kernel mm for problems. gfp_mask==%x\n", in kbase_mem_evictable_reclaim_count_objects()
646 sc->gfp_mask); in kbase_mem_evictable_reclaim_count_objects()
648 "Shrinker called whilst in atomic context. The caller must switch to using GFP_ATOMIC or similar. gfp_mask==%x\n", in kbase_mem_evictable_reclaim_count_objects()
649 sc->gfp_mask); in kbase_mem_evictable_reclaim_count_objects()
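Note: this shrinker warns when its count callback runs with __GFP_ATOMIC set in sc->gfp_mask, because shrinkers must not be reached from atomic allocations. A minimal hedged sketch of a count callback doing the same kind of gfp_mask sanity check (my_count_objects and the counter are hypothetical):

#include <linux/shrinker.h>
#include <linux/gfp.h>
#include <linux/atomic.h>

static atomic_long_t my_cached_objects;	/* hypothetical reclaimable-object count */

/* The reclaim core fills sc->gfp_mask with the mask of the allocation that
 * triggered reclaim; a shrinker can use it for sanity checks or to skip work
 * the current context cannot do (e.g. no __GFP_FS). */
static unsigned long my_count_objects(struct shrinker *s,
				      struct shrink_control *sc)
{
	WARN_ONCE(sc->gfp_mask & __GFP_ATOMIC,
		  "shrinker reached from an atomic allocation, gfp_mask==%x\n",
		  sc->gfp_mask);

	return atomic_long_read(&my_cached_objects);
}
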
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_native_mgm.c
36 * @gfp_mask: Bitmask of Get Free Page flags affecting allocator behavior.
45 static struct page *kbase_native_mgm_alloc(struct memory_group_manager_device *mgm_dev, int group_id, gfp_t gfp_mask, in kbase_native_mgm_alloc() argument
62 return alloc_pages(gfp_mask, order); in kbase_native_mgm_alloc()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/mali400/mali/linux/
mali_memory_os_alloc.c
34 static int mali_mem_os_shrink(int nr_to_scan, gfp_t gfp_mask);
36 static int mali_mem_os_shrink(struct shrinker *shrinker, int nr_to_scan, gfp_t gfp_mask);
640 static int mali_mem_os_shrink(int nr_to_scan, gfp_t gfp_mask) in mali_mem_os_shrink() argument
642 static int mali_mem_os_shrink(struct shrinker *shrinker, int nr_to_scan, gfp_t gfp_mask) in mali_mem_os_shrink()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/mali400/mali/linux/
mali_memory_os_alloc.c
33 static int mali_mem_os_shrink(int nr_to_scan, gfp_t gfp_mask);
35 static int mali_mem_os_shrink(struct shrinker *shrinker, int nr_to_scan, gfp_t gfp_mask);
650 static int mali_mem_os_shrink(int nr_to_scan, gfp_t gfp_mask) in mali_mem_os_shrink() argument
652 static int mali_mem_os_shrink(struct shrinker *shrinker, int nr_to_scan, gfp_t gfp_mask) in mali_mem_os_shrink()
/device/soc/rockchip/common/sdk_linux/drivers/gpu/drm/rockchip/
rockchip_drm_gem.c
603 gfp_t gfp_mask = GFP_HIGHUSER | __GFP_RECLAIMABLE | __GFP_DMA32; in rockchip_gem_alloc_object() local
605 gfp_t gfp_mask = GFP_HIGHUSER | __GFP_RECLAIMABLE; in rockchip_gem_alloc_object() local
619 mapping_set_gfp_mask(mapping, gfp_mask); in rockchip_gem_alloc_object()
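Note: here the driver composes a mask (adding __GFP_DMA32 on configurations that need 32-bit-addressable pages) and installs it on the GEM object's address_space with mapping_set_gfp_mask(), so later shmem page allocations for that object inherit it. A hedged sketch of the pattern (constrain_gem_mapping() is a hypothetical helper):

#include <linux/fs.h>
#include <linux/gfp.h>
#include <linux/pagemap.h>

/* Restrict where future page-cache allocations for this mapping may come
 * from; dma32 = true mirrors the __GFP_DMA32 branch above. */
static void constrain_gem_mapping(struct address_space *mapping, bool dma32)
{
	gfp_t gfp_mask = GFP_HIGHUSER | __GFP_RECLAIMABLE;

	if (dma32)
		gfp_mask |= __GFP_DMA32;

	mapping_set_gfp_mask(mapping, gfp_mask);
}
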
/device/soc/rockchip/common/sdk_linux/kernel/cgroup/
cpuset.c
3507 * @gfp_mask: memory allocation flags
3529 * has __GFP_HARDWALL set in gfp_mask, enforcing hardwall cpusets,
3544 bool _cpuset_node_allowed(int node, gfp_t gfp_mask) in _cpuset_node_allowed() argument
3563 if (gfp_mask & __GFP_HARDWALL) { /* If hardwall request, stop here */ in _cpuset_node_allowed()
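Note: the implementation confirms that __GFP_HARDWALL in gfp_mask stops the check at the task's own cpuset instead of walking up to the nearest hardwalled ancestor. Callers usually get the bit through composite flags rather than setting it by hand: GFP_USER includes __GFP_HARDWALL, GFP_KERNEL does not. A small hedged illustration:

#include <linux/gfp.h>
#include <linux/mm.h>

/* User-visible data: GFP_USER carries __GFP_HARDWALL, so the allocation is
 * confined to the current task's own cpuset by _cpuset_node_allowed(). */
static struct page *alloc_user_visible_page(void)
{
	return alloc_page(GFP_USER | __GFP_ZERO);
}

/* Kernel-internal data: GFP_KERNEL omits __GFP_HARDWALL, so memory from an
 * enclosing hardwalled ancestor cpuset may also be used. */
static struct page *alloc_kernel_page(void)
{
	return alloc_page(GFP_KERNEL);
}
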

Completed in 23 milliseconds