
Searched refs:cma (Results 1 - 25 of 65) sorted by relevance

/kernel/linux/linux-6.6/mm/
cma.c
15 #define pr_fmt(fmt) "cma: " fmt
30 #include <linux/cma.h>
34 #include <trace/events/cma.h>
37 #include "cma.h"
39 struct cma cma_areas[MAX_CMA_AREAS];
43 phys_addr_t cma_get_base(const struct cma *cma) in cma_get_base() argument
45 return PFN_PHYS(cma->base_pfn); in cma_get_base()
48 unsigned long cma_get_size(const struct cma *cma) in cma_get_size() argument
53 cma_get_name(const struct cma *cma) cma_get_name() argument
58 cma_bitmap_aligned_mask(const struct cma *cma, unsigned int align_order) cma_bitmap_aligned_mask() argument
70 cma_bitmap_aligned_offset(const struct cma *cma, unsigned int align_order) cma_bitmap_aligned_offset() argument
77 cma_bitmap_pages_to_bits(const struct cma *cma, unsigned long pages) cma_bitmap_pages_to_bits() argument
83 cma_clear_bitmap(struct cma *cma, unsigned long pfn, unsigned long count) cma_clear_bitmap() argument
97 cma_activate_area(struct cma *cma) cma_activate_area() argument
157 cma_reserve_pages_on_error(struct cma *cma) cma_reserve_pages_on_error() argument
179 struct cma *cma; cma_init_reserved_mem() local
391 cma_debug_show_areas(struct cma *cma) cma_debug_show_areas() argument
416 cma_debug_show_areas(struct cma *cma) cma_debug_show_areas() argument
429 cma_alloc(struct cma *cma, unsigned long count, unsigned int align, bool no_warn) cma_alloc() argument
531 cma_pages_valid(struct cma *cma, const struct page *pages, unsigned long count) cma_pages_valid() argument
560 cma_release(struct cma *cma, const struct page *pages, unsigned long count) cma_release() argument
581 cma_for_each_area(int (*it)(struct cma *cma, void *data), void *data) cma_for_each_area() argument
[all...]
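
The hits above are the core allocator entry points in the 6.6 tree: cma_alloc() hands out `count` physically contiguous pages from a reserved area and cma_release() returns them. As a minimal sketch only (the `my_cma` handle and the helper names are hypothetical; a real region would be set up at boot via cma_declare_contiguous() or cma_init_reserved_mem()):

#include <linux/cma.h>
#include <linux/mm_types.h>

static struct cma *my_cma;      /* hypothetical: initialised at boot */

/* Allocate nr_pages contiguous pages from the area. align is an order
 * (0 = plain page alignment); no_warn = false lets cma_alloc() log
 * details when the request fails. */
static struct page *grab_cma_buffer(unsigned long nr_pages)
{
        return cma_alloc(my_cma, nr_pages, 0, false);
}

static void put_cma_buffer(struct page *pages, unsigned long nr_pages)
{
        if (pages)
                cma_release(my_cma, pages, nr_pages);
}
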
cma_debug.c
10 #include <linux/cma.h>
16 #include "cma.h"
36 struct cma *cma = data; in cma_used_get() local
39 spin_lock_irq(&cma->lock); in cma_used_get()
41 used = bitmap_weight(cma->bitmap, (int)cma_bitmap_maxno(cma)); in cma_used_get()
42 spin_unlock_irq(&cma->lock); in cma_used_get()
43 *val = (u64)used << cma->order_per_bit; in cma_used_get()
51 struct cma *cm in cma_maxchunk_get() local
71 cma_add_to_cma_mem_list(struct cma *cma, struct cma_mem *mem) cma_add_to_cma_mem_list() argument
78 cma_get_entry_from_list(struct cma *cma) cma_get_entry_from_list() argument
92 cma_free_mem(struct cma *cma, int count) cma_free_mem() argument
125 struct cma *cma = data; cma_free_write() local
131 cma_alloc_mem(struct cma *cma, int count) cma_alloc_mem() argument
157 struct cma *cma = data; cma_alloc_write() local
163 cma_debugfs_add_one(struct cma *cma, struct dentry *root_dentry) cma_debugfs_add_one() argument
[all...]
cma_sysfs.c
8 #include <linux/cma.h>
12 #include "cma.h"
17 void cma_sysfs_account_success_pages(struct cma *cma, unsigned long nr_pages) in cma_sysfs_account_success_pages() argument
19 atomic64_add(nr_pages, &cma->nr_pages_succeeded); in cma_sysfs_account_success_pages()
22 void cma_sysfs_account_fail_pages(struct cma *cma, unsigned long nr_pages) in cma_sysfs_account_fail_pages() argument
24 atomic64_add(nr_pages, &cma->nr_pages_failed); in cma_sysfs_account_fail_pages()
27 static inline struct cma *cma_from_kobj(struct kobject *kobj) in cma_from_kobj()
29 return container_of(kobj, struct cma_kobject, kobj)->cma; in cma_from_kobj()
35 struct cma *cma = cma_from_kobj(kobj); alloc_pages_success_show() local
45 struct cma *cma = cma_from_kobj(kobj); alloc_pages_fail_show() local
53 struct cma *cma = cma_from_kobj(kobj); cma_kobj_release() local
65 ATTRIBUTE_GROUPS(cma); global() variable
77 struct cma *cma; cma_sysfs_init() local
[all...]
cma.h
10 struct cma *cma; member
13 struct cma { struct
36 extern struct cma cma_areas[MAX_CMA_AREAS];
39 static inline unsigned long cma_bitmap_maxno(struct cma *cma) in cma_bitmap_maxno() argument
41 return cma->count >> cma->order_per_bit; in cma_bitmap_maxno()
45 void cma_sysfs_account_success_pages(struct cma *cma, unsigne
48 cma_sysfs_account_success_pages(struct cma *cma, unsigned long nr_pages) cma_sysfs_account_success_pages() argument
50 cma_sysfs_account_fail_pages(struct cma *cma, unsigned long nr_pages) cma_sysfs_account_fail_pages() argument
[all...]
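
mm/cma.h also shows the bookkeeping: one bitmap bit covers 2^order_per_bit pages, so cma_bitmap_maxno() is just `count >> order_per_bit`, and the used-page figure exported by cma_debug.c shifts the bitmap popcount back up. A standalone sketch of that arithmetic (function names are invented for illustration):

#include <linux/bitmap.h>

/* A region of `count` pages needs count >> order_per_bit bitmap bits,
 * which is what cma_bitmap_maxno() computes from the struct cma fields. */
static unsigned long region_bitmap_bits(unsigned long count,
                                        unsigned int order_per_bit)
{
        return count >> order_per_bit;
}

/* ...and the "used pages" value in cma_debug.c is the popcount of that
 * bitmap shifted back up by order_per_bit. */
static unsigned long region_used_pages(const unsigned long *bitmap,
                                       unsigned long count,
                                       unsigned int order_per_bit)
{
        return (unsigned long)bitmap_weight(bitmap, count >> order_per_bit)
                << order_per_bit;
}
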
/kernel/linux/linux-5.10/mm/
cma_debug.c
10 #include <linux/cma.h>
16 #include "cma.h"
36 struct cma *cma = data; in cma_used_get() local
39 mutex_lock(&cma->lock); in cma_used_get()
41 used = bitmap_weight(cma->bitmap, (int)cma_bitmap_maxno(cma)); in cma_used_get()
42 mutex_unlock(&cma->lock); in cma_used_get()
43 *val = (u64)used << cma->order_per_bit; in cma_used_get()
51 struct cma *cm in cma_maxchunk_get() local
71 cma_add_to_cma_mem_list(struct cma *cma, struct cma_mem *mem) cma_add_to_cma_mem_list() argument
78 cma_get_entry_from_list(struct cma *cma) cma_get_entry_from_list() argument
92 cma_free_mem(struct cma *cma, int count) cma_free_mem() argument
125 struct cma *cma = data; cma_free_write() local
131 cma_alloc_mem(struct cma *cma, int count) cma_alloc_mem() argument
157 struct cma *cma = data; cma_alloc_write() local
163 cma_debugfs_add_one(struct cma *cma, struct dentry *root_dentry) cma_debugfs_add_one() argument
[all...]
cma.c
15 #define pr_fmt(fmt) "cma: " fmt
31 #include <linux/cma.h>
35 #include <trace/events/cma.h>
37 #include "cma.h"
39 struct cma cma_areas[MAX_CMA_AREAS];
43 phys_addr_t cma_get_base(const struct cma *cma) in cma_get_base() argument
45 return PFN_PHYS(cma->base_pfn); in cma_get_base()
48 unsigned long cma_get_size(const struct cma *cma) in cma_get_size() argument
53 cma_get_name(const struct cma *cma) cma_get_name() argument
58 cma_bitmap_aligned_mask(const struct cma *cma, unsigned int align_order) cma_bitmap_aligned_mask() argument
70 cma_bitmap_aligned_offset(const struct cma *cma, unsigned int align_order) cma_bitmap_aligned_offset() argument
77 cma_bitmap_pages_to_bits(const struct cma *cma, unsigned long pages) cma_bitmap_pages_to_bits() argument
83 cma_clear_bitmap(struct cma *cma, unsigned long pfn, unsigned int count) cma_clear_bitmap() argument
96 cma_activate_area(struct cma *cma) cma_activate_area() argument
172 struct cma *cma; cma_init_reserved_mem() local
373 cma_debug_show_areas(struct cma *cma) cma_debug_show_areas() argument
398 cma_debug_show_areas(struct cma *cma) cma_debug_show_areas() argument
411 cma_alloc(struct cma *cma, size_t count, unsigned int align, bool no_warn) cma_alloc() argument
508 cma_release(struct cma *cma, const struct page *pages, unsigned int count) cma_release() argument
531 cma_for_each_area(int (*it)(struct cma *cma, void *data), void *data) cma_for_each_area() argument
[all...]
cma.h
7 struct cma { struct
21 extern struct cma cma_areas[MAX_CMA_AREAS];
24 static inline unsigned long cma_bitmap_maxno(struct cma *cma) in cma_bitmap_maxno() argument
26 return cma->count >> cma->order_per_bit; in cma_bitmap_maxno()
/kernel/linux/linux-6.6/include/linux/
cma.h
27 struct cma;
30 extern phys_addr_t cma_get_base(const struct cma *cma);
31 extern unsigned long cma_get_size(const struct cma *cma);
32 extern const char *cma_get_name(const struct cma *cma);
37 bool fixed, const char *name, struct cma **res_cma,
42 bool fixed, const char *name, struct cma **res_cma) in cma_declare_contiguous()
50 struct cma **res_cm
[all...]
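
include/linux/cma.h is the public interface: the accessors plus cma_declare_contiguous(), which reserves a named region while memblock is still active. A hedged sketch of declaring and then reporting such a region (the name "example", the 16 MiB size, and the helper are all invented; this must run from early setup code, not a regular initcall):

#include <linux/cma.h>
#include <linux/printk.h>
#include <linux/sizes.h>

static struct cma *example_cma;         /* hypothetical handle */

/* Call from early setup code while memblock is still live. */
static int __init example_cma_reserve(void)
{
        phys_addr_t base;
        int ret;

        /* base = 0 and limit = 0 let the allocator pick the range;
         * alignment = 0, order_per_bit = 0, fixed = false. */
        ret = cma_declare_contiguous(0, SZ_16M, 0, 0, 0, false,
                                     "example", &example_cma);
        if (ret)
                return ret;

        base = cma_get_base(example_cma);
        pr_info("%s: base=%pa size=%lu\n", cma_get_name(example_cma),
                &base, cma_get_size(example_cma));
        return 0;
}
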
dma-map-ops.h
13 struct cma;
114 extern struct cma *dma_contiguous_default_area;
116 static inline struct cma *dev_get_cma_area(struct device *dev) in dev_get_cma_area()
125 phys_addr_t limit, struct cma **res_cma, bool fixed);
136 static inline struct cma *dev_get_cma_area(struct device *dev) in dev_get_cma_area()
144 phys_addr_t base, phys_addr_t limit, struct cma **res_cma, in dma_contiguous_reserve_area()
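
dma-map-ops.h layers the DMA side on top: dev_get_cma_area() returns the device's own area if one was assigned, otherwise dma_contiguous_default_area. A small illustrative helper (hypothetical, not kernel code) that reports which area a device would draw from:

#include <linux/cma.h>
#include <linux/device.h>
#include <linux/dma-map-ops.h>

/* Hypothetical: print the CMA area contiguous allocations for this
 * device would come from (its private area, or the global default). */
static void report_cma_area(struct device *dev)
{
        struct cma *area = dev_get_cma_area(dev);

        if (!area) {
                dev_info(dev, "no CMA area configured\n");
                return;
        }
        dev_info(dev, "CMA area \"%s\", %lu bytes\n",
                 cma_get_name(area), cma_get_size(area));
}
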
/kernel/linux/linux-5.10/include/linux/
cma.h
23 struct cma;
26 extern phys_addr_t cma_get_base(const struct cma *cma);
27 extern unsigned long cma_get_size(const struct cma *cma);
28 extern const char *cma_get_name(const struct cma *cma);
33 bool fixed, const char *name, struct cma **res_cma,
38 bool fixed, const char *name, struct cma **res_cma) in cma_declare_contiguous()
46 struct cma **res_cm
[all...]
dma-map-ops.h
12 struct cma;
101 extern struct cma *dma_contiguous_default_area;
103 static inline struct cma *dev_get_cma_area(struct device *dev) in dev_get_cma_area()
112 phys_addr_t limit, struct cma **res_cma, bool fixed);
123 static inline struct cma *dev_get_cma_area(struct device *dev) in dev_get_cma_area()
131 phys_addr_t base, phys_addr_t limit, struct cma **res_cma, in dma_contiguous_reserve_area()
/kernel/linux/linux-6.6/kernel/dma/
contiguous.c
38 #define pr_fmt(fmt) "cma: " fmt
52 #include <linux/cma.h>
61 struct cma *dma_contiguous_default_area;
71 * should use cma= kernel parameter.
98 early_param("cma", early_cma);
102 static struct cma *dma_contiguous_numa_area[MAX_NUMNODES];
104 static struct cma *dma_contiguous_pernuma_area[MAX_NUMNODES];
172 struct cma **cma; in dma_numa_cma_reserve() local
182 cma in dma_numa_cma_reserve()
335 cma_alloc_aligned(struct cma *cma, size_t size, gfp_t gfp) cma_alloc_aligned() argument
373 struct cma *cma = dma_contiguous_pernuma_area[nid]; dma_alloc_contiguous() local
467 struct cma *cma; rmem_cma_setup() local
[all...]
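
kernel/dma/contiguous.c is the main consumer of those declarations: it parses the cma= boot parameter, keeps the default and per-NUMA-node areas, and builds dma_alloc_contiguous()/dma_free_contiguous() on top of cma_alloc()/cma_release(). A sketch of how a driver would normally go through that layer instead of touching struct cma directly (wrapper names are placeholders):

#include <linux/dma-map-ops.h>
#include <linux/gfp.h>

/* Hypothetical wrappers: dma_alloc_contiguous() tries the device's own
 * CMA area, then a per-NUMA-node area, then the global default set up
 * via the cma= parameter; it may return NULL, in which case a real
 * caller would fall back to the normal page allocator. */
static struct page *grab_contig(struct device *dev, size_t size)
{
        return dma_alloc_contiguous(dev, size, GFP_KERNEL);
}

static void drop_contig(struct device *dev, struct page *page, size_t size)
{
        dma_free_contiguous(dev, page, size);
}
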
pool.c
6 #include <linux/cma.h>
60 struct cma *cma; in cma_in_zone() local
62 cma = dev_get_cma_area(NULL); in cma_in_zone()
63 if (!cma) in cma_in_zone()
66 size = cma_get_size(cma); in cma_in_zone()
71 end = cma_get_base(cma) + size - 1; in cma_in_zone()
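
pool.c combines the accessors above to decide whether the default CMA area sits inside a DMA zone, using end = base + size - 1. The same check in isolation (function name hypothetical):

#include <linux/cma.h>
#include <linux/dma-map-ops.h>
#include <linux/types.h>

/* Hypothetical check mirroring cma_in_zone(): does the default CMA
 * area end at or below a given physical address limit? */
static bool default_cma_fits_below(u64 limit)
{
        struct cma *cma = dev_get_cma_area(NULL);
        u64 end;

        if (!cma)
                return false;
        end = cma_get_base(cma) + cma_get_size(cma) - 1;
        return end <= limit;
}
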
/kernel/linux/linux-5.10/drivers/staging/android/ion/
ion_cma_heap.c
13 #include <linux/cma.h>
21 struct cma *cma; member
42 pages = cma_alloc(cma_heap->cma, nr_pages, align, false); in ion_cma_allocate()
79 cma_release(cma_heap->cma, pages, nr_pages); in ion_cma_allocate()
90 cma_release(cma_heap->cma, pages, nr_pages); in ion_cma_free()
104 static struct ion_heap *__ion_cma_heap_create(struct cma *cma) in __ion_cma_heap_create() argument
114 cma_heap->cma = cma; in __ion_cma_heap_create()
119 __ion_add_cma_heaps(struct cma *cma, void *data) __ion_add_cma_heaps() argument
[all...]
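
The ION heap walks every registered area with cma_for_each_area() and builds one heap per area. A pared-down sketch of that iteration pattern (the callback only prints; a non-zero return value would stop the walk early):

#include <linux/cma.h>
#include <linux/printk.h>

/* Called once per registered CMA area. */
static int list_one_cma(struct cma *cma, void *data)
{
        unsigned int *idx = data;

        pr_info("cma area %u: %s, %lu bytes\n",
                (*idx)++, cma_get_name(cma), cma_get_size(cma));
        return 0;
}

static int list_cma_areas(void)
{
        unsigned int idx = 0;

        return cma_for_each_area(list_one_cma, &idx);
}
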
/kernel/linux/linux-5.10/kernel/dma/
contiguous.c
38 #define pr_fmt(fmt) "cma: " fmt
52 #include <linux/cma.h>
60 struct cma *dma_contiguous_default_area;
70 * should use cma= kernel parameter.
97 early_param("cma", early_cma);
101 static struct cma *dma_contiguous_pernuma_area[MAX_NUMNODES];
141 struct cma **cma = &dma_contiguous_pernuma_area[nid]; in dma_pernuma_cma_reserve() local
145 0, false, name, cma, nid); in dma_pernuma_cma_reserve()
215 * @res_cma: Pointer to store the created cma regio
282 cma_alloc_aligned(struct cma *cma, size_t size, gfp_t gfp) cma_alloc_aligned() argument
320 struct cma *cma = dma_contiguous_pernuma_area[nid]; dma_alloc_contiguous() local
406 struct cma *cma; rmem_cma_setup() local
[all...]
pool.c
6 #include <linux/cma.h>
63 struct cma *cma; in cma_in_zone() local
65 cma = dev_get_cma_area(NULL); in cma_in_zone()
66 if (!cma) in cma_in_zone()
69 size = cma_get_size(cma); in cma_in_zone()
74 end = cma_get_base(cma) + size - 1; in cma_in_zone()
/kernel/linux/linux-5.10/drivers/dma-buf/heaps/
cma_heap.c
9 #include <linux/cma.h>
26 struct cma *cma; member
38 cma_release(cma_heap->cma, cma_pages, nr_pages); in cma_heap_free()
69 cma_pages = cma_alloc(cma_heap->cma, nr_pages, align, false); in cma_heap_allocate()
130 cma_release(cma_heap->cma, cma_pages, nr_pages); in cma_heap_allocate()
140 static int __add_cma_heap(struct cma *cma, void *data) in __add_cma_heap() argument
148 cma_heap->cma = cma; in __add_cma_heap()
[all...]
/kernel/linux/linux-6.6/drivers/dma-buf/heaps/
cma_heap.c
12 #include <linux/cma.h>
28 struct cma *cma; member
259 cma_release(cma_heap->cma, buffer->cma_pages, buffer->pagecount); in cma_heap_dma_buf_release()
303 cma_pages = cma_alloc(cma_heap->cma, pagecount, align, false); in cma_heap_allocate()
307 /* Clear the cma pages */ in cma_heap_allocate()
359 cma_release(cma_heap->cma, cma_pages, pagecount); in cma_heap_allocate()
370 static int __add_cma_heap(struct cma *cma, void *data) in __add_cma_heap() argument
378 cma_heap->cma in __add_cma_heap()
[all...]
/kernel/linux/linux-6.6/arch/s390/mm/
init.c
31 #include <linux/cma.h>
232 /* Prevent memory blocks which contain cma regions from going offline */
239 static int s390_cma_check_range(struct cma *cma, void *data) in s390_cma_check_range() argument
245 start = cma_get_base(cma); in s390_cma_check_range()
246 end = start + cma_get_size(cma); in s390_cma_check_range()
/kernel/linux/linux-5.10/drivers/gpu/drm/loongson/
loongson_cursor.c
114 struct drm_gem_cma_object *cma, *cursor = ldev->cursor; in loongson_crtc_cursor_set2() local
132 cma = to_drm_gem_cma_obj(obj); in loongson_crtc_cursor_set2()
134 flush_scache_range(cma->vaddr, 32*32*4); in loongson_crtc_cursor_set2()
135 memcpy(cursor->vaddr, cma->vaddr, 32*32*4); in loongson_crtc_cursor_set2()
/kernel/linux/linux-5.10/arch/s390/mm/
init.c
31 #include <linux/cma.h>
232 /* Prevent memory blocks which contain cma regions from going offline */
239 static int s390_cma_check_range(struct cma *cma, void *data) in s390_cma_check_range() argument
245 start = cma_get_base(cma); in s390_cma_check_range()
246 end = start + cma_get_size(cma); in s390_cma_check_range()
/kernel/linux/linux-5.10/drivers/of/
of_reserved_mem.c
475 int cma = 0; in dt_reserved_memory_debug_show() local
479 " [d/s] [cma] [name]\n"); in dt_reserved_memory_debug_show()
482 cma = 0; in dt_reserved_memory_debug_show()
494 /* find out cma reserved memory node */ in dt_reserved_memory_debug_show()
497 cma = 1; in dt_reserved_memory_debug_show()
508 (cma == 1) ? "y" : "n", in dt_reserved_memory_debug_show()
/kernel/linux/linux-5.10/lib/
show_mem.c
9 #include <linux/cma.h>
39 printk("%lu pages cma reserved\n", totalcma_pages); in show_mem()
/kernel/linux/linux-5.10/drivers/s390/char/
vmcp.c
24 #include <linux/cma.h>
42 static struct cma *vmcp_cma;
/kernel/linux/linux-6.6/drivers/s390/char/
vmcp.c
24 #include <linux/cma.h>
42 static struct cma *vmcp_cma;
