Lines Matching refs:cma

15 #define pr_fmt(fmt) "cma: " fmt
31 #include <linux/cma.h>
35 #include <trace/events/cma.h>
37 #include "cma.h"
39 struct cma cma_areas[MAX_CMA_AREAS];
43 phys_addr_t cma_get_base(const struct cma *cma)
45 return PFN_PHYS(cma->base_pfn);
48 unsigned long cma_get_size(const struct cma *cma)
50 return cma->count << PAGE_SHIFT;
53 const char *cma_get_name(const struct cma *cma)
55 return cma->name;
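
The three accessors above are the intended way to read a region's extent without poking at struct cma fields directly. A minimal sketch of a caller, assuming the region pointer came from an earlier reservation (cma_report is a hypothetical helper, not part of mm/cma.c):

static void cma_report(const struct cma *cma)
{
	phys_addr_t base = cma_get_base(cma);
	phys_addr_t end = base + cma_get_size(cma);

	pr_info("%s: %pa-%pa\n", cma_get_name(cma), &base, &end);
}
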
58 static unsigned long cma_bitmap_aligned_mask(const struct cma *cma,
61 if (align_order <= cma->order_per_bit)
63 return (1UL << (align_order - cma->order_per_bit)) - 1;
70 static unsigned long cma_bitmap_aligned_offset(const struct cma *cma,
73 return (cma->base_pfn & ((1UL << align_order) - 1))
74 >> cma->order_per_bit;
77 static unsigned long cma_bitmap_pages_to_bits(const struct cma *cma,
80 return ALIGN(pages, 1UL << cma->order_per_bit) >> cma->order_per_bit;
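
One bitmap bit covers 2^order_per_bit pages, so the helpers above convert page-level alignments and counts into bit granularity: cma_bitmap_aligned_mask() yields the bit-level alignment mask, and cma_bitmap_pages_to_bits() rounds a page count up to whole bits. A runnable userspace sketch of the same arithmetic, with illustrative values (order_per_bit = 2, a 16-page alignment request, 10 pages):

#include <stdio.h>

#define ORDER_PER_BIT 2UL	/* one bitmap bit covers 4 pages */

int main(void)
{
	unsigned long align_order = 4;	/* request a 16-page-aligned block */
	unsigned long pages = 10;

	/* cma_bitmap_aligned_mask(): alignment mask in bitmap bits */
	unsigned long mask = align_order <= ORDER_PER_BIT ? 0 :
		(1UL << (align_order - ORDER_PER_BIT)) - 1;
	/* cma_bitmap_pages_to_bits(): ALIGN(10, 4) >> 2 == 3 bits */
	unsigned long bits = ((pages + (1UL << ORDER_PER_BIT) - 1) &
		~((1UL << ORDER_PER_BIT) - 1)) >> ORDER_PER_BIT;

	printf("mask=%lu bits=%lu\n", mask, bits);	/* prints mask=3 bits=3 */
	return 0;
}
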
83 static void cma_clear_bitmap(struct cma *cma, unsigned long pfn,
88 bitmap_no = (pfn - cma->base_pfn) >> cma->order_per_bit;
89 bitmap_count = cma_bitmap_pages_to_bits(cma, count);
91 mutex_lock(&cma->lock);
92 bitmap_clear(cma->bitmap, bitmap_no, bitmap_count);
93 mutex_unlock(&cma->lock);
96 static void __init cma_activate_area(struct cma *cma)
98 unsigned long base_pfn = cma->base_pfn, pfn = base_pfn;
99 unsigned int i = cma->count >> pageblock_order;
102 cma->bitmap = bitmap_zalloc(cma_bitmap_maxno(cma), GFP_KERNEL);
103 if (!cma->bitmap)
127 mutex_init(&cma->lock);
130 INIT_HLIST_HEAD(&cma->mem_head);
131 spin_lock_init(&cma->mem_head_lock);
137 bitmap_free(cma->bitmap);
139 cma->count = 0;
140 pr_err("CMA area %s could not be activated\n", cma->name);
163 * @res_cma: Pointer to store the created cma region.
170 struct cma **res_cma)
172 struct cma *cma;
199 cma = &cma_areas[cma_area_count];
202 snprintf(cma->name, CMA_MAX_NAME, "%s", name);
204 snprintf(cma->name, CMA_MAX_NAME, "cma%d", cma_area_count);
206 cma->base_pfn = PFN_DOWN(base);
207 cma->count = size >> PAGE_SHIFT;
208 cma->order_per_bit = order_per_bit;
209 *res_cma = cma;
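
The assignments above are the whole registration: cma_init_reserved_mem() only records an already-reserved range and defers activation to cma_activate_area() at init time. A hedged sketch of an early-boot caller; my_cma_setup, the base/size values, and the "my-cma" name are hypothetical, and the range must already be reserved (e.g. via memblock) and suitably aligned:

static struct cma *my_cma;

static int __init my_cma_setup(phys_addr_t base, phys_addr_t size)
{
	int ret;

	/* order_per_bit = 0: track the region at single-page granularity */
	ret = cma_init_reserved_mem(base, size, 0, "my-cma", &my_cma);
	if (ret)
		pr_err("my-cma: registration failed: %d\n", ret);
	return ret;
}
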
225 * @res_cma: Pointer to store the created cma region.
239 bool fixed, const char *name, struct cma **res_cma,
373 static void cma_debug_show_areas(struct cma *cma)
378 unsigned long nbits = cma_bitmap_maxno(cma);
380 mutex_lock(&cma->lock);
383 next_zero_bit = find_next_zero_bit(cma->bitmap, nbits, start);
386 next_set_bit = find_next_bit(cma->bitmap, nbits, next_zero_bit);
388 nr_part = nr_zero << cma->order_per_bit;
394 pr_cont("=> %lu free of %lu total pages\n", nr_total, cma->count);
395 mutex_unlock(&cma->lock);
398 static inline void cma_debug_show_areas(struct cma *cma) { }
403 * @cma: Contiguous memory region for which the allocation is performed.
411 struct page *cma_alloc(struct cma *cma, size_t count, unsigned int align,
422 if (!cma || !cma->count || !cma->bitmap)
425 pr_debug("%s(cma %p, count %zu, align %d)\n", __func__, (void *)cma,
431 mask = cma_bitmap_aligned_mask(cma, align);
432 offset = cma_bitmap_aligned_offset(cma, align);
433 bitmap_maxno = cma_bitmap_maxno(cma);
434 bitmap_count = cma_bitmap_pages_to_bits(cma, count);
440 mutex_lock(&cma->lock);
441 bitmap_no = bitmap_find_next_zero_area_off(cma->bitmap,
445 mutex_unlock(&cma->lock);
448 bitmap_set(cma->bitmap, bitmap_no, bitmap_count);
454 mutex_unlock(&cma->lock);
456 pfn = cma->base_pfn + (bitmap_no << cma->order_per_bit);
466 cma_clear_bitmap(cma, pfn, count);
491 cma_debug_show_areas(cma);
500 * @cma: Contiguous memory region for which the allocation is performed.
508 bool cma_release(struct cma *cma, const struct page *pages, unsigned int count)
512 if (!cma || !pages)
519 if (pfn < cma->base_pfn || pfn >= cma->base_pfn + cma->count)
522 VM_BUG_ON(pfn + count > cma->base_pfn + cma->count);
525 cma_clear_bitmap(cma, pfn, count);
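
Allocation and release pair up around the same bitmap: cma_alloc() marks bits busy before handing out pages, and cma_release() frees the pages and clears the bits via cma_clear_bitmap() above. A hedged driver-side sketch, assuming the fourth cma_alloc() parameter truncated in the match above is this era's bool no_warn flag, and reusing the hypothetical my_cma from earlier:

struct page *pages;
unsigned int count = 16;	/* 16 pages, order-4 aligned */

pages = cma_alloc(my_cma, count, 4, false);
if (!pages)
	return -ENOMEM;

/* ... use the contiguous range starting at page_to_pfn(pages) ... */

if (!cma_release(my_cma, pages, count))
	pr_warn("my-cma: pages were not from this region\n");
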
531 int cma_for_each_area(int (*it)(struct cma *cma, void *data), void *data)
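
cma_for_each_area() visits every registered area in turn, stopping at the first callback that returns nonzero and propagating that value. A minimal sketch of a callback that tallies pages across all areas (count_cma_pages is a hypothetical name):

static int count_cma_pages(struct cma *cma, void *data)
{
	unsigned long *total = data;

	*total += cma_get_size(cma) >> PAGE_SHIFT;
	return 0;	/* nonzero would abort the walk */
}

A caller passes a pointer to an unsigned long accumulator as data, e.g. cma_for_each_area(count_cma_pages, &total).
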