Lines matching refs:base in mm/cma.c
67 * Find the offset of the base PFN from the specified align_order.
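The helper this comment heads is not itself among the matches. A minimal sketch of the computation the comment describes, consistent with the base_pfn and order_per_bit fields visible in the matches below (upstream names this helper cma_bitmap_aligned_offset; treat the body here as illustrative):

static unsigned long cma_bitmap_aligned_offset(const struct cma *cma,
					       unsigned int align_order)
{
	/* offset of base_pfn within a 2^align_order-page window,
	 * scaled to bitmap bits (one bit covers 2^order_per_bit pages) */
	return (cma->base_pfn & ((1UL << align_order) - 1))
		>> cma->order_per_bit;
}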
164 * @base: Base address of the reserved area
174 int __init cma_init_reserved_mem(phys_addr_t base, phys_addr_t size,
187 if (!size || !memblock_is_region_reserved(base, size))
195 if (!IS_ALIGNED(base | size, CMA_MIN_ALIGNMENT_BYTES))
209 cma->base_pfn = PFN_DOWN(base);
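Read together, the matches at lines 187, 195 and 209 trace the validation path of cma_init_reserved_mem(); a condensed sketch, with the intervening code elided:

	if (!size || !memblock_is_region_reserved(base, size))
		return -EINVAL;	/* area must already be memblock-reserved */

	/* both base and size must meet the minimum CMA alignment */
	if (!IS_ALIGNED(base | size, CMA_MIN_ALIGNMENT_BYTES))
		return -EINVAL;

	cma->base_pfn = PFN_DOWN(base);	/* record the area's first page frame */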
221 * @base: Base address of the reserved area (optional; use 0 for any)
236 * If @fixed is true, reserve a contiguous area at exactly @base. If false,
237 * reserve anywhere within the range from @base to @limit.
239 int __init cma_declare_contiguous_nid(phys_addr_t base,
256 pr_debug("%s(size %pa, base %pa, limit %pa, alignment %pa)\n",
257 __func__, &size, &base, &limit, &alignment);
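As the kerneldoc at lines 221 and 236-237 spells out, a caller either pins the area to @base (@fixed true) or lets the allocator choose within [@base, @limit]. A hypothetical early-boot caller, sketched against the upstream signature (my_reserve_cma and my_cma are illustrative names, not from the source):

static struct cma *my_cma;	/* hypothetical caller-side handle */

static void __init my_reserve_cma(void)
{
	/* base 0, fixed false: place 64 MiB of CMA anywhere in DRAM */
	int ret = cma_declare_contiguous_nid(0, SZ_64M,
					     0,		/* limit: any */
					     0,		/* alignment: default */
					     0,		/* order_per_bit */
					     false,	/* fixed */
					     "my_cma", &my_cma,
					     NUMA_NO_NODE);

	if (ret)
		pr_warn("my_cma: reservation failed (%d)\n", ret);
}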
275 if (fixed && base & (alignment - 1)) {
278 &base, &alignment);
281 base = ALIGN(base, alignment);
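To make the two branches at lines 275 and 281 concrete, a worked example assuming alignment = SZ_4M (0x00400000):

	/*
	 * fixed:  base = 0x00500000 -> 0x00500000 & 0x003fffff != 0,
	 *         so the request is rejected with -EINVAL (line 275).
	 * !fixed: ALIGN(0x00500000, SZ_4M) = 0x00800000, i.e. an
	 *         unaligned hint is silently rounded up (line 281).
	 */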
285 if (!base)
293 * If allocating at a fixed base the request region must not cross the
296 if (fixed && base < highmem_start && base + size > highmem_start) {
299 &base, &highmem_start);
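The check at line 296 exists because every page of a CMA area must come from a single zone, and a fixed area straddling the low/high memory boundary cannot satisfy that. A numeric illustration, assuming a classic 32-bit split with highmem_start at 896 MiB:

	/*
	 * base = 880 MiB, size = 32 MiB:
	 *   880M < 896M && 880M + 32M = 912M > 896M -> -EINVAL,
	 * since the area would span ZONE_NORMAL and ZONE_HIGHMEM.
	 */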
311 if (base + size > limit) {
314 &size, &base, &limit);
320 if (memblock_is_region_reserved(base, size) ||
321 memblock_reserve(base, size) < 0) {
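Lines 311-321 cover the fixed-placement path: the area must fit under @limit, and the exact range must still be free in memblock. A condensed sketch (upstream uses goto-based cleanup rather than direct returns):

	if (base + size > limit)
		return -EINVAL;	/* region does not fit below the limit */

	if (memblock_is_region_reserved(base, size) ||
	    memblock_reserve(base, size) < 0)
		return -EBUSY;	/* exact range already claimed */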
351 if (!addr && base < highmem_start && limit > highmem_start) {
358 addr = memblock_alloc_range_nid(size, alignment, base,
371 base = addr;
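For non-fixed requests, lines 351-371 show the allocation strategy around the highmem boundary: try high memory first so all pages land in one zone, then fall back to the low range. A sketch, with earlier allocation attempts elided:

	phys_addr_t addr = 0;

	if (base < highmem_start && limit > highmem_start) {
		addr = memblock_alloc_range_nid(size, alignment,
						highmem_start, limit,
						nid, true);
		limit = highmem_start;	/* any retry stays in low memory */
	}
	if (!addr)
		addr = memblock_alloc_range_nid(size, alignment,
						base, limit, nid, true);
	if (addr)
		base = addr;	/* line 371: adopt the allocated base */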
374 ret = cma_init_reserved_mem(base, size, order_per_bit, name, res_cma);
379 &base, nid);
383 memblock_phys_free(base, size);
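Finally, lines 374-383 register the area via cma_init_reserved_mem() and return the memory to memblock if that fails; a sketch of the tail (the pr_info wording is paraphrased from upstream):

	ret = cma_init_reserved_mem(base, size, order_per_bit, name, res_cma);
	if (ret) {
		memblock_phys_free(base, size);	/* undo the reservation */
		return ret;
	}

	pr_info("Reserved %ld MiB at %pa on node %d\n",
		(unsigned long)size / SZ_1M, &base, nid);
	return 0;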