Lines matching refs: sz
36 unsigned long sz, const int op, const int full_page);
38 void (*__dma_cache_wback_inv)(phys_addr_t start, unsigned long sz);
39 void (*__dma_cache_inv)(phys_addr_t start, unsigned long sz);
40 void (*__dma_cache_wback)(phys_addr_t start, unsigned long sz);
55 p_slc->sz_k = 128 << slc_cfg.sz;
119 p_ic->sz_k = 1 << (ibcr.sz - 1);
145 p_dc->sz_k = 1 << (dbcr.sz - 1);
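The three matches above (lines 55, 119, 145) decode size fields from configuration registers into KiB: the SLC field scales a 128 KiB base, while the L1 I$/D$ fields encode a power of two shifted down by one. A minimal standalone sketch of just that arithmetic; the loop bound and field values below are demo assumptions, not real register contents:

#include <stdio.h>

/* p_slc->sz_k = 128 << slc_cfg.sz */
static unsigned int slc_sz_k(unsigned int sz_field)
{
	return 128u << sz_field;
}

/* p_ic->sz_k / p_dc->sz_k = 1 << (bcr.sz - 1) */
static unsigned int l1_sz_k(unsigned int sz_field)
{
	return 1u << (sz_field - 1);
}

int main(void)
{
	for (unsigned int f = 1; f <= 4; f++)
		printf("field=%u  L1=%uK  SLC=%uK\n", f, l1_sz_k(f), slc_sz_k(f));
	return 0;
}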
190 unsigned long sz, const int op, const int full_page)
207 * -@sz will be integral multiple of line size (being page sized).
210 sz += paddr & ~CACHE_LINE_MASK;
214 num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);
250 unsigned long sz, const int op, const int full_page)
266 * -@sz will be integral multiple of line size (being page sized).
269 sz += paddr & ~CACHE_LINE_MASK;
273 num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);
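Both per-line loop variants (the matches around lines 190-214 and 250-273) handle an unaligned start the same way: the misaligned head is folded into sz, the address is rounded down to a line boundary, and the line count is a round-up division. A standalone sketch of that arithmetic, assuming a 64-byte line and re-defining CACHE_LINE_MASK and DIV_ROUND_UP locally for the demo:

#include <stdio.h>

#define L1_CACHE_BYTES	64		/* demo value */
#define CACHE_LINE_MASK	(~(L1_CACHE_BYTES - 1))
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	unsigned long paddr = 0x1234;	/* not line aligned */
	unsigned long sz = 100;		/* not a multiple of the line size */

	sz += paddr & ~CACHE_LINE_MASK;	/* fold the unaligned head into sz */
	paddr &= CACHE_LINE_MASK;	/* align the start down */

	unsigned long num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);

	printf("%lu lines starting at 0x%lx cover the original range\n",
	       num_lines, paddr);
	return 0;
}

With paddr = 0x1234 and sz = 100 this yields 3 lines starting at 0x1200, which spans the original 0x1234..0x1297 range.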
304 unsigned long sz, const int op, const int full_page)
319 sz += paddr & ~CACHE_LINE_MASK;
326 sz += L1_CACHE_BYTES - 1;
338 write_aux_reg(e, paddr + sz); /* ENDR is exclusive */
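The region-based variant (lines 304-338) programs start/end registers instead of looping: after folding the unaligned head into sz it also adds L1_CACHE_BYTES - 1 before writing paddr + sz to the END register, which the in-source comment notes is exclusive; the bump guarantees the END address lands beyond the cache line holding the last byte, so a partial final line is still covered. A sketch of only that address math, with the aux-register writes replaced by a printout and a 64-byte line assumed:

#include <stdio.h>

#define L1_CACHE_BYTES	64		/* demo value */
#define CACHE_LINE_MASK	(~(L1_CACHE_BYTES - 1))

int main(void)
{
	unsigned long paddr = 0x1234, sz = 100;

	sz += paddr & ~CACHE_LINE_MASK;	/* account for the unaligned start */
	paddr &= CACHE_LINE_MASK;	/* align the start down */
	sz += L1_CACHE_BYTES - 1;	/* keep a partial last line in range */

	/* the kernel writes this pair to the START/END aux registers */
	printf("START 0x%lx  END 0x%lx (exclusive)\n", paddr, paddr + sz);
	return 0;
}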
456 #define __dc_line_op_k(p, sz, op) __dc_line_op(p, p, sz, op)
462 unsigned long sz, const int op)
464 const int full_page = __builtin_constant_p(sz) && sz == PAGE_SIZE;
471 __cache_line_loop(paddr, vaddr, sz, op, full_page);
483 #define __dc_line_op(paddr, vaddr, sz, op)
484 #define __dc_line_op_k(paddr, sz, op)
498 unsigned long sz)
500 const int full_page = __builtin_constant_p(sz) && sz == PAGE_SIZE;
504 (*_cache_line_loop_ic_fn)(paddr, vaddr, sz, OP_INV_IC, full_page);
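In the wrappers at lines 462-471 and 498-504, full_page is computed with __builtin_constant_p(sz), so when an inlined call site passes a literal PAGE_SIZE the flag folds to a compile-time 1 and the loop's partial-line handling can be dropped. A tiny GCC/Clang illustration of that folding; PAGE_SIZE here is just a demo constant and the result depends on optimization and inlining:

#include <stdio.h>

#define PAGE_SIZE 4096UL		/* demo value */

static inline void op_range(unsigned long sz)
{
	/* folds to 1 only if sz is a compile-time constant equal to PAGE_SIZE */
	const int full_page = __builtin_constant_p(sz) && sz == PAGE_SIZE;

	printf("sz=%lu full_page=%d\n", sz, full_page);
}

int main(int argc, char **argv)
{
	unsigned long n = (unsigned long)argc * PAGE_SIZE;	/* runtime value */

	(void)argv;
	op_range(PAGE_SIZE);	/* typically 1 at -O2, once inlined */
	op_range(n);		/* 0: sz is not a compile-time constant here */
	return 0;
}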
516 int sz;
523 __ic_line_inv_vaddr_local(ic_inv->paddr, ic_inv->vaddr, ic_inv->sz);
527 unsigned long sz)
532 .sz = sz
543 #define __ic_line_inv_vaddr(pstart, vstart, sz)
547 static noinline void slc_op_rgn(phys_addr_t paddr, unsigned long sz, const int op)
588 * END can't be same as START, so add (l2_line_sz - 1) to sz
590 end = paddr + sz + l2_line_sz - 1;
610 static __maybe_unused noinline void slc_op_line(phys_addr_t paddr, unsigned long sz, const int op)
641 sz += paddr & ~SLC_LINE_MASK;
644 num_lines = DIV_ROUND_UP(sz, l2_line_sz);
660 #define slc_op(paddr, sz, op) slc_op_rgn(paddr, sz, op)
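The SLC has the same two flavours: a region form where, per the comment at line 588, END can't equal START, so (l2_line_sz - 1) is added before computing the end address, and a per-line form (lines 641-644) that folds and rounds up exactly like the L1 loops but with the SLC line size. A standalone sketch of both computations, assuming a 128-byte SLC line for the demo:

#include <stdio.h>

#define L2_LINE_SZ	128		/* demo value for l2_line_sz */
#define SLC_LINE_MASK	(~(L2_LINE_SZ - 1))
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	unsigned long paddr = 0x2020, sz = 300;

	/* region flavour: pad so END differs from START even for tiny ranges */
	unsigned long end = paddr + sz + L2_LINE_SZ - 1;
	printf("region op: START 0x%lx  END 0x%lx\n", paddr, end);

	/* per-line flavour: fold the unaligned head, then round up */
	unsigned long folded = sz + (paddr & ~SLC_LINE_MASK);
	unsigned long num_lines = DIV_ROUND_UP(folded, L2_LINE_SZ);
	printf("line op:   %lu lines from 0x%lx\n",
	       num_lines, paddr & SLC_LINE_MASK);
	return 0;
}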
764 static void __dma_cache_wback_inv_l1(phys_addr_t start, unsigned long sz)
766 __dc_line_op_k(start, sz, OP_FLUSH_N_INV);
769 static void __dma_cache_inv_l1(phys_addr_t start, unsigned long sz)
771 __dc_line_op_k(start, sz, OP_INV);
774 static void __dma_cache_wback_l1(phys_addr_t start, unsigned long sz)
776 __dc_line_op_k(start, sz, OP_FLUSH);
783 static void __dma_cache_wback_inv_slc(phys_addr_t start, unsigned long sz)
785 __dc_line_op_k(start, sz, OP_FLUSH_N_INV);
786 slc_op(start, sz, OP_FLUSH_N_INV);
789 static void __dma_cache_inv_slc(phys_addr_t start, unsigned long sz)
791 __dc_line_op_k(start, sz, OP_INV);
792 slc_op(start, sz, OP_INV);
795 static void __dma_cache_wback_slc(phys_addr_t start, unsigned long sz)
797 __dc_line_op_k(start, sz, OP_FLUSH);
798 slc_op(start, sz, OP_FLUSH);
804 void dma_cache_wback_inv(phys_addr_t start, unsigned long sz)
806 __dma_cache_wback_inv(start, sz);
810 void dma_cache_inv(phys_addr_t start, unsigned long sz)
812 __dma_cache_inv(start, sz);
816 void dma_cache_wback(phys_addr_t start, unsigned long sz)
818 __dma_cache_wback(start, sz);
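The exported dma_cache_wback_inv/inv/wback entry points (lines 804-818) only chase the function pointers declared at lines 38-40; those get wired during cache init to either the L1-only helpers or the L1-plus-SLC helpers from lines 764-798, so each DMA sync costs one indirect call rather than a per-call feature test. A compressed userspace sketch of that dispatch shape; the stub bodies, the have_slc flag and the setup code are invented for the demo:

#include <stdio.h>
#include <stdint.h>

typedef uint64_t phys_addr_t;		/* stand-in for the kernel type */

/* hook chosen once at init, mirroring the declarations at lines 38-40 */
static void (*__dma_cache_wback_inv)(phys_addr_t start, unsigned long sz);

static void dma_wback_inv_l1(phys_addr_t start, unsigned long sz)
{
	printf("L1  flush+inv 0x%llx +%lu\n", (unsigned long long)start, sz);
}

static void dma_wback_inv_slc(phys_addr_t start, unsigned long sz)
{
	dma_wback_inv_l1(start, sz);	/* L1 first, then the shared cache */
	printf("SLC flush+inv 0x%llx +%lu\n", (unsigned long long)start, sz);
}

/* public entry point just calls through the pointer (lines 804-806) */
void dma_cache_wback_inv(phys_addr_t start, unsigned long sz)
{
	__dma_cache_wback_inv(start, sz);
}

int main(void)
{
	int have_slc = 1;		/* pretend probing found an SLC */

	__dma_cache_wback_inv = have_slc ? dma_wback_inv_slc : dma_wback_inv_l1;
	dma_cache_wback_inv(0x80001000ULL, 256);
	return 0;
}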
866 unsigned int off, sz;
872 sz = min_t(unsigned int, tot_sz, PAGE_SIZE - off);
873 __sync_icache_dcache(phy, kstart, sz);
874 kstart += sz;
875 tot_sz -= sz;
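The loop around lines 866-875 walks a kernel virtual range one page at a time: each pass handles at most the remainder of the current page (min of tot_sz and PAGE_SIZE - off), so no chunk straddles a page boundary. A standalone sketch of the chunking, with PAGE_SIZE fixed to a demo value and the per-chunk sync replaced by a printout:

#include <stdio.h>

#define PAGE_SIZE	4096UL		/* demo value */
#define min(a, b)	((a) < (b) ? (a) : (b))

static void sync_chunk(unsigned long kstart, unsigned long sz)
{
	printf("sync 0x%lx +%lu\n", kstart, sz);	/* stand-in for the real sync */
}

int main(void)
{
	unsigned long kstart = 0x10000ff0, tot_sz = 6000;

	while (tot_sz > 0) {
		unsigned long off = kstart & (PAGE_SIZE - 1);
		unsigned long sz = min(tot_sz, PAGE_SIZE - off);	/* stay in-page */

		sync_chunk(kstart, sz);
		kstart += sz;
		tot_sz -= sz;
	}
	return 0;
}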
1026 SYSCALL_DEFINE3(cacheflush, uint32_t, start, uint32_t, sz, uint32_t, flags)
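Line 1026 is the userspace-facing cacheflush(start, sz, flags) syscall. A hedged usage sketch from userspace, assuming the toolchain defines __NR_cacheflush for the target and that flags == 0 is an acceptable request; both are assumptions, not something the matched line establishes:

#include <stdio.h>
#include <unistd.h>
#include <sys/syscall.h>

int main(void)
{
#ifdef __NR_cacheflush
	char buf[64];		/* pretend this range holds freshly written code */
	long ret = syscall(__NR_cacheflush, (unsigned long)buf,
			   (unsigned long)sizeof(buf), 0UL);

	printf("cacheflush returned %ld\n", ret);
#else
	puts("__NR_cacheflush is not defined for this target");
#endif
	return 0;
}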