Home
last modified time | relevance | path

Searched refs: SHMLBA (Results 1 - 25 of 73) sorted by relevance

123

/kernel/linux/linux-6.6/arch/arm/mm/
H A D mmap.c 17 ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
18 (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
24 * SHMLBA bytes.
51 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
75 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
105 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area_topdown()
126 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area_topdown()
/kernel/linux/linux-5.10/arch/csky/abiv1/
H A D mmap.c 13 ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
14 (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
20 * SHMLBA bytes.
44 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
68 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
/kernel/linux/linux-5.10/arch/nds32/mm/
H A D mmap.c 9 ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
10 (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
16 * SHMLBA bytes.
46 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
70 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
/kernel/linux/linux-5.10/arch/arc/mm/
H A D mmap.c 18 ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
19 (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))
26 * SHMLBA bytes.
49 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
73 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
/kernel/linux/linux-6.6/arch/csky/abiv1/
H A D mmap.c 13 ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
14 (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
20 * SHMLBA bytes.
44 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
68 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
/kernel/linux/linux-5.10/arch/xtensa/kernel/
H A D syscall.c 40 ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
41 (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))
48 err = do_shmat(shmid, shmaddr, shmflg, &ret, SHMLBA); in xtensa_shmat()
71 ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))) in arch_get_unmapped_area()
/kernel/linux/linux-6.6/arch/arc/mm/
H A D mmap.c 18 ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
19 (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))
26 * SHMLBA bytes.
49 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
73 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
/kernel/linux/linux-6.6/arch/xtensa/kernel/
H A D syscall.c 36 ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
37 (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))
44 err = do_shmat(shmid, shmaddr, shmflg, &ret, SHMLBA); in xtensa_shmat()
68 ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))) in arch_get_unmapped_area()
/kernel/linux/linux-5.10/arch/arm/mm/
H A D mmap.c 17 ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
18 (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
24 * SHMLBA bytes.
51 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
75 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
105 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area_topdown()
126 info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area_topdown()
/kernel/linux/linux-5.10/arch/sparc/kernel/
H A D sys_sparc_64.c 84 unsigned long base = (addr+SHMLBA-1)&~(SHMLBA-1); in COLOR_ALIGN()
85 unsigned long off = (pgoff<<PAGE_SHIFT) & (SHMLBA-1); in COLOR_ALIGN()
103 ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))) in arch_get_unmapped_area()
133 info.align_mask = do_color_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
167 ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))) in arch_get_unmapped_area_topdown()
196 info.align_mask = do_color_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area_topdown()
393 err = do_shmat(first, ptr, (int)second, &raddr, SHMLBA); in SYSCALL_DEFINE6()
/kernel/linux/linux-6.6/arch/sparc/kernel/
H A D sys_sparc_64.c 84 unsigned long base = (addr+SHMLBA-1)&~(SHMLBA-1); in COLOR_ALIGN()
85 unsigned long off = (pgoff<<PAGE_SHIFT) & (SHMLBA-1); in COLOR_ALIGN()
103 ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))) in arch_get_unmapped_area()
133 info.align_mask = do_color_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area()
167 ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))) in arch_get_unmapped_area_topdown()
196 info.align_mask = do_color_align ? (PAGE_MASK & (SHMLBA - 1)) : 0; in arch_get_unmapped_area_topdown()
393 err = do_shmat(first, ptr, (int)second, &raddr, SHMLBA); in SYSCALL_DEFINE6()
/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/
H A D rxe_mmap.c 133 rxe->mmap_offset = ALIGN(PAGE_SIZE, SHMLBA); in rxe_create_mmap_info()
136 rxe->mmap_offset += ALIGN(size, SHMLBA); in rxe_create_mmap_info()
/kernel/linux/linux-6.6/drivers/infiniband/sw/rxe/
H A D rxe_mmap.c 132 rxe->mmap_offset = ALIGN(PAGE_SIZE, SHMLBA); in rxe_create_mmap_info()
135 rxe->mmap_offset += ALIGN(size, SHMLBA); in rxe_create_mmap_info()
/kernel/linux/linux-5.10/arch/ia64/include/asm/
H A D shmparam.h 6 * SHMLBA controls minimum alignment at which shared memory segments
9 * To reduce the chance of this, we set SHMLBA to 1MB. --davidm 00/12/20
11 #define SHMLBA (1024*1024) macro
/kernel/linux/linux-5.10/arch/nios2/include/asm/
H A D shmparam.h 8 #define SHMLBA CONFIG_NIOS2_DCACHE_SIZE macro
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
H A D shmparam.h 5 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro
/kernel/linux/linux-5.10/arch/x86/include/asm/
H A D shmparam.h 5 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro
/kernel/linux/linux-5.10/arch/s390/include/asm/
H A D shmparam.h 10 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro
/kernel/linux/linux-5.10/arch/alpha/include/asm/
H A D shmparam.h 5 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro
/kernel/linux/linux-6.6/arch/ia64/include/asm/
H A D shmparam.h 6 * SHMLBA controls minimum alignment at which shared memory segments
9 * To reduce the chance of this, we set SHMLBA to 1MB. --davidm 00/12/20
11 #define SHMLBA (1024*1024) macro
/kernel/linux/linux-6.6/arch/alpha/include/asm/
H A D shmparam.h 5 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
H A D shmparam.h 5 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro
/kernel/linux/linux-6.6/arch/nios2/include/asm/
H A D shmparam.h 8 #define SHMLBA CONFIG_NIOS2_DCACHE_SIZE macro
/kernel/linux/linux-5.10/include/asm-generic/
H A D shmparam.h 5 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro
/kernel/linux/linux-6.6/include/asm-generic/
H A D shmparam.h 5 #define SHMLBA PAGE_SIZE /* attach addr a multiple of this */ macro

Completed in 9 milliseconds

123