Searched refs:percpu (Results 1 - 25 of 918) sorted by relevance


/kernel/linux/linux-6.6/fs/squashfs/
decompressor_multi_percpu.c
9 #include <linux/percpu.h>
18 * This file implements multi-threaded decompression using percpu
31 struct squashfs_stream __percpu *percpu; in squashfs_decompressor_create() local
34 percpu = alloc_percpu(struct squashfs_stream); in squashfs_decompressor_create()
35 if (percpu == NULL) in squashfs_decompressor_create()
39 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
49 return (__force void *) percpu; in squashfs_decompressor_create()
53 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
57 free_percpu(percpu); in squashfs_decompressor_create()
63 struct squashfs_stream __percpu *percpu in squashfs_decompressor_destroy() local
81 struct squashfs_stream __percpu *percpu = squashfs_decompress() local
[all...]
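
The squashfs matches above show the canonical per-CPU allocation pattern: alloc_percpu() creates one instance per possible CPU, per_cpu_ptr() addresses a specific CPU's copy for initialization, and free_percpu() releases every copy at once. A minimal sketch of that pattern, using hypothetical demo_* names rather than the actual squashfs code:

#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/errno.h>

/* Hypothetical per-CPU state; the real struct squashfs_stream differs. */
struct demo_stream {
	void *strm;
};

static struct demo_stream __percpu *demo_streams;

static int demo_create(void)
{
	struct demo_stream *stream;
	int cpu;

	demo_streams = alloc_percpu(struct demo_stream);	/* one copy per possible CPU */
	if (!demo_streams)
		return -ENOMEM;

	for_each_possible_cpu(cpu) {
		stream = per_cpu_ptr(demo_streams, cpu);	/* that CPU's copy */
		stream->strm = NULL;				/* per-CPU init goes here */
	}
	return 0;
}

static void demo_destroy(void)
{
	free_percpu(demo_streams);	/* frees every CPU's copy */
}
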
/kernel/linux/linux-5.10/fs/squashfs/
decompressor_multi_percpu.c
9 #include <linux/percpu.h>
19 * This file implements multi-threaded decompression using percpu
32 struct squashfs_stream __percpu *percpu; in squashfs_decompressor_create() local
35 percpu = alloc_percpu(struct squashfs_stream); in squashfs_decompressor_create()
36 if (percpu == NULL) in squashfs_decompressor_create()
40 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
50 return (__force void *) percpu; in squashfs_decompressor_create()
54 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
58 free_percpu(percpu); in squashfs_decompressor_create()
64 struct squashfs_stream __percpu *percpu in squashfs_decompressor_destroy() local
[all...]
/kernel/linux/linux-6.6/kernel/bpf/
memalloc.c
254 static void free_one(void *obj, bool percpu) in free_one() argument
256 if (percpu) { in free_one()
265 static int free_all(struct llist_node *llnode, bool percpu) in free_all() argument
271 free_one(pos, percpu); in free_all()
496 int bpf_mem_alloc_init(struct bpf_mem_alloc *ma, int size, bool percpu) in bpf_mem_alloc_init() argument
504 ma->percpu = percpu; in bpf_mem_alloc_init()
511 if (percpu) in bpf_mem_alloc_init()
535 /* size == 0 && percpu is an invalid combination */ in bpf_mem_alloc_init()
536 if (WARN_ON_ONCE(percpu)) in bpf_mem_alloc_init()
564 bool percpu = !!c->percpu_size; drain_mem_cache() local
[all...]
bpf_lru_list.c
6 #include <linux/percpu.h>
501 if (lru->percpu) in bpf_lru_pop_free()
558 if (lru->percpu) in bpf_lru_push_free()
616 if (lru->percpu) in bpf_lru_populate()
651 int bpf_lru_init(struct bpf_lru *lru, bool percpu, u32 hash_offset, in bpf_lru_init() argument
656 if (percpu) { in bpf_lru_init()
686 lru->percpu = percpu; in bpf_lru_init()
696 if (lru->percpu) in bpf_lru_destroy()
bpf_lru_list.h
62 bool percpu; member
71 int bpf_lru_init(struct bpf_lru *lru, bool percpu, u32 hash_offset,
/kernel/linux/linux-5.10/arch/alpha/boot/
bootp.c
71 struct percpu_struct * percpu; in pal_init() local
101 percpu = (struct percpu_struct *) in pal_init()
103 rev = percpu->pal_revision = percpu->palcode_avail[2]; in pal_init()
main.c
65 struct percpu_struct * percpu; in pal_init() local
95 percpu = (struct percpu_struct *) in pal_init()
97 rev = percpu->pal_revision = percpu->palcode_avail[2]; in pal_init()
bootpz.c
119 struct percpu_struct * percpu; in pal_init() local
149 percpu = (struct percpu_struct *) in pal_init()
151 rev = percpu->pal_revision = percpu->palcode_avail[2]; in pal_init()
/kernel/linux/linux-6.6/arch/alpha/boot/
bootp.c
71 struct percpu_struct * percpu; in pal_init() local
101 percpu = (struct percpu_struct *) in pal_init()
103 rev = percpu->pal_revision = percpu->palcode_avail[2]; in pal_init()
main.c
65 struct percpu_struct * percpu; in pal_init() local
95 percpu = (struct percpu_struct *) in pal_init()
97 rev = percpu->pal_revision = percpu->palcode_avail[2]; in pal_init()
bootpz.c
119 struct percpu_struct * percpu; in pal_init() local
149 percpu = (struct percpu_struct *) in pal_init()
151 rev = percpu->pal_revision = percpu->palcode_avail[2]; in pal_init()
/kernel/linux/linux-5.10/net/rds/
ib_recv.c
106 cache->percpu = alloc_percpu_gfp(struct rds_ib_cache_head, gfp); in rds_ib_recv_alloc_cache()
107 if (!cache->percpu) in rds_ib_recv_alloc_cache()
111 head = per_cpu_ptr(cache->percpu, cpu); in rds_ib_recv_alloc_cache()
129 free_percpu(ic->i_cache_incs.percpu); in rds_ib_recv_alloc_caches()
142 head = per_cpu_ptr(cache->percpu, cpu); in rds_ib_cache_splice_all_lists()
165 free_percpu(ic->i_cache_incs.percpu); in rds_ib_recv_free_caches()
176 free_percpu(ic->i_cache_frags.percpu); in rds_ib_recv_free_caches()
470 * First, we put the memory on a percpu list. When this reaches a certain size,
471 * We move it to an intermediate non-percpu list in a lockless manner, with some
486 chpfirst = __this_cpu_read(cache->percpu in rds_ib_recv_cache_put()
[all...]
/kernel/linux/linux-6.6/net/rds/
ib_recv.c
107 cache->percpu = alloc_percpu_gfp(struct rds_ib_cache_head, gfp); in rds_ib_recv_alloc_cache()
108 if (!cache->percpu) in rds_ib_recv_alloc_cache()
112 head = per_cpu_ptr(cache->percpu, cpu); in rds_ib_recv_alloc_cache()
130 free_percpu(ic->i_cache_incs.percpu); in rds_ib_recv_alloc_caches()
143 head = per_cpu_ptr(cache->percpu, cpu); in rds_ib_cache_splice_all_lists()
166 free_percpu(ic->i_cache_incs.percpu); in rds_ib_recv_free_caches()
177 free_percpu(ic->i_cache_frags.percpu); in rds_ib_recv_free_caches()
471 * First, we put the memory on a percpu list. When this reaches a certain size,
472 * We move it to an intermediate non-percpu list in a lockless manner, with some
487 chpfirst = __this_cpu_read(cache->percpu in rds_ib_recv_cache_put()
[all...]
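
The ib_recv.c comments above describe a two-level receive cache: items are first pushed onto a per-CPU list, and only once that list grows past a threshold are they moved, without locks, to a shared non-percpu list. A rough sketch of the per-CPU half of that scheme, with hypothetical demo_* names (the real rds_ib_cache_head and its splice/drain logic are more involved):

#include <linux/percpu.h>
#include <linux/llist.h>
#include <linux/gfp.h>
#include <linux/errno.h>

/* Hypothetical cache head, one instance per CPU. */
struct demo_cache_head {
	struct llist_node *first;
	unsigned long count;
};

struct demo_cache {
	struct demo_cache_head __percpu *percpu;
	struct llist_head ready;		/* shared, non-percpu list */
};

static int demo_cache_alloc(struct demo_cache *cache, gfp_t gfp)
{
	cache->percpu = alloc_percpu_gfp(struct demo_cache_head, gfp);
	if (!cache->percpu)
		return -ENOMEM;
	init_llist_head(&cache->ready);
	return 0;
}

/* Caller is assumed to have preemption disabled, as in the rds receive path. */
static void demo_cache_put(struct demo_cache *cache, struct llist_node *node)
{
	struct llist_node *first = __this_cpu_read(cache->percpu->first);

	node->next = first;			/* push onto this CPU's private list */
	__this_cpu_write(cache->percpu->first, node);
	__this_cpu_inc(cache->percpu->count);
}
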
/kernel/linux/linux-6.6/include/linux/
bpf_mem_alloc.h
14 bool percpu; member
25 int bpf_mem_alloc_init(struct bpf_mem_alloc *ma, int size, bool percpu);
eventfd.h
14 #include <linux/percpu-defs.h>
15 #include <linux/percpu.h>
/kernel/linux/linux-5.10/kernel/bpf/
bpf_lru_list.c
6 #include <linux/percpu.h>
501 if (lru->percpu) in bpf_lru_pop_free()
558 if (lru->percpu) in bpf_lru_push_free()
616 if (lru->percpu) in bpf_lru_populate()
651 int bpf_lru_init(struct bpf_lru *lru, bool percpu, u32 hash_offset, in bpf_lru_init() argument
656 if (percpu) { in bpf_lru_init()
686 lru->percpu = percpu; in bpf_lru_init()
696 if (lru->percpu) in bpf_lru_destroy()
bpf_lru_list.h
61 bool percpu; member
70 int bpf_lru_init(struct bpf_lru *lru, bool percpu, u32 hash_offset,
/kernel/linux/linux-5.10/tools/testing/selftests/cgroup/
test_kmem.c
23 * percpu batches 32 pages big (look at MEMCG_CHARGE_BATCH). So the maximum
346 * is greater than 0 and approximates matches the percpu value
353 long current, percpu; in test_percpu_basic() local
378 percpu = cg_read_key_long(parent, "memory.stat", "percpu "); in test_percpu_basic()
380 if (current > 0 && percpu > 0 && abs(current - percpu) < in test_percpu_basic()
385 current, percpu); in test_percpu_basic()
/kernel/linux/linux-6.6/tools/testing/selftests/cgroup/
test_kmem.c
22 * Memory cgroup charging is performed using percpu batches 64 pages
349 * is greater than 0 and approximates matches the percpu value
356 long current, percpu; in test_percpu_basic() local
381 percpu = cg_read_key_long(parent, "memory.stat", "percpu "); in test_percpu_basic()
383 if (current > 0 && percpu > 0 && abs(current - percpu) < in test_percpu_basic()
388 current, percpu); in test_percpu_basic()
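
Both versions of the selftest read the "percpu" key from memory.stat via the selftests' cg_read_key_long() helper and check that it is positive and close to memory.current, allowing for the per-CPU charge batches mentioned in the comments. A rough userspace sketch of reading that key directly (a hypothetical helper, not the selftest code):

#include <stdio.h>
#include <string.h>

/* Return the "percpu" value from <cgroup_path>/memory.stat, or -1 on error. */
static long read_memstat_percpu(const char *cgroup_path)
{
	char path[512], key[64];
	long val = -1, v;
	FILE *f;

	snprintf(path, sizeof(path), "%s/memory.stat", cgroup_path);
	f = fopen(path, "r");
	if (!f)
		return -1;
	while (fscanf(f, "%63s %ld", key, &v) == 2) {
		if (!strcmp(key, "percpu")) {
			val = v;
			break;
		}
	}
	fclose(f);
	return val;
}
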
/kernel/linux/linux-5.10/include/linux/
eventfd.h
15 #include <linux/percpu-defs.h>
16 #include <linux/percpu.h>
/kernel/linux/linux-5.10/include/asm-generic/
vmlinux.lds.h
336 *(.data..shared_aligned) /* percpu related */ \
1002 *(.data..percpu..decrypted) \
1063 * PERCPU_INPUT - the percpu input sections
1066 * The core percpu section names and core symbols which do not rely
1074 *(.data..percpu..first) \
1076 *(.data..percpu..page_aligned) \
1078 *(.data..percpu..read_mostly) \
1080 *(.data..percpu) \
1081 *(.data..percpu..shared_aligned) \
1086 * PERCPU_VADDR - define output section for percpu are
[all...]
/kernel/linux/linux-6.6/include/asm-generic/
vmlinux.lds.h
357 *(.data..shared_aligned) /* percpu related */ \
951 *(.data..percpu..decrypted) \
1018 * PERCPU_INPUT - the percpu input sections
1021 * The core percpu section names and core symbols which do not rely
1029 *(.data..percpu..first) \
1031 *(.data..percpu..page_aligned) \
1033 *(.data..percpu..read_mostly) \
1035 *(.data..percpu) \
1036 *(.data..percpu..shared_aligned) \
1041 * PERCPU_VADDR - define output section for percpu are
[all...]
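
The PERCPU_INPUT block matched above collects the .data..percpu* input sections that the per-CPU definition macros emit. As a hedged illustration of which DEFINE_PER_CPU variant lands in which of those sections (the variable names are made up):

#include <linux/percpu-defs.h>

DEFINE_PER_CPU(int, demo_counter);			/* .data..percpu */
DEFINE_PER_CPU_SHARED_ALIGNED(long, demo_hot);		/* .data..percpu..shared_aligned */
DEFINE_PER_CPU_READ_MOSTLY(unsigned long, demo_flags);	/* .data..percpu..read_mostly */
DEFINE_PER_CPU_PAGE_ALIGNED(char, demo_buf[64]);	/* .data..percpu..page_aligned */

static void demo_bump(void)
{
	this_cpu_inc(demo_counter);	/* resolves to this CPU's copy at runtime */
}
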
/kernel/linux/linux-5.10/arch/sparc/kernel/
sun4m_irq.c
107 bool percpu; member
200 if (handler_data->percpu) { in sun4m_mask_irq()
219 if (handler_data->percpu) { in sun4m_unmask_irq()
278 handler_data->percpu = real_irq < OBP_INT_LEVEL_ONBOARD; in sun4m_build_device_irq()
/kernel/linux/linux-6.6/arch/sparc/kernel/
sun4m_irq.c
107 bool percpu; member
200 if (handler_data->percpu) { in sun4m_mask_irq()
219 if (handler_data->percpu) { in sun4m_unmask_irq()
278 handler_data->percpu = real_irq < OBP_INT_LEVEL_ONBOARD; in sun4m_build_device_irq()
/kernel/linux/linux-6.6/drivers/md/
raid5.c
1552 static struct page **to_addr_page(struct raid5_percpu *percpu, int i) in to_addr_page() argument
1554 return percpu->scribble + i * percpu->scribble_obj_size; in to_addr_page()
1559 struct raid5_percpu *percpu, int i) in to_addr_conv()
1561 return (void *) (to_addr_page(percpu, i) + sh->disks + 2); in to_addr_conv()
1568 to_addr_offs(struct stripe_head *sh, struct raid5_percpu *percpu) in to_addr_offs() argument
1570 return (unsigned int *) (to_addr_conv(sh, percpu, 0) + sh->disks + 2); in to_addr_offs()
1574 ops_run_compute5(struct stripe_head *sh, struct raid5_percpu *percpu) in ops_run_compute5() argument
1577 struct page **xor_srcs = to_addr_page(percpu, 0); in ops_run_compute5()
1578 unsigned int *off_srcs = to_addr_offs(sh, percpu); in ops_run_compute5()
1558 to_addr_conv(struct stripe_head *sh, struct raid5_percpu *percpu, int i) to_addr_conv() argument
1671 ops_run_compute6_1(struct stripe_head *sh, struct raid5_percpu *percpu) ops_run_compute6_1() argument
1735 ops_run_compute6_2(struct stripe_head *sh, struct raid5_percpu *percpu) ops_run_compute6_2() argument
1870 ops_run_prexor5(struct stripe_head *sh, struct raid5_percpu *percpu, struct dma_async_tx_descriptor *tx) ops_run_prexor5() argument
1912 ops_run_prexor6(struct stripe_head *sh, struct raid5_percpu *percpu, struct dma_async_tx_descriptor *tx) ops_run_prexor6() argument
2053 ops_run_reconstruct5(struct stripe_head *sh, struct raid5_percpu *percpu, struct dma_async_tx_descriptor *tx) ops_run_reconstruct5() argument
2151 ops_run_reconstruct6(struct stripe_head *sh, struct raid5_percpu *percpu, struct dma_async_tx_descriptor *tx) ops_run_reconstruct6() argument
2225 ops_run_check_p(struct stripe_head *sh, struct raid5_percpu *percpu) ops_run_check_p() argument
2266 ops_run_check_pq(struct stripe_head *sh, struct raid5_percpu *percpu, int checkp) ops_run_check_pq() argument
2295 struct raid5_percpu *percpu; raid_run_ops() local
2472 scribble_alloc(struct raid5_percpu *percpu, int num, int cnt) scribble_alloc() argument
2514 struct raid5_percpu *percpu; resize_chunks() local
7358 free_scratch_buffer(struct r5conf *conf, struct raid5_percpu *percpu) free_scratch_buffer() argument
7366 alloc_scratch_buffer(struct r5conf *conf, struct raid5_percpu *percpu) alloc_scratch_buffer() argument
7428 struct raid5_percpu *percpu = per_cpu_ptr(conf->percpu, cpu); raid456_cpu_up_prepare() local
[all...]
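
The raid5.c matches show per-CPU scratch ("scribble") buffers that are allocated for each CPU and refreshed from CPU hotplug callbacks via per_cpu_ptr(conf->percpu, cpu). A loose sketch of that shape, with hypothetical demo_* names and a simplified hotplug registration (the real raid5 code uses a multi-instance hotplug state and sizes the scribble buffers from the array geometry):

#include <linux/percpu.h>
#include <linux/cpuhotplug.h>
#include <linux/slab.h>
#include <linux/errno.h>

#define DEMO_SCRIBBLE_SIZE 4096	/* fixed for the sketch; raid5 sizes this per stripe geometry */

struct demo_percpu {
	void *scribble;
};

static struct demo_percpu __percpu *demo_pcpu;

static int demo_cpu_up_prepare(unsigned int cpu)
{
	struct demo_percpu *p = per_cpu_ptr(demo_pcpu, cpu);

	if (!p->scribble) {
		p->scribble = kzalloc(DEMO_SCRIBBLE_SIZE, GFP_KERNEL);
		if (!p->scribble)
			return -ENOMEM;
	}
	return 0;
}

static int demo_cpu_dead(unsigned int cpu)
{
	struct demo_percpu *p = per_cpu_ptr(demo_pcpu, cpu);

	kfree(p->scribble);
	p->scribble = NULL;
	return 0;
}

static int demo_init(void)
{
	int ret;

	demo_pcpu = alloc_percpu(struct demo_percpu);
	if (!demo_pcpu)
		return -ENOMEM;
	/* prepare-stage callbacks run as each CPU is brought up or torn down */
	ret = cpuhp_setup_state(CPUHP_BP_PREPARE_DYN, "demo:prepare",
				demo_cpu_up_prepare, demo_cpu_dead);
	return ret < 0 ? ret : 0;
}
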
