
Searched refs: max_entries (results 1 - 25 of 653), sorted by relevance


/kernel/linux/linux-5.10/tools/testing/selftests/bpf/map_tests/
htab_map_batch_ops.c
13 static void map_batch_update(int map_fd, __u32 max_entries, int *keys, in map_batch_update() argument
27 for (i = 0; i < max_entries; i++) { in map_batch_update()
36 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
40 static void map_batch_verify(int *visited, __u32 max_entries, in map_batch_verify() argument
50 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
51 for (i = 0; i < max_entries; i++) { in map_batch_verify()
70 for (i = 0; i < max_entries; i++) { in map_batch_verify()
81 const __u32 max_entries = 10; in __test_map_lookup_and_delete_batch() local
82 value pcpu_values[max_entries]; in __test_map_lookup_and_delete_batch()
98 xattr.max_entries in __test_map_lookup_and_delete_batch()
[all...]
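The batch-update pattern these htab selftests exercise boils down to a short userspace sequence; a minimal sketch, assuming a hypothetical 10-entry hash map with 4-byte keys and values (names are illustrative, not taken from the file):

#include <errno.h>
#include <stdio.h>
#include <unistd.h>
#include <bpf/bpf.h>

int main(void)
{
	const __u32 max_entries = 10;
	int keys[10], values[10];
	__u32 count = max_entries;
	DECLARE_LIBBPF_OPTS(bpf_map_batch_opts, opts,
		.elem_flags = 0,
		.flags = 0,
	);

	/* hypothetical hash map: 4-byte keys and values, 10 slots */
	int map_fd = bpf_map_create(BPF_MAP_TYPE_HASH, "batch_demo",
				    sizeof(int), sizeof(int), max_entries, NULL);
	if (map_fd < 0)
		return 1;

	for (__u32 i = 0; i < max_entries; i++) {
		keys[i] = i;
		values[i] = i + 100;
	}

	/* count is in/out: on return it holds how many entries were written */
	if (bpf_map_update_batch(map_fd, keys, values, &count, &opts))
		fprintf(stderr, "batch update failed: %d\n", -errno);

	close(map_fd);
	return 0;
}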
array_map_batch_ops.c
12 static void map_batch_update(int map_fd, __u32 max_entries, int *keys, in map_batch_update() argument
21 for (i = 0; i < max_entries; i++) { in map_batch_update()
26 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
30 static void map_batch_verify(int *visited, __u32 max_entries, in map_batch_verify() argument
35 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
36 for (i = 0; i < max_entries; i++) { in map_batch_verify()
41 for (i = 0; i < max_entries; i++) { in map_batch_verify()
57 const __u32 max_entries = 10; in test_array_map_batch_ops() local
66 xattr.max_entries = max_entries; in test_array_map_batch_ops()
[all...]
/kernel/linux/linux-6.6/tools/testing/selftests/bpf/map_tests/
htab_map_batch_ops.c
14 static void map_batch_update(int map_fd, __u32 max_entries, int *keys, in map_batch_update() argument
28 for (i = 0; i < max_entries; i++) { in map_batch_update()
37 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
41 static void map_batch_verify(int *visited, __u32 max_entries, in map_batch_verify() argument
51 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
52 for (i = 0; i < max_entries; i++) { in map_batch_verify()
71 for (i = 0; i < max_entries; i++) { in map_batch_verify()
82 const __u32 max_entries = 10; in __test_map_lookup_and_delete_batch() local
83 value pcpu_values[max_entries]; in __test_map_lookup_and_delete_batch()
93 "hash_map", sizeof(int), sizeof(int), max_entries, NUL in __test_map_lookup_and_delete_batch()
[all...]
lpm_trie_map_batch_ops.c
22 static void map_batch_update(int map_fd, __u32 max_entries, in map_batch_update() argument
33 for (i = 0; i < max_entries; i++) { in map_batch_update()
40 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
44 static void map_batch_verify(int *visited, __u32 max_entries, in map_batch_verify() argument
51 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
52 for (i = 0; i < max_entries; i++) { in map_batch_verify()
60 for (i = 0; i < max_entries; i++) { in map_batch_verify()
72 const __u32 max_entries = 10; in test_lpm_trie_map_batch_ops() local
82 max_entries, &create_opts); in test_lpm_trie_map_batch_ops()
86 keys = malloc(max_entries * sizeo in test_lpm_trie_map_batch_ops()
[all...]
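For the LPM-trie variant, max_entries caps the number of stored prefixes and the map has to be created with BPF_F_NO_PREALLOC; a hedged sketch with a made-up key layout in the spirit of struct bpf_lpm_trie_key:

#include <arpa/inet.h>
#include <bpf/bpf.h>

/* illustrative key: prefix length in bits followed by an IPv4 address */
struct lpm_key {
	__u32 prefixlen;
	__u32 ipv4;
};

int create_and_fill_lpm(void)
{
	const __u32 max_entries = 10;
	LIBBPF_OPTS(bpf_map_create_opts, opts, .map_flags = BPF_F_NO_PREALLOC);
	struct lpm_key key = { .prefixlen = 24, .ipv4 = inet_addr("192.168.1.0") };
	int value = 1;

	int fd = bpf_map_create(BPF_MAP_TYPE_LPM_TRIE, "lpm_demo",
				sizeof(key), sizeof(value), max_entries, &opts);
	if (fd < 0)
		return fd;

	/* single-element update; the selftest drives the same kind of map
	 * through bpf_map_update_batch() instead */
	return bpf_map_update_elem(fd, &key, &value, BPF_ANY);
}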
array_map_batch_ops.c
15 static void map_batch_update(int map_fd, __u32 max_entries, int *keys, in map_batch_update() argument
25 for (i = 0; i < max_entries; i++) { in map_batch_update()
36 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
40 static void map_batch_verify(int *visited, __u32 max_entries, int *keys, in map_batch_verify() argument
46 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
47 for (i = 0; i < max_entries; i++) { in map_batch_verify()
64 for (i = 0; i < max_entries; i++) { in map_batch_verify()
74 const __u32 max_entries = 10; in __test_map_lookup_and_update_batch() local
84 "array_map", sizeof(int), sizeof(__s64), max_entries, NULL); in __test_map_lookup_and_update_batch()
92 keys = calloc(max_entries, sizeo in __test_map_lookup_and_update_batch()
[all...]
map_in_map_batch_ops.c
127 int err, max_entries = OUTER_MAP_ENTRIES; in fetch_and_validate() local
131 fetched_keys = calloc(max_entries, value_size); in fetch_and_validate()
132 fetched_values = calloc(max_entries, value_size); in fetch_and_validate()
138 step_size <= max_entries; in fetch_and_validate()
171 CHECK((total_fetched != max_entries), in fetch_and_validate()
173 "total_fetched(%d) and max_entries(%d) error: (%d):%s\n", in fetch_and_validate()
174 total_fetched, max_entries, errno, strerror(errno)); in fetch_and_validate()
190 __u32 max_entries = OUTER_MAP_ENTRIES; in _map_in_map_batch_ops() local
197 outer_map_keys = calloc(max_entries, value_size); in _map_in_map_batch_ops()
198 inner_map_fds = calloc(max_entries, value_siz in _map_in_map_batch_ops()
[all...]
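map_in_map_batch_ops.c pulls the outer map's entries back in chunks; the core loop is roughly the following sketch, which assumes 4-byte keys and values and keeps the usual in_batch/out_batch and ENOENT conventions of the batch API:

#include <errno.h>
#include <stdlib.h>
#include <bpf/bpf.h>

/* Fetch every entry of a hash-style map in chunks of step_size keys;
 * returns the number of entries fetched, or -1 on error. */
static int fetch_all(int map_fd, __u32 max_entries, __u32 step_size)
{
	DECLARE_LIBBPF_OPTS(bpf_map_batch_opts, opts);
	__u32 *keys = calloc(max_entries, sizeof(*keys));
	__u32 *vals = calloc(max_entries, sizeof(*vals));
	__u64 batch = 0;
	__u32 total = 0;
	int err = 0;

	if (!keys || !vals) {
		err = -1;
		errno = ENOMEM;
		goto out;
	}

	while (total < max_entries) {
		__u32 count = step_size;

		/* pass NULL as in_batch on the first call, then the token
		 * returned through out_batch on every following call */
		err = bpf_map_lookup_batch(map_fd, total ? &batch : NULL, &batch,
					   keys + total, vals + total,
					   &count, &opts);
		if (err && errno != ENOENT)
			break;
		total += count;
		if (err)	/* ENOENT: the whole map has been read */
			break;
	}

out:
	free(keys);
	free(vals);
	return (err && errno != ENOENT) ? -1 : (int)total;
}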
/kernel/linux/linux-5.10/tools/testing/selftests/bpf/progs/
test_btf_map_in_map.c
8 __uint(max_entries, 1);
16 __uint(max_entries, 2);
23 __uint(max_entries, 3);
29 /* changing max_entries to 2 will fail during load
31 __uint(max_entries, 1);
47 __uint(max_entries, 3);
56 __uint(max_entries, 5);
63 __uint(max_entries, 3);
69 __uint(max_entries, 1);
83 __uint(max_entries,
[all...]
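These hits are BTF-declared map definitions, where __uint(max_entries, N) fixes the map size at load time; a sketch in the same style, using illustrative map names, for an array-of-maps with one inner map wired in:

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct inner_map {
	__uint(type, BPF_MAP_TYPE_ARRAY);
	__uint(max_entries, 1);        /* one slot in each inner map */
	__type(key, int);
	__type(value, int);
} inner_map1 SEC(".maps");

struct {
	__uint(type, BPF_MAP_TYPE_ARRAY_OF_MAPS);
	__uint(max_entries, 3);        /* three inner-map slots */
	__type(key, int);
	__array(values, struct inner_map);
} outer_arr SEC(".maps") = {
	.values = { [0] = &inner_map1 },
};

char _license[] SEC("license") = "GPL";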
map_ptr_kern.c
37 __u32 max_entries; member
43 __u32 value_size, __u32 max_entries) in check_bpf_map_fields()
48 VERIFY(map->max_entries == max_entries); in check_bpf_map_fields()
61 VERIFY(indirect->max_entries == direct->max_entries); in check_bpf_map_ptr()
69 __u32 key_size, __u32 value_size, __u32 max_entries) in check()
73 max_entries)); in check()
107 __uint(max_entries, MAX_ENTRIES);
143 __uint(max_entries, MAX_ENTRIE
42 check_bpf_map_fields(struct bpf_map *map, __u32 key_size, __u32 value_size, __u32 max_entries) check_bpf_map_fields() argument
68 check(struct bpf_map *indirect, struct bpf_map *direct, __u32 key_size, __u32 value_size, __u32 max_entries) check() argument
[all...]
/kernel/linux/linux-6.6/tools/testing/selftests/bpf/progs/
test_btf_map_in_map.c
8 __uint(max_entries, 1);
16 __uint(max_entries, 2);
23 __uint(max_entries, 3);
29 /* changing max_entries to 2 will fail during load
31 __uint(max_entries, 1);
47 __uint(max_entries, 3);
56 __uint(max_entries, 5);
63 __uint(max_entries, 3);
69 __uint(max_entries, 1);
83 __uint(max_entries,
[all...]
map_ptr_kern.c
34 __u32 max_entries; member
39 __u32 value_size, __u32 max_entries) in check_bpf_map_fields()
44 VERIFY(map->max_entries == max_entries); in check_bpf_map_fields()
56 VERIFY(indirect->max_entries == direct->max_entries); in check_bpf_map_ptr()
63 __u32 key_size, __u32 value_size, __u32 max_entries) in check()
67 max_entries)); in check()
101 __uint(max_entries, MAX_ENTRIES);
142 __uint(max_entries, MAX_ENTRIE
38 check_bpf_map_fields(struct bpf_map *map, __u32 key_size, __u32 value_size, __u32 max_entries) check_bpf_map_fields() argument
62 check(struct bpf_map *indirect, struct bpf_map *direct, __u32 key_size, __u32 value_size, __u32 max_entries) check() argument
[all...]
lsm.c
16 __uint(max_entries, 1);
23 __uint(max_entries, 1);
30 __uint(max_entries, 1);
37 __uint(max_entries, 1);
44 __uint(max_entries, 1);
51 __uint(max_entries, 1);
58 __uint(max_entries, 1);
65 __uint(max_entries, 1);
75 __uint(max_entries, 1);
test_unpriv_bpf_disabled.c
17 __uint(max_entries, 1);
24 __uint(max_entries, 1);
31 __uint(max_entries, 1);
38 __uint(max_entries, 1);
51 __uint(max_entries, 1 << 12);
56 __uint(max_entries, 1);
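The 1 << 12 value above is typical of a ring buffer map, where max_entries is the buffer size in bytes and must be a power of two and a multiple of the page size; a minimal declaration sketch (the map type here is an assumption, not read from the file):

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_RINGBUF);
	/* 4 KiB: must be a power of two and a multiple of the page size */
	__uint(max_entries, 1 << 12);
} rb SEC(".maps");

char _license[] SEC("license") = "GPL";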
/kernel/linux/linux-5.10/drivers/md/persistent-data/
dm-array.c
25 __le32 max_entries; member
47 bh_le->csum = cpu_to_le32(dm_bm_checksum(&bh_le->max_entries, in array_block_prepare_for_write()
66 csum_disk = cpu_to_le32(dm_bm_checksum(&bh_le->max_entries, in array_block_check()
153 uint32_t max_entries, in alloc_ablock()
163 (*ab)->max_entries = cpu_to_le32(max_entries); in alloc_ablock()
182 BUG_ON(new_nr > le32_to_cpu(ab->max_entries)); in fill_ablock()
206 BUG_ON(new_nr > le32_to_cpu(ab->max_entries)); in trim_ablock()
251 * / max_entries).
352 uint32_t max_entries, in insert_new_ablock()
152 alloc_ablock(struct dm_array_info *info, size_t size_of_block, uint32_t max_entries, struct dm_block **block, struct array_block **ab) alloc_ablock() argument
351 insert_new_ablock(struct dm_array_info *info, size_t size_of_block, uint32_t max_entries, unsigned block_index, uint32_t nr, const void *value, dm_block_t *root) insert_new_ablock() argument
371 insert_full_ablocks(struct dm_array_info *info, size_t size_of_block, unsigned begin_block, unsigned end_block, unsigned max_entries, const void *value, dm_block_t *root) insert_full_ablocks() argument
409 unsigned max_entries; global() member
727 unsigned block_index, end_block, size_of_block, max_entries; dm_array_new() local
775 unsigned entry, max_entries; dm_array_get_value() local
803 unsigned max_entries; array_set_value() local
862 unsigned nr_entries, max_entries; walk_ablock() local
[all...]
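In dm-array, max_entries is the number of fixed-size values that fit in one on-disk array block after its header; conceptually the derivation looks like the helper below, with a deliberately simplified header layout that is not the exact on-disk format:

#include <stdint.h>
#include <stddef.h>

/* simplified stand-in for the on-disk array block header */
struct array_block_header {
	uint32_t csum;
	uint32_t max_entries;
	uint32_t nr_entries;
	uint32_t value_size;
	uint64_t blocknr;
};

/* how many fixed-size values fit in one block after the header */
static uint32_t calc_max_entries(size_t size_of_value, size_t size_of_block)
{
	return (uint32_t)((size_of_block - sizeof(struct array_block_header)) / size_of_value);
}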
dm-btree-remove.c
72 BUG_ON(nr_entries + shift > le32_to_cpu(n->header.max_entries)); in node_shift()
90 BUG_ON(nr_left + shift > le32_to_cpu(left->header.max_entries)); in node_copy()
98 BUG_ON(shift > le32_to_cpu(right->header.max_entries)); in node_copy()
133 return le32_to_cpu(n->header.max_entries) / 3; in merge_threshold()
177 uint32_t max_entries = le32_to_cpu(left->header.max_entries); in shift() local
178 uint32_t r_max_entries = le32_to_cpu(right->header.max_entries); in shift()
180 BUG_ON(max_entries != r_max_entries); in shift()
181 BUG_ON(nr_left - count > max_entries); in shift()
182 BUG_ON(nr_right + count > max_entries); in shift()
274 uint32_t max_entries = le32_to_cpu(left->header.max_entries); delete_center_node() local
306 uint32_t max_entries = le32_to_cpu(left->header.max_entries); redistribute3() local
[all...]
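The btree removal path derives a rebalancing threshold from max_entries (one third of the node's capacity, as snippet line 133 above shows); stripped of the btree plumbing, the computation amounts to this simplified sketch:

#include <stdint.h>

/* simplified node header: only the fields the threshold needs */
struct node_header {
	uint32_t nr_entries;   /* entries currently in the node */
	uint32_t max_entries;  /* capacity of the node */
};

/* the removal code compares sibling occupancy against (multiples of) this
 * value to decide whether to merge nodes or redistribute entries */
static uint32_t merge_threshold(const struct node_header *n)
{
	return n->max_entries / 3;
}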
/kernel/linux/linux-6.6/drivers/md/persistent-data/
dm-array.c
26 __le32 max_entries; member
48 bh_le->csum = cpu_to_le32(dm_bm_checksum(&bh_le->max_entries, in array_block_prepare_for_write()
67 csum_disk = cpu_to_le32(dm_bm_checksum(&bh_le->max_entries, in array_block_check()
153 uint32_t max_entries, in alloc_ablock()
163 (*ab)->max_entries = cpu_to_le32(max_entries); in alloc_ablock()
181 BUG_ON(new_nr > le32_to_cpu(ab->max_entries)); in fill_ablock()
204 BUG_ON(new_nr > le32_to_cpu(ab->max_entries)); in trim_ablock()
249 * / max_entries).
350 uint32_t max_entries, in insert_new_ablock()
152 alloc_ablock(struct dm_array_info *info, size_t size_of_block, uint32_t max_entries, struct dm_block **block, struct array_block **ab) alloc_ablock() argument
349 insert_new_ablock(struct dm_array_info *info, size_t size_of_block, uint32_t max_entries, unsigned int block_index, uint32_t nr, const void *value, dm_block_t *root) insert_new_ablock() argument
369 insert_full_ablocks(struct dm_array_info *info, size_t size_of_block, unsigned int begin_block, unsigned int end_block, unsigned int max_entries, const void *value, dm_block_t *root) insert_full_ablocks() argument
407 unsigned int max_entries; global() member
737 unsigned int block_index, end_block, size_of_block, max_entries; dm_array_new() local
785 unsigned int entry, max_entries; dm_array_get_value() local
813 unsigned int max_entries; array_set_value() local
872 unsigned int nr_entries, max_entries; walk_ablock() local
[all...]
/kernel/linux/linux-6.6/tools/testing/selftests/bpf/prog_tests/
btf.c
72 __u32 max_entries; member
140 .max_entries = 4,
195 .max_entries = 4,
220 .max_entries = 4,
261 .max_entries = 4,
306 .max_entries = 1,
328 .max_entries = 1,
350 .max_entries = 1,
372 .max_entries = 1,
397 .max_entries
[all...]
for_each.c
13 int i, err, max_entries; in test_hash_map() local
29 max_entries = bpf_map__max_entries(skel->maps.hashmap); in test_hash_map()
30 for (i = 0; i < max_entries; i++) { in test_hash_map()
60 ASSERT_EQ(skel->bss->hashmap_elems, max_entries, "hashmap_elems"); in test_hash_map()
79 __u32 key, num_cpus, max_entries; in test_array_map() local
96 max_entries = bpf_map__max_entries(skel->maps.arraymap); in test_array_map()
97 for (i = 0; i < max_entries; i++) { in test_array_map()
101 if (i != max_entries - 1) in test_array_map()
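On the userspace side, for_each.c reads the size back with bpf_map__max_entries() and loops over every key; a generic sketch, assuming `map` comes from an already-loaded bpf_object and has 4-byte keys with 8-byte values:

#include <bpf/bpf.h>
#include <bpf/libbpf.h>

/* populate an already-created array/hash map with key i -> value i */
static int fill_map(struct bpf_map *map)
{
	__u32 max_entries = bpf_map__max_entries(map);
	int fd = bpf_map__fd(map);

	for (__u32 i = 0; i < max_entries; i++) {
		__u64 val = i;	/* assumes an 8-byte value type */

		if (bpf_map_update_elem(fd, &i, &val, BPF_ANY))
			return -1;
	}
	return 0;
}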
/kernel/linux/linux-6.6/kernel/bpf/
arraymap.c
26 for (i = 0; i < array->map.max_entries; i++) { in bpf_array_free_percpu()
37 for (i = 0; i < array->map.max_entries; i++) { in bpf_array_alloc_percpu()
58 if (attr->max_entries == 0 || attr->key_size != 4 || in array_map_alloc_check()
84 u32 elem_size, index_mask, max_entries; in array_map_alloc() local
91 max_entries = attr->max_entries; in array_map_alloc()
93 /* On 32 bit archs roundup_pow_of_two() with max_entries that has in array_map_alloc()
97 mask64 = fls_long(max_entries - 1); in array_map_alloc()
106 max_entries = index_mask + 1; in array_map_alloc()
108 if (max_entries < att in array_map_alloc()
[all...]
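array_map_alloc() widens max_entries to the next power of two so a single AND with index_mask keeps even speculative loads inside the array; the mask computation hinted at above can be sketched in plain C (fls_long is emulated here rather than using the kernel helper):

#include <stdint.h>

/* emulate the kernel's fls_long(): index of the highest set bit, 1-based */
static unsigned int fls_long_emulated(unsigned long x)
{
	unsigned int bits = 0;

	while (x) {
		bits++;
		x >>= 1;
	}
	return bits;
}

/* round an index mask up so that (i & mask) can never exceed the backing array;
 * doing the shift in 64 bits avoids undefined behaviour when max_entries has
 * its top bit set in 32-bit space */
static uint32_t array_index_mask(uint32_t max_entries)
{
	uint64_t mask64 = fls_long_emulated(max_entries - 1);

	mask64 = 1ULL << mask64;
	mask64 -= 1;
	return (uint32_t)mask64;
}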
/kernel/linux/linux-5.10/tools/testing/selftests/bpf/prog_tests/
btf.c
75 __u32 max_entries; member
143 .max_entries = 4,
198 .max_entries = 4,
223 .max_entries = 4,
264 .max_entries = 4,
309 .max_entries = 1,
331 .max_entries = 1,
353 .max_entries = 1,
375 .max_entries = 1,
400 .max_entries
[all...]
/kernel/linux/linux-5.10/arch/loongarch/kernel/
stacktrace.c
23 if (trace->nr_entries >= trace->max_entries) in consume_entry()
32 return trace->nr_entries < trace->max_entries; in consume_entry()
80 WARN_ON(trace->nr_entries || !trace->max_entries); in save_stack_trace()
92 WARN_ON(trace->nr_entries || !trace->max_entries); in save_stack_trace_regs()
103 WARN_ON(trace->nr_entries || !trace->max_entries); in save_stack_trace_tsk()
205 if (trace->nr_entries < trace->max_entries) in __save_stack_trace_user()
208 while (trace->nr_entries < trace->max_entries && fp && !((unsigned long)fp & 0xf)) { in __save_stack_trace_user()
231 if (trace->nr_entries < trace->max_entries) in save_stack_trace_user()
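The loongarch stack tracer stores return addresses only while nr_entries is below max_entries; stripped of architecture details, the consume step amounts to something like this:

#include <stdbool.h>

struct stack_trace {
	unsigned long *entries;     /* caller-provided buffer */
	unsigned int nr_entries;    /* how many slots are filled */
	unsigned int max_entries;   /* capacity of the buffer */
};

/* store one address; return false once the buffer is full so the walk stops */
static bool consume_entry(struct stack_trace *trace, unsigned long addr)
{
	if (trace->nr_entries >= trace->max_entries)
		return false;

	trace->entries[trace->nr_entries++] = addr;
	return trace->nr_entries < trace->max_entries;
}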
/kernel/linux/linux-5.10/kernel/bpf/
arraymap.c
25 for (i = 0; i < array->map.max_entries; i++) { in bpf_array_free_percpu()
36 for (i = 0; i < array->map.max_entries; i++) { in bpf_array_alloc_percpu()
57 if (attr->max_entries == 0 || attr->key_size != 4 || in array_map_alloc_check()
85 u32 elem_size, index_mask, max_entries; in array_map_alloc() local
93 max_entries = attr->max_entries; in array_map_alloc()
95 /* On 32 bit archs roundup_pow_of_two() with max_entries that has in array_map_alloc()
99 mask64 = fls_long(max_entries - 1); in array_map_alloc()
108 max_entries = index_mask + 1; in array_map_alloc()
110 if (max_entries < att in array_map_alloc()
[all...]
/kernel/linux/linux-5.10/samples/bpf/
map_perf_test_kern.c
23 __uint(max_entries, MAX_ENTRIES);
30 __uint(max_entries, 10000);
37 __uint(max_entries, 10000);
45 __uint(max_entries, MAX_ENTRIES);
52 __uint(max_entries, MAX_NR_CPUS);
64 __uint(max_entries, MAX_ENTRIES);
71 __uint(max_entries, MAX_ENTRIES);
79 __uint(max_entries, MAX_ENTRIES);
87 __uint(max_entries, 10000);
95 __uint(max_entries, MAX_ENTRIE
[all...]
/kernel/linux/linux-5.10/tools/lib/perf/
cpumap.c
104 int max_entries = 0; in perf_cpu_map__read() local
120 if (new_max >= max_entries) { in perf_cpu_map__read()
121 max_entries = new_max + MAX_NR_CPUS / 2; in perf_cpu_map__read()
122 tmp = realloc(tmp_cpus, max_entries * sizeof(int)); in perf_cpu_map__read()
131 if (nr_cpus == max_entries) { in perf_cpu_map__read()
132 max_entries += MAX_NR_CPUS; in perf_cpu_map__read()
133 tmp = realloc(tmp_cpus, max_entries * sizeof(int)); in perf_cpu_map__read()
178 int max_entries = 0; in perf_cpu_map__new() local
221 if (nr_cpus == max_entries) { in perf_cpu_map__new()
222 max_entries in perf_cpu_map__new()
[all...]
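perf's cpumap grows its temporary CPU array on demand: whenever the next entry would not fit, max_entries is bumped and the buffer reallocated; a self-contained sketch of that pattern (the growth step is illustrative, perf uses MAX_NR_CPUS-based increments):

#include <stdlib.h>

#define GROW_STEP 32  /* illustrative growth increment */

/* append a value to a growable int array, resizing it when full;
 * returns 0 on success, -1 on allocation failure */
static int append_cpu(int **cpus, int *nr_cpus, int *max_entries, int cpu)
{
	if (*nr_cpus == *max_entries) {
		int *tmp = realloc(*cpus, (*max_entries + GROW_STEP) * sizeof(int));

		if (!tmp)
			return -1;
		*cpus = tmp;
		*max_entries += GROW_STEP;
	}
	(*cpus)[(*nr_cpus)++] = cpu;
	return 0;
}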
/kernel/linux/linux-6.6/samples/bpf/
map_perf_test.bpf.c
21 __uint(max_entries, MAX_ENTRIES);
28 __uint(max_entries, 10000);
35 __uint(max_entries, 10000);
43 __uint(max_entries, MAX_ENTRIES);
50 __uint(max_entries, MAX_NR_CPUS);
62 __uint(max_entries, MAX_ENTRIES);
69 __uint(max_entries, MAX_ENTRIES);
77 __uint(max_entries, MAX_ENTRIES);
85 __uint(max_entries, 10000);
93 __uint(max_entries, MAX_ENTRIE
[all...]
/kernel/linux/linux-6.6/net/xdp/
xskmap.c
70 if (attr->max_entries == 0 || attr->key_size != 4 || in xsk_map_alloc()
76 size = struct_size(m, xsk_map, attr->max_entries); in xsk_map_alloc()
92 return struct_size(m, xsk_map, map->max_entries) + in xsk_map_mem_usage()
110 if (index >= m->map.max_entries) { in xsk_map_get_next_key()
115 if (index == m->map.max_entries - 1) in xsk_map_get_next_key()
127 *insn++ = BPF_JMP_IMM(BPF_JGE, ret, map->max_entries, 5); in xsk_map_gen_lookup()
145 if (key >= map->max_entries) in __xsk_map_lookup_elem()
174 if (unlikely(i >= m->map.max_entries)) in xsk_map_update_elem()
229 if (k >= map->max_entries) in xsk_map_delete_elem()
262 return meta0->max_entries in xsk_map_meta_equal()
[all...]
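xskmap, like the other array-backed BPF maps, rejects any key at or beyond max_entries before touching the backing array; reduced to its essentials, the check looks like this illustrative sketch:

#include <stddef.h>

struct demo_map {
	unsigned int max_entries;
	void *slots[];              /* one slot per possible key */
};

/* return the slot for key, or NULL if the key is out of range */
static void *demo_lookup(struct demo_map *m, unsigned int key)
{
	if (key >= m->max_entries)
		return NULL;

	return m->slots[key];
}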
