Searched refs:cpu_map (Results 1 - 25 of 74) sorted by relevance

/kernel/linux/linux-5.10/kernel/sched/
topology.c 280 static void perf_domain_debug(const struct cpumask *cpu_map, in perf_domain_debug() argument
286 printk(KERN_DEBUG "root_domain %*pbl:", cpumask_pr_args(cpu_map)); in perf_domain_debug()
347 static bool build_perf_domains(const struct cpumask *cpu_map) in build_perf_domains() argument
349 int i, nr_pd = 0, nr_ps = 0, nr_cpus = cpumask_weight(cpu_map); in build_perf_domains()
351 int cpu = cpumask_first(cpu_map); in build_perf_domains()
363 cpumask_pr_args(cpu_map)); in build_perf_domains()
371 cpumask_pr_args(cpu_map)); in build_perf_domains()
375 for_each_cpu(i, cpu_map) { in build_perf_domains()
389 cpumask_pr_args(cpu_map)); in build_perf_domains()
411 cpumask_pr_args(cpu_map)); in build_perf_domains()
1306 __free_domain_allocs(struct s_data *d, enum s_alloc what, const struct cpumask *cpu_map) __free_domain_allocs() argument
1326 __visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map) __visit_domain_allocation_hell() argument
1399 sd_init(struct sched_domain_topology_level *tl, const struct cpumask *cpu_map, struct sched_domain *child, int dflags, int cpu) sd_init() argument
1456 cpumask_and(sched_domain_span(sd), cpu_map, tl->mask(cpu)); sd_init() local
1820 __sdt_alloc(const struct cpumask *cpu_map) __sdt_alloc() argument
1889 __sdt_free(const struct cpumask *cpu_map) __sdt_free() argument
1925 build_sched_domain(struct sched_domain_topology_level *tl, const struct cpumask *cpu_map, struct sched_domain_attr *attr, struct sched_domain *child, int dflags, int cpu) build_sched_domain() argument
1959 topology_span_sane(struct sched_domain_topology_level *tl, const struct cpumask *cpu_map, int cpu) topology_span_sane() argument
1996 asym_cpu_capacity_level(const struct cpumask *cpu_map) asym_cpu_capacity_level() argument
2055 build_sched_domains(const struct cpumask *cpu_map, struct sched_domain_attr *attr) build_sched_domains() argument
2219 sched_init_domains(const struct cpumask *cpu_map) sched_init_domains() argument
2243 detach_destroy_domains(const struct cpumask *cpu_map) detach_destroy_domains() argument
[all...]
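
The topology.c hits above (and their 6.6 counterparts below) all go through the kernel cpumask API. A minimal sketch of those primitives follows: a toy module, not code from the tree, exercising cpumask_weight(), cpumask_first(), for_each_cpu() and the %*pbl printk format fed by cpumask_pr_args(), the same calls perf_domain_debug() and build_perf_domains() make.

/* Toy module sketch; builds against kernel headers, not from the tree. */
#include <linux/module.h>
#include <linux/cpumask.h>
#include <linux/printk.h>

static int __init cpumask_demo_init(void)
{
	const struct cpumask *cpu_map = cpu_online_mask;
	unsigned int nr_cpus = cpumask_weight(cpu_map);	/* population count */
	unsigned int first = cpumask_first(cpu_map);	/* lowest set bit */
	int cpu;

	pr_info("cpu_map %*pbl: %u CPUs, first=%u\n",
		cpumask_pr_args(cpu_map), nr_cpus, first);

	for_each_cpu(cpu, cpu_map)			/* iterate set bits */
		pr_info("  cpu%d is in the map\n", cpu);

	return 0;
}

static void __exit cpumask_demo_exit(void) { }

module_init(cpumask_demo_init);
module_exit(cpumask_demo_exit);
MODULE_LICENSE("GPL");
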
/kernel/linux/linux-6.6/kernel/sched/
topology.c 306 static void perf_domain_debug(const struct cpumask *cpu_map, in perf_domain_debug() argument
312 printk(KERN_DEBUG "root_domain %*pbl:", cpumask_pr_args(cpu_map)); in perf_domain_debug()
374 static bool build_perf_domains(const struct cpumask *cpu_map) in build_perf_domains() argument
376 int i, nr_pd = 0, nr_ps = 0, nr_cpus = cpumask_weight(cpu_map); in build_perf_domains()
378 int cpu = cpumask_first(cpu_map); in build_perf_domains()
390 cpumask_pr_args(cpu_map)); in build_perf_domains()
398 cpumask_pr_args(cpu_map)); in build_perf_domains()
405 cpumask_pr_args(cpu_map)); in build_perf_domains()
410 for_each_cpu(i, cpu_map) { in build_perf_domains()
424 cpumask_pr_args(cpu_map)); in build_perf_domains()
1352 asym_cpu_capacity_classify(const struct cpumask *sd_span, const struct cpumask *cpu_map) asym_cpu_capacity_classify() argument
1476 __free_domain_allocs(struct s_data *d, enum s_alloc what, const struct cpumask *cpu_map) __free_domain_allocs() argument
1496 __visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map) __visit_domain_allocation_hell() argument
1568 sd_init(struct sched_domain_topology_level *tl, const struct cpumask *cpu_map, struct sched_domain *child, int cpu) sd_init() argument
2197 __sdt_alloc(const struct cpumask *cpu_map) __sdt_alloc() argument
2266 __sdt_free(const struct cpumask *cpu_map) __sdt_free() argument
2302 build_sched_domain(struct sched_domain_topology_level *tl, const struct cpumask *cpu_map, struct sched_domain_attr *attr, struct sched_domain *child, int cpu) build_sched_domain() argument
2336 topology_span_sane(struct sched_domain_topology_level *tl, const struct cpumask *cpu_map, int cpu) topology_span_sane() argument
2373 build_sched_domains(const struct cpumask *cpu_map, struct sched_domain_attr *attr) build_sched_domains() argument
2589 sched_init_domains(const struct cpumask *cpu_map) sched_init_domains() argument
2613 detach_destroy_domains(const struct cpumask *cpu_map) detach_destroy_domains() argument
[all...]
/kernel/linux/linux-6.6/tools/power/x86/intel-speed-select/
isst-config.c 68 struct _cpu_map *cpu_map; variable
328 if (cpu_map && cpu_map[cpu].initialized) in get_physical_package_id()
329 return cpu_map[cpu].pkg_id; in get_physical_package_id()
352 if (cpu_map && cpu_map[cpu].initialized) in get_physical_core_id()
353 return cpu_map[cpu].core_id; in get_physical_core_id()
376 if (cpu_map && cpu_map[cpu].initialized) in get_physical_die_id()
377 return cpu_map[cpu].die_id; in get_physical_die_id()
716 update_punit_cpu_info(__u32 physical_cpu, struct _cpu_map *cpu_map) update_punit_cpu_info() argument
[all...]
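
As the hits show, isst-config keeps a malloc'd cpu_map array indexed by logical CPU and consults it only when the initialized flag is set, falling back to sysfs otherwise. A stripped-down userspace sketch of that caching pattern (the field set here is illustrative; the real struct _cpu_map carries more fields):

/* Hedged sketch of the lookup-cache pattern; not the tool's actual code. */
#include <stdio.h>
#include <stdlib.h>

struct _cpu_map {		/* illustrative subset of the real struct */
	int initialized;
	int pkg_id;
	int core_id;
	int die_id;
};

static struct _cpu_map *cpu_map;

static int get_physical_package_id(int cpu)
{
	if (cpu_map && cpu_map[cpu].initialized)
		return cpu_map[cpu].pkg_id;	/* fast path: cached value */
	return -1;	/* the real tool falls back to sysfs here */
}

int main(void)
{
	int topo_max_cpus = 4;

	cpu_map = calloc(topo_max_cpus, sizeof(*cpu_map));
	if (!cpu_map)
		return 1;

	cpu_map[0] = (struct _cpu_map){ .initialized = 1, .pkg_id = 0,
					.core_id = 0, .die_id = 0 };
	printf("cpu0 pkg_id=%d\n", get_physical_package_id(0));
	free(cpu_map);
	return 0;
}
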
/kernel/linux/linux-5.10/arch/mips/kernel/
cacheinfo.c 53 static void fill_cpumask_siblings(int cpu, cpumask_t *cpu_map) in fill_cpumask_siblings() argument
59 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_siblings()
62 static void fill_cpumask_cluster(int cpu, cpumask_t *cpu_map) in fill_cpumask_cluster() argument
69 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_cluster()
/kernel/linux/linux-6.6/arch/mips/kernel/
cacheinfo.c 58 static void fill_cpumask_siblings(int cpu, cpumask_t *cpu_map) in fill_cpumask_siblings() argument
64 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_siblings()
67 static void fill_cpumask_cluster(int cpu, cpumask_t *cpu_map) in fill_cpumask_cluster() argument
74 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_cluster()
/kernel/linux/linux-5.10/tools/power/x86/intel-speed-select/
isst-config.c 66 struct _cpu_map *cpu_map; variable
558 if (cpu_map[i].pkg_id == pkg_id && in get_max_punit_core_id()
559 cpu_map[i].die_id == die_id && in get_max_punit_core_id()
560 cpu_map[i].punit_cpu_core > max_id) in get_max_punit_core_id()
561 max_id = cpu_map[i].punit_cpu_core; in get_max_punit_core_id()
597 cpu_map = malloc(sizeof(*cpu_map) * topo_max_cpus); in create_cpu_map()
598 if (!cpu_map) in create_cpu_map()
610 map.cpu_map[0].logical_cpu = i; in create_cpu_map()
613 map.cpu_map[ in create_cpu_map()
[all...]
/kernel/linux/linux-6.6/tools/testing/selftests/bpf/progs/
freplace_progmap.c 10 } cpu_map SEC(".maps");
21 return bpf_redirect_map(&cpu_map, 0, XDP_PASS); in xdp_cpumap_prog()
test_xdp_with_cpumap_helpers.c 13 } cpu_map SEC(".maps");
18 return bpf_redirect_map(&cpu_map, 1, 0); in xdp_redir_prog()
test_xdp_with_cpumap_frags_helpers.c 13 } cpu_map SEC(".maps");
/kernel/linux/linux-5.10/tools/testing/selftests/bpf/progs/
test_xdp_with_cpumap_helpers.c 13 } cpu_map SEC(".maps");
18 return bpf_redirect_map(&cpu_map, 1, 0); in xdp_redir_prog()
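
These selftest programs show the canonical shape of an XDP program that hands frames to another CPU through a CPUMAP. A self-contained sketch modeled on test_xdp_with_cpumap_helpers.c above (map size and CPU index are arbitrary choices here):

/* Minimal CPUMAP redirect program; modeled on the selftests above. */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_CPUMAP);
	__uint(max_entries, 4);
	__type(key, __u32);
	__type(value, struct bpf_cpumap_val);
} cpu_map SEC(".maps");

SEC("xdp")
int xdp_redir_prog(struct xdp_md *ctx)
{
	/* Queue the frame to the CPU stored at index 1; the low bits of the
	 * third argument are the action returned if the redirect fails
	 * (0 == XDP_ABORTED; freplace_progmap.c above uses XDP_PASS).
	 */
	return bpf_redirect_map(&cpu_map, 1, 0);
}

char _license[] SEC("license") = "GPL";
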
/kernel/linux/linux-5.10/tools/perf/util/
mmap.c 244 const struct perf_cpu_map *cpu_map = NULL; in build_node_mask() local
246 cpu_map = cpu_map__online(); in build_node_mask()
247 if (!cpu_map) in build_node_mask()
250 nr_cpus = perf_cpu_map__nr(cpu_map); in build_node_mask()
252 cpu = cpu_map->map[c]; /* map c index to online cpu index */ in build_node_mask()
/kernel/linux/linux-5.10/arch/ia64/mm/
discontig.c 183 unsigned int *cpu_map; in setup_per_cpu_areas() local
193 cpu_map = ai->groups[0].cpu_map; in setup_per_cpu_areas()
202 /* build cpu_map, units are grouped by node */ in setup_per_cpu_areas()
207 cpu_map[unit++] = cpu; in setup_per_cpu_areas()
226 * CPUs are put into groups according to node. Walk cpu_map in setup_per_cpu_areas()
232 cpu = cpu_map[unit]; in setup_per_cpu_areas()
244 gi->cpu_map = &cpu_map[unit]; in setup_per_cpu_areas()
/kernel/linux/linux-6.6/arch/ia64/mm/
discontig.c 184 unsigned int *cpu_map; in setup_per_cpu_areas() local
194 cpu_map = ai->groups[0].cpu_map; in setup_per_cpu_areas()
203 /* build cpu_map, units are grouped by node */ in setup_per_cpu_areas()
208 cpu_map[unit++] = cpu; in setup_per_cpu_areas()
227 * CPUs are put into groups according to node. Walk cpu_map in setup_per_cpu_areas()
233 cpu = cpu_map[unit]; in setup_per_cpu_areas()
245 gi->cpu_map = &cpu_map[unit]; in setup_per_cpu_areas()
/kernel/linux/linux-5.10/samples/bpf/
xdp_redirect_cpu_kern.c 26 } cpu_map SEC(".maps");
230 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum0_no_touch()
273 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum1_touch_data()
321 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum2_round_robin()
389 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum3_proto_separate()
465 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum4_ddos_filter_pktgen()
568 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum5_lb_hash_ip_pairs()
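
A CPUMAP slot only accepts redirects once userspace arms it with a queue size (and, optionally, a secondary XDP program) via bpf_map_update_elem(). A hedged sketch of that step, assuming map_fd was obtained from a loaded object as in the xdp_cpumap_attach selftests listed further down:

/* Userspace side: arm one CPUMAP slot. Assumes a libbpf-loaded map fd. */
#include <linux/bpf.h>
#include <bpf/bpf.h>

int arm_cpumap_slot(int map_fd, __u32 cpu)
{
	struct bpf_cpumap_val val = {
		.qsize = 192,	/* per-CPU kthread queue size, in frames */
		/* .bpf_prog.fd could name a program to run on the remote CPU */
	};

	return bpf_map_update_elem(map_fd, &cpu, &val, 0);
}
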
/kernel/linux/linux-5.10/drivers/platform/x86/intel_speed_select_if/
isst_if_common.c 377 struct isst_if_cpu_map *cpu_map; in isst_if_proc_phyid_req() local
379 cpu_map = (struct isst_if_cpu_map *)cmd_ptr; in isst_if_proc_phyid_req()
380 if (cpu_map->logical_cpu >= nr_cpu_ids || in isst_if_proc_phyid_req()
381 cpu_map->logical_cpu >= num_possible_cpus()) in isst_if_proc_phyid_req()
385 cpu_map->physical_cpu = isst_cpu_info[cpu_map->logical_cpu].punit_cpu_id; in isst_if_proc_phyid_req()
508 cmd_cb.offset = offsetof(struct isst_if_cpu_maps, cpu_map); in isst_if_def_ioctl()
/kernel/linux/linux-5.10/kernel/bpf/
cpumap.c 77 struct bpf_cpu_map_entry **cpu_map; member
123 cmap->cpu_map = bpf_map_area_alloc(cmap->map.max_entries * in cpu_map_alloc()
126 if (!cmap->cpu_map) in cpu_map_alloc()
462 get_cpu_map_entry(rcpu); /* 1-refcnt for being in cmap->cpu_map[] */ in __cpu_map_entry_alloc()
525 old_rcpu = xchg(&cmap->cpu_map[key_cpu], rcpu); in __cpu_map_entry_replace()
594 * no further "XDP/bpf-side" reads against bpf_cpu_map->cpu_map. in cpu_map_free()
602 /* For cpu_map the remote CPUs can still be using the entries in cpu_map_free()
608 rcpu = READ_ONCE(cmap->cpu_map[i]); in cpu_map_free()
615 bpf_map_area_free(cmap->cpu_map); in cpu_map_free()
627 rcpu = READ_ONCE(cmap->cpu_map[key]); in __cpu_map_lookup_elem()
[all...]
/kernel/linux/linux-6.6/drivers/platform/x86/intel/speed_select_if/
isst_if_common.c 483 struct isst_if_cpu_map *cpu_map; in isst_if_proc_phyid_req() local
485 cpu_map = (struct isst_if_cpu_map *)cmd_ptr; in isst_if_proc_phyid_req()
486 if (cpu_map->logical_cpu >= nr_cpu_ids || in isst_if_proc_phyid_req()
487 cpu_map->logical_cpu >= num_possible_cpus()) in isst_if_proc_phyid_req()
491 cpu_map->physical_cpu = isst_cpu_info[cpu_map->logical_cpu].punit_cpu_id; in isst_if_proc_phyid_req()
615 cmd_cb.offset = offsetof(struct isst_if_cpu_maps, cpu_map); in isst_if_def_ioctl()
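
Both driver generations service the same request: userspace submits logical CPU numbers in a struct isst_if_cpu_maps and isst_if_proc_phyid_req() fills in the punit physical IDs. A sketch of the calling side, assuming the uapi definitions in <linux/isst_if.h> and the /dev/isst_interface character device:

/* Hedged sketch of the ISST_IF_GET_PHY_ID ioctl from userspace. */
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/isst_if.h>

int main(void)
{
	struct isst_if_cpu_maps map = {
		.cmd_count = 1,
		.cpu_map[0].logical_cpu = 0,	/* ask about logical CPU 0 */
	};
	int fd = open("/dev/isst_interface", O_RDWR);

	if (fd < 0 || ioctl(fd, ISST_IF_GET_PHY_ID, &map) < 0) {
		perror("isst");
		return 1;
	}
	printf("logical 0 -> physical %u\n", map.cpu_map[0].physical_cpu);
	close(fd);
	return 0;
}
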
/kernel/linux/linux-6.6/kernel/bpf/
cpumap.c 78 struct bpf_cpu_map_entry __rcu **cpu_map; member
106 cmap->cpu_map = bpf_map_area_alloc(cmap->map.max_entries * in cpu_map_alloc()
109 if (!cmap->cpu_map) { in cpu_map_alloc()
506 old_rcpu = unrcu_pointer(xchg(&cmap->cpu_map[key_cpu], RCU_INITIALIZER(rcpu))); in __cpu_map_entry_replace()
574 * bpf_cpu_map->cpu_map, but also ensure pending flush operations in cpu_map_free()
585 rcpu = rcu_dereference_raw(cmap->cpu_map[i]); in cpu_map_free()
592 bpf_map_area_free(cmap->cpu_map); in cpu_map_free()
608 rcpu = rcu_dereference_check(cmap->cpu_map[key], in __cpu_map_lookup_elem()
/kernel/linux/linux-5.10/mm/
percpu.c 2241 * @nr_units units. The returned ai's groups[0].cpu_map points to the
2242 * cpu_map array which is long enough for @nr_units and filled with
2243 * NR_CPUS. It's the caller's responsibility to initialize cpu_map
2259 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
2260 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
2268 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
2271 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
2337 if (gi->cpu_map[unit] != NR_CPUS) in pcpu_dump_alloc_info()
2339 cpu_width, gi->cpu_map[unit]); in pcpu_dump_alloc_info()
2485 cpu = gi->cpu_map[i]; in pcpu_setup_first_chunk()
2679 unsigned int *cpu_map; pcpu_build_alloc_info() local
[all...]
/kernel/linux/linux-6.6/tools/perf/util/
mmap.c 249 const struct perf_cpu_map *cpu_map = NULL; in build_node_mask() local
251 cpu_map = cpu_map__online(); in build_node_mask()
252 if (!cpu_map) in build_node_mask()
255 nr_cpus = perf_cpu_map__nr(cpu_map); in build_node_mask()
257 cpu = perf_cpu_map__cpu(cpu_map, idx); /* map c index to online cpu index */ in build_node_mask()
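
Both perf versions translate a map index into an online CPU number; note the API shift from the 5.10 direct access (cpu_map->map[c]) to the 6.6 accessor perf_cpu_map__cpu(). A standalone sketch against the public libperf API as shipped in the 6.6 tree (link with -lperf):

/* Sketch of idx -> cpu translation with libperf, 6.x-era API assumed. */
#include <stdio.h>
#include <perf/cpumap.h>

int main(void)
{
	struct perf_cpu_map *cpu_map = perf_cpu_map__new("0-3"); /* parse list */
	int idx;

	if (!cpu_map)
		return 1;

	for (idx = 0; idx < perf_cpu_map__nr(cpu_map); idx++) {
		struct perf_cpu cpu = perf_cpu_map__cpu(cpu_map, idx);

		printf("idx %d -> cpu %d\n", idx, cpu.cpu);
	}

	perf_cpu_map__put(cpu_map);	/* drop our reference */
	return 0;
}
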
/kernel/linux/linux-5.10/tools/perf/tests/
cpumap.c 19 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_mask()
53 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_cpus()
/kernel/linux/linux-6.6/tools/testing/selftests/bpf/prog_tests/
H A Dxdp_cpumap_attach.c35 map_fd = bpf_map__fd(skel->maps.cpu_map); in test_xdp_with_cpumap_helpers()
87 map_fd = bpf_map__fd(skel->maps.cpu_map); in test_xdp_with_cpumap_frags_helpers()
/kernel/linux/linux-5.10/tools/perf/arch/nds32/util/
header.c 15 struct cpu_map *cpus; in get_cpuid_str()
/kernel/linux/linux-6.6/tools/perf/tests/
cpumap.c 19 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_mask()
55 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_cpus()
81 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_range_cpus()
/kernel/linux/linux-6.6/mm/
percpu.c 2421 * @nr_units units. The returned ai's groups[0].cpu_map points to the
2422 * cpu_map array which is long enough for @nr_units and filled with
2423 * NR_CPUS. It's the caller's responsibility to initialize cpu_map
2439 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
2440 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
2448 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
2451 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
2517 if (gi->cpu_map[unit] != NR_CPUS) in pcpu_dump_alloc_info()
2519 cpu_width, gi->cpu_map[unit]); in pcpu_dump_alloc_info()
2661 cpu = gi->cpu_map[i]; in pcpu_setup_first_chunk()
2852 unsigned int *cpu_map; pcpu_build_alloc_info() local
[all...]
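
The percpu.c comment block spells out the cpu_map contract: pcpu_alloc_alloc_info() pre-fills every unit slot with NR_CPUS, the caller assigns real CPU numbers, and walkers such as pcpu_dump_alloc_info() skip the sentinel slots. A userspace mock of that convention (NR_CPUS stands in for the kernel config constant):

/* Userspace mock of the NR_CPUS-sentinel cpu_map convention. */
#include <stdio.h>

#define NR_CPUS 8	/* stand-in for the kernel config constant */

int main(void)
{
	unsigned int cpu_map[4] = { NR_CPUS, NR_CPUS, NR_CPUS, NR_CPUS };
	unsigned int unit;

	cpu_map[0] = 2;	/* caller maps unit 0 to cpu2 */
	cpu_map[1] = 5;	/* and unit 1 to cpu5; units 2-3 stay unused */

	for (unit = 0; unit < 4; unit++)
		if (cpu_map[unit] != NR_CPUS)	/* skip sentinel slots */
			printf("unit %u -> cpu%u\n", unit, cpu_map[unit]);
	return 0;
}
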
