
Searched refs: all_cpus (Results 1 - 20 of 20), sorted by relevance

/kernel/linux/linux-5.10/tools/bpf/bpftool/
map_perf_ring.c
60 bool all_cpus; member
75 int idx = ctx->all_cpus ? cpu : ctx->idx; in print_bpf_output()
130 .all_cpus = true, in do_event_pipe()
181 ctx.all_cpus = false; in do_event_pipe()
184 if (!ctx.all_cpus) { in do_event_pipe()
197 opts.cpu_cnt = ctx.all_cpus ? 0 : 1; in do_event_pipe()
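The bpftool hits above show the core use of all_cpus in map_perf_ring.c: when the event pipe is not pinned to a single CPU/index, every CPU's ring is followed and each event is labelled with the CPU it came from, and the same flag decides whether the perf-buffer options ask for all CPUs (cpu_cnt = 0) or exactly one. A minimal standalone sketch of that selection (illustrative names, not the bpftool source):

    #include <stdbool.h>
    #include <stdio.h>

    struct event_pipe_ctx {
        bool all_cpus;   /* true: follow every CPU's ring buffer */
        int cpu;         /* CPU requested by the user (when !all_cpus) */
        int idx;         /* ring/map index requested by the user */
    };

    static void print_bpf_output(const struct event_pipe_ctx *ctx, int cpu)
    {
        /* Same selection as the hits above: per-CPU index when all_cpus
         * is set, otherwise the fixed index the user asked for. */
        int idx = ctx->all_cpus ? cpu : ctx->idx;

        printf("event reported on index %d\n", idx);
    }

    int main(void)
    {
        struct event_pipe_ctx ctx = { .all_cpus = true };

        print_bpf_output(&ctx, 3);   /* index 3: the producing CPU */

        ctx.all_cpus = false;        /* as when a single cpu/index is requested */
        ctx.idx = 0;
        print_bpf_output(&ctx, 3);   /* index 0: the fixed ring */
        return 0;
    }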
/kernel/linux/linux-6.6/tools/bpf/bpftool/
map_perf_ring.c
52 bool all_cpus; member
67 int idx = ctx->all_cpus ? cpu : ctx->idx; in print_bpf_output()
122 .all_cpus = true, in do_event_pipe()
173 ctx.all_cpus = false; in do_event_pipe()
176 if (!ctx.all_cpus) { in do_event_pipe()
186 opts.cpu_cnt = ctx.all_cpus ? 0 : 1; in do_event_pipe()
/kernel/linux/linux-6.6/tools/lib/perf/
evlist.c
78 evlist->all_cpus = perf_cpu_map__merge(evlist->all_cpus, evsel->cpus); in __perf_evlist__propagate_maps()
154 perf_cpu_map__put(evlist->all_cpus); in perf_evlist__exit()
157 evlist->all_cpus = NULL; in perf_evlist__exit()
321 int nr_cpus = perf_cpu_map__nr(evlist->all_cpus); in perf_evlist__alloc_pollfd()
453 struct perf_cpu evlist_cpu = perf_cpu_map__cpu(evlist->all_cpus, cpu_idx); in mmap_per_evsel()
547 int nr_cpus = perf_cpu_map__nr(evlist->all_cpus); in mmap_per_thread()
589 int nr_cpus = perf_cpu_map__nr(evlist->all_cpus); in mmap_per_cpu()
621 nr_mmaps = perf_cpu_map__nr(evlist->all_cpus); in perf_evlist__nr_mmaps()
622 if (perf_cpu_map__empty(evlist->all_cpus)) { in perf_evlist__nr_mmaps()
[all...]
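In libperf's evlist (the linux-6.6 hits here and the linux-5.10 hits further down), all_cpus is the running union of every added event's CPU map: __perf_evlist__propagate_maps() merges evsel->cpus into it, and the pollfd/mmap sizing code then takes perf_cpu_map__nr() of that union. A plain-C sketch of what "merge" means here, conceptually only (this is not perf_cpu_map__merge() itself):

    #include <stdio.h>

    /* Union of two sorted, duplicate-free CPU id lists, smallest id first. */
    static int merge_cpus(const int *a, int na, const int *b, int nb, int *out)
    {
        int i = 0, j = 0, n = 0;

        while (i < na && j < nb) {
            if (a[i] < b[j])
                out[n++] = a[i++];
            else if (b[j] < a[i])
                out[n++] = b[j++];
            else {                      /* CPU present in both maps: keep one copy */
                out[n++] = a[i++];
                j++;
            }
        }
        while (i < na)
            out[n++] = a[i++];
        while (j < nb)
            out[n++] = b[j++];
        return n;
    }

    int main(void)
    {
        int evlist_cpus[] = { 0, 1 };     /* CPUs already in all_cpus */
        int evsel_cpus[]  = { 1, 2, 3 };  /* CPUs of a newly added event */
        int all[8];
        int n = merge_cpus(evlist_cpus, 2, evsel_cpus, 3, all);

        for (int k = 0; k < n; k++)
            printf("%d ", all[k]);        /* prints: 0 1 2 3 */
        printf("\n");
        return 0;
    }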
/kernel/linux/linux-6.6/arch/riscv/kernel/vdso/
hwprobe.c
25 bool all_cpus = !cpu_count && !cpus; in __vdso_riscv_hwprobe() local
35 if ((flags != 0) || (!all_cpus && !avd->homogeneous_cpus)) in __vdso_riscv_hwprobe()
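The hwprobe.c hit is the vDSO fast path of the riscv_hwprobe syscall: an empty CPU set means "answer for all CPUs", and the cached answer is only usable when either all CPUs were asked for or the system is known to be homogeneous. A sketch of that check with made-up surrounding names (only all_cpus, flags, cpu_count, cpus and homogeneous_cpus come from the hit):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    struct vdso_probe_data {
        bool homogeneous_cpus;   /* every CPU reports identical capabilities */
    };

    static bool can_answer_from_vdso(size_t cpu_count, const unsigned long *cpus,
                                     unsigned int flags,
                                     const struct vdso_probe_data *avd)
    {
        /* No explicit CPU set means the caller wants the answer for all CPUs. */
        bool all_cpus = !cpu_count && !cpus;

        /* Unknown flags, or a per-CPU query on a heterogeneous system,
         * force a fallback to the real syscall. */
        if (flags != 0 || (!all_cpus && !avd->homogeneous_cpus))
            return false;
        return true;
    }

    int main(void)
    {
        struct vdso_probe_data avd = { .homogeneous_cpus = false };
        unsigned long one_cpu = 1;

        printf("%d\n", can_answer_from_vdso(0, NULL, 0, &avd));     /* 1: all CPUs */
        printf("%d\n", can_answer_from_vdso(1, &one_cpu, 0, &avd)); /* 0: one CPU, not homogeneous */
        return 0;
    }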
/kernel/linux/linux-5.10/lib/
test_lockup.c
39 static bool all_cpus; variable
40 module_param(all_cpus, bool, 0400);
41 MODULE_PARM_DESC(all_cpus, "trigger lockup at all cpus at once");
543 all_cpus ? "all_cpus " : "", in test_lockup_init()
560 if (all_cpus) { in test_lockup_init()
/kernel/linux/linux-6.6/lib/
test_lockup.c
39 static bool all_cpus; variable
40 module_param(all_cpus, bool, 0400);
41 MODULE_PARM_DESC(all_cpus, "trigger lockup at all cpus at once");
559 all_cpus ? "all_cpus " : "", in test_lockup_init()
576 if (all_cpus) { in test_lockup_init()
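Both test_lockup.c hits use all_cpus as a load-time module parameter that widens the test from one CPU to every online CPU. A kernel-module style fragment of that pattern (the parameter lines match the hits; the init body is only illustrative, not the real module):

    #include <linux/init.h>
    #include <linux/module.h>
    #include <linux/moduleparam.h>
    #include <linux/smp.h>

    static bool all_cpus;
    module_param(all_cpus, bool, 0400);    /* settable only at load time */
    MODULE_PARM_DESC(all_cpus, "trigger lockup at all cpus at once");

    static void run_test(void *unused)
    {
        /* the actual lockup/stress work would go here */
    }

    static int __init example_init(void)
    {
        if (all_cpus)
            on_each_cpu(run_test, NULL, 1);   /* run the test on every online CPU */
        else
            run_test(NULL);
        return 0;
    }
    module_init(example_init);

    MODULE_LICENSE("GPL");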
/kernel/linux/linux-5.10/tools/lib/perf/
evlist.c
58 evlist->all_cpus = perf_cpu_map__merge(evlist->all_cpus, evsel->cpus); in __perf_evlist__propagate_maps()
129 perf_cpu_map__put(evlist->all_cpus); in perf_evlist__exit()
132 evlist->all_cpus = NULL; in perf_evlist__exit()
170 if (!evlist->all_cpus && cpus) in perf_evlist__set_maps()
171 evlist->all_cpus = perf_cpu_map__get(cpus); in perf_evlist__set_maps()
/kernel/linux/linux-6.6/tools/perf/util/
bpf_counter_cgroup.c
91 if (evsel__open_per_cpu(cgrp_switch, evlist->core.all_cpus, -1) < 0) { in bperf_load_program()
96 perf_cpu_map__for_each_cpu(cpu, i, evlist->core.all_cpus) { in bperf_load_program()
205 perf_cpu_map__for_each_cpu(cpu, idx, evlist->core.all_cpus) in bperf_cgrp__sync_counters()
evlist.c
407 .evlist_cpu_map_nr = perf_cpu_map__nr(evlist->core.all_cpus), in evlist__cpu_begin()
418 itr.cpu = perf_cpu_map__cpu(evlist->core.all_cpus, 0); in evlist__cpu_begin()
446 perf_cpu_map__cpu(evlist_cpu_itr->container->core.all_cpus, in evlist_cpu_iterator__next()
auxtrace.c
187 mp->cpu = perf_cpu_map__cpu(evlist->core.all_cpus, idx); in auxtrace_mmap_params__set_idx()
654 struct perf_cpu evlist_cpu = perf_cpu_map__cpu(evlist->core.all_cpus, idx); in evlist__enable_event_idx()
/kernel/linux/linux-6.6/drivers/platform/x86/intel/ifs/
runtest.c
242 int all_cpus = cpumask_weight(smt_mask); in wait_for_sibling_cpu()
245 while (atomic_read(t) < all_cpus) { in wait_for_sibling_cpu()
/kernel/linux/linux-5.10/arch/x86/kvm/
hyperv.c
1514 bool all_cpus; in kvm_hv_flush_tlb() local
1533 all_cpus = (flush.flags & HV_FLUSH_ALL_PROCESSORS) || in kvm_hv_flush_tlb()
1546 all_cpus = flush_ex.hv_vp_set.format != in kvm_hv_flush_tlb()
1553 if (!sparse_banks_len && !all_cpus) in kvm_hv_flush_tlb()
1556 if (!all_cpus && in kvm_hv_flush_tlb()
1571 if (all_cpus) { in kvm_hv_flush_tlb()
1619 bool all_cpus; in kvm_hv_send_ipi() local
1635 all_cpus = false; in kvm_hv_send_ipi()
1653 all_cpus = send_ipi_ex.vp_set.format == HV_GENERIC_SET_ALL; in kvm_hv_send_ipi()
1655 if (all_cpus) in kvm_hv_send_ipi()
[all...]
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/microcode/
core.c
433 int all_cpus = num_online_cpus(); in __wait_for_cpus() local
437 while (atomic_read(t) < all_cpus) { in __wait_for_cpus()
440 all_cpus - atomic_read(t)); in __wait_for_cpus()
/kernel/linux/linux-6.6/arch/x86/kernel/cpu/microcode/
core.c
364 int all_cpus = num_online_cpus(); in __wait_for_cpus() local
368 while (atomic_read(t) < all_cpus) { in __wait_for_cpus()
371 all_cpus - atomic_read(t)); in __wait_for_cpus()
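The __wait_for_cpus() hits, like the wait_for_sibling_cpu() hit in runtest.c above, follow the same rendezvous pattern: each CPU bumps a shared atomic counter and then spins until the counter reaches the expected CPU count, with the kernel versions also reporting how many CPUs are still missing and enforcing a timeout. A userspace approximation using C11 atomics and pthreads in place of the kernel primitives:

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    #define ALL_CPUS 4                 /* stands in for num_online_cpus() */

    static atomic_int arrived;

    static void *worker(void *arg)
    {
        (void)arg;
        atomic_fetch_add(&arrived, 1);           /* announce arrival at the barrier */
        while (atomic_load(&arrived) < ALL_CPUS)
            ;                                    /* the kernel code delays and times out here */
        return NULL;
    }

    int main(void)
    {
        pthread_t t[ALL_CPUS];

        for (int i = 0; i < ALL_CPUS; i++)
            pthread_create(&t[i], NULL, worker, NULL);
        for (int i = 0; i < ALL_CPUS; i++)
            pthread_join(t[i], NULL);

        printf("all %d workers reached the rendezvous\n", atomic_load(&arrived));
        return 0;
    }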
/kernel/linux/linux-6.6/arch/x86/kvm/
hyperv.c
1971 bool all_cpus; in kvm_hv_flush_tlb() local
2021 all_cpus = (flush.flags & HV_FLUSH_ALL_PROCESSORS) || in kvm_hv_flush_tlb()
2043 all_cpus = flush_ex.hv_vp_set.format != in kvm_hv_flush_tlb()
2049 if (!all_cpus) { in kvm_hv_flush_tlb()
2084 if (all_cpus && !is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2126 if (!all_cpus && in kvm_hv_flush_tlb()
2176 bool all_cpus; in kvm_hv_send_ipi() local
2192 all_cpus = false; in kvm_hv_send_ipi()
2213 all_cpus = send_ipi_ex.vp_set.format == HV_GENERIC_SET_ALL; in kvm_hv_send_ipi()
2218 if (all_cpus) in kvm_hv_send_ipi()
[all...]
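In both hyperv.c versions, all_cpus records whether a guest's flush-TLB or send-IPI hypercall targets every virtual CPU rather than a sparse VP set: the plain hypercall form checks HV_FLUSH_ALL_PROCESSORS (among further conditions, per the truncated "||" in the hits), while the extended form treats any VP-set format other than the sparse one, i.e. HV_GENERIC_SET_ALL, as "every processor". A reduced sketch of just that decision (struct layouts trimmed to the fields involved; not the KVM code itself):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define HV_FLUSH_ALL_PROCESSORS   0x1ULL   /* flag in the plain flush hypercall */
    #define HV_GENERIC_SET_SPARSE_4K  0        /* VP set given as sparse banks */
    #define HV_GENERIC_SET_ALL        1        /* VP set means "every processor" */

    struct flush_simple { uint64_t flags; };
    struct flush_ex     { struct { uint64_t format; } hv_vp_set; };

    static bool flush_targets_all_cpus(bool extended,
                                       const struct flush_simple *flush,
                                       const struct flush_ex *flush_ex)
    {
        if (!extended)
            /* Plain hypercall: an explicit flag selects every processor
             * (the real check has further conditions). */
            return flush->flags & HV_FLUSH_ALL_PROCESSORS;

        /* Extended hypercall: anything other than the sparse-bank format
         * means the guest addressed all processors. */
        return flush_ex->hv_vp_set.format != HV_GENERIC_SET_SPARSE_4K;
    }

    int main(void)
    {
        struct flush_simple f  = { .flags = HV_FLUSH_ALL_PROCESSORS };
        struct flush_ex     fx = { .hv_vp_set.format = HV_GENERIC_SET_ALL };

        printf("%d %d\n", flush_targets_all_cpus(false, &f, NULL),
                          flush_targets_all_cpus(true, NULL, &fx));   /* prints: 1 1 */
        return 0;
    }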
/kernel/linux/linux-5.10/tools/lib/perf/include/internal/
evlist.h
21 struct perf_cpu_map *all_cpus; member
/kernel/linux/linux-6.6/tools/lib/perf/include/internal/
evlist.h
28 struct perf_cpu_map *all_cpus; member
/kernel/linux/linux-5.10/tools/perf/util/
evlist.h
344 perf_cpu_map__for_each_cpu (cpu, index, (evlist)->core.all_cpus)
/kernel/linux/linux-6.6/tools/perf/
builtin-record.c
998 struct perf_cpu_map *cpus = evlist->core.all_cpus; in record__thread_data_init_maps()
2048 err = perf_event__synthesize_cpu_map(&rec->tool, rec->evlist->core.all_cpus, in record__synthesize()
3892 struct perf_cpu_map *cpus = rec->evlist->core.all_cpus; in record__init_thread_masks()
/kernel/linux/linux-5.10/tools/perf/
builtin-stat.c
391 ncpus = perf_cpu_map__nr(evsel_list->core.all_cpus); in read_affinity_counters()
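The remaining hits (the evlist.h iterator macro, builtin-record.c and builtin-stat.c) all consume all_cpus through the libperf CPU-map API: perf_cpu_map__nr() to size per-CPU arrays and perf_cpu_map__for_each_cpu() to walk the map. A small user of that API as it looks in linux-6.6's tools/lib/perf (perf_cpu_map__new() is assumed to be available from the public header; build against the in-tree libperf):

    #include <stdio.h>
    #include <perf/cpumap.h>

    int main(void)
    {
        /* "0-3" parses a CPU list; NULL would give the online CPUs instead. */
        struct perf_cpu_map *cpus = perf_cpu_map__new("0-3");
        struct perf_cpu cpu;
        int idx;

        if (!cpus)
            return 1;

        /* builtin-stat.c sizes its counter buffers from this count. */
        printf("map holds %d CPUs\n", perf_cpu_map__nr(cpus));

        /* evlist.h's iterator macro wraps exactly this walk. */
        perf_cpu_map__for_each_cpu(cpu, idx, cpus)
            printf("idx %d -> cpu %d\n", idx, cpu.cpu);

        perf_cpu_map__put(cpus);
        return 0;
    }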
