
Searched refs:pmu (Results 1 - 25 of 568), sorted by relevance

/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
base.c
32 struct nvkm_pmu *pmu = device->pmu; in nvkm_pmu_fan_controlled() local
37 if (pmu && pmu->func->code.size) in nvkm_pmu_fan_controlled()
48 nvkm_pmu_pgob(struct nvkm_pmu *pmu, bool enable) in nvkm_pmu_pgob() argument
50 if (pmu && pmu->func->pgob) in nvkm_pmu_pgob()
51 pmu->func->pgob(pmu, enable); in nvkm_pmu_pgob()
57 struct nvkm_pmu *pmu in nvkm_pmu_recv() local
62 nvkm_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], u32 process, u32 message, u32 data0, u32 data1) nvkm_pmu_send() argument
73 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_intr() local
82 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_fini() local
98 nvkm_pmu_reset(struct nvkm_pmu *pmu) nvkm_pmu_reset() argument
119 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_preinit() local
127 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_init() local
150 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_dtor() local
169 nvkm_pmu_ctor(const struct nvkm_pmu_fwif *fwif, struct nvkm_device *device, int index, struct nvkm_pmu *pmu) nvkm_pmu_ctor() argument
205 struct nvkm_pmu *pmu; nvkm_pmu_new_() local
[all...]
gk20a.c
51 gk20a_pmu_dvfs_target(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_target() argument
53 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_target()
59 gk20a_pmu_dvfs_get_cur_state(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_get_cur_state() argument
61 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_cur_state()
67 gk20a_pmu_dvfs_get_target_state(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_target_state() argument
70 struct gk20a_pmu_dvfs_data *data = pmu->data; in gk20a_pmu_dvfs_get_target_state()
71 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_target_state()
86 nvkm_trace(&pmu->base.subdev, "cur level = %d, new level = %d\n", in gk20a_pmu_dvfs_get_target_state()
95 gk20a_pmu_dvfs_get_dev_status(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_dev_status() argument
98 struct nvkm_falcon *falcon = &pmu in gk20a_pmu_dvfs_get_dev_status()
105 gk20a_pmu_dvfs_reset_dev_status(struct gk20a_pmu *pmu) gk20a_pmu_dvfs_reset_dev_status() argument
116 struct gk20a_pmu *pmu = gk20a_pmu_dvfs_work() local
158 gk20a_pmu_fini(struct nvkm_pmu *pmu) gk20a_pmu_fini() argument
167 gk20a_pmu_init(struct nvkm_pmu *pmu) gk20a_pmu_init() argument
215 struct gk20a_pmu *pmu; gk20a_pmu_new() local
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
base.c
32 struct nvkm_pmu *pmu = device->pmu; in nvkm_pmu_fan_controlled() local
37 if (pmu && pmu->func->code.size) in nvkm_pmu_fan_controlled()
48 nvkm_pmu_pgob(struct nvkm_pmu *pmu, bool enable) in nvkm_pmu_pgob() argument
50 if (pmu && pmu->func->pgob) in nvkm_pmu_pgob()
51 pmu->func->pgob(pmu, enable); in nvkm_pmu_pgob()
57 struct nvkm_pmu *pmu in nvkm_pmu_recv() local
62 nvkm_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], u32 process, u32 message, u32 data0, u32 data1) nvkm_pmu_send() argument
73 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_intr() local
82 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_fini() local
96 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_init() local
107 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_dtor() local
126 nvkm_pmu_ctor(const struct nvkm_pmu_fwif *fwif, struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_pmu *pmu) nvkm_pmu_ctor() argument
163 struct nvkm_pmu *pmu; nvkm_pmu_new_() local
[all...]
gk20a.c
51 gk20a_pmu_dvfs_target(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_target() argument
53 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_target()
59 gk20a_pmu_dvfs_get_cur_state(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_get_cur_state() argument
61 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_cur_state()
67 gk20a_pmu_dvfs_get_target_state(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_target_state() argument
70 struct gk20a_pmu_dvfs_data *data = pmu->data; in gk20a_pmu_dvfs_get_target_state()
71 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_target_state()
86 nvkm_trace(&pmu->base.subdev, "cur level = %d, new level = %d\n", in gk20a_pmu_dvfs_get_target_state()
95 gk20a_pmu_dvfs_get_dev_status(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_dev_status() argument
98 struct nvkm_falcon *falcon = &pmu in gk20a_pmu_dvfs_get_dev_status()
105 gk20a_pmu_dvfs_reset_dev_status(struct gk20a_pmu *pmu) gk20a_pmu_dvfs_reset_dev_status() argument
116 struct gk20a_pmu *pmu = gk20a_pmu_dvfs_work() local
158 gk20a_pmu_fini(struct nvkm_pmu *pmu) gk20a_pmu_fini() argument
167 gk20a_pmu_init(struct nvkm_pmu *pmu) gk20a_pmu_init() argument
215 struct gk20a_pmu *pmu; gk20a_pmu_new() local
[all...]
gt215.c
30 gt215_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], in gt215_pmu_send() argument
33 struct nvkm_subdev *subdev = &pmu->subdev; in gt215_pmu_send()
37 mutex_lock(&pmu->send.mutex); in gt215_pmu_send()
45 mutex_unlock(&pmu->send.mutex); in gt215_pmu_send()
54 pmu->recv.message = message; in gt215_pmu_send()
55 pmu->recv.process = process; in gt215_pmu_send()
65 pmu->send.base)); in gt215_pmu_send()
77 wait_event(pmu->recv.wait, (pmu->recv.process == 0)); in gt215_pmu_send()
78 reply[0] = pmu in gt215_pmu_send()
87 gt215_pmu_recv(struct nvkm_pmu *pmu) gt215_pmu_recv() argument
139 gt215_pmu_intr(struct nvkm_pmu *pmu) gt215_pmu_intr() argument
178 gt215_pmu_fini(struct nvkm_pmu *pmu) gt215_pmu_fini() argument
185 gt215_pmu_reset(struct nvkm_pmu *pmu) gt215_pmu_reset() argument
195 gt215_pmu_enabled(struct nvkm_pmu *pmu) gt215_pmu_enabled() argument
201 gt215_pmu_init(struct nvkm_pmu *pmu) gt215_pmu_init() argument
[all...]
/kernel/linux/linux-6.6/tools/perf/util/
pmus.c
16 #include "pmu.h"
22 * must have pmu->is_core=1. If there are more than one PMU in
32 * must have pmu->is_core=0 but pmu->is_uncore could be 0 or 1.
66 struct perf_pmu *pmu, *tmp; in perf_pmus__destroy() local
68 list_for_each_entry_safe(pmu, tmp, &core_pmus, list) { in perf_pmus__destroy()
69 list_del(&pmu->list); in perf_pmus__destroy()
71 perf_pmu__delete(pmu); in perf_pmus__destroy()
73 list_for_each_entry_safe(pmu, tmp, &other_pmus, list) { in perf_pmus__destroy()
74 list_del(&pmu in perf_pmus__destroy()
84 struct perf_pmu *pmu; pmu_find() local
102 struct perf_pmu *pmu; perf_pmus__find() local
131 struct perf_pmu *pmu; perf_pmu__find2() local
217 struct perf_pmu *pmu; __perf_pmus__find_by_type() local
233 struct perf_pmu *pmu = __perf_pmus__find_by_type(type); perf_pmus__find_by_type() local
247 perf_pmus__scan(struct perf_pmu *pmu) perf_pmus__scan() argument
267 perf_pmus__scan_core(struct perf_pmu *pmu) perf_pmus__scan_core() argument
279 perf_pmus__scan_skip_duplicates(struct perf_pmu *pmu) perf_pmus__scan_skip_duplicates() argument
318 struct perf_pmu *pmu = NULL; perf_pmus__pmu_for_pmu_filter() local
346 const struct perf_pmu *pmu; global() member
436 struct perf_pmu *pmu; perf_pmus__print_pmu_events() local
503 struct perf_pmu *pmu = perf_pmus__find(pname); perf_pmus__have_event() local
513 struct perf_pmu *pmu = NULL; perf_pmus__num_core_pmus() local
523 struct perf_pmu *pmu = NULL; __perf_pmus__supports_extended_type() local
587 struct perf_pmu *pmu = evsel->pmu; evsel__find_pmu() local
[all...]
pmu.c
20 #include "pmu.h"
22 #include <util/pmu-bison.h>
23 #include <util/pmu-flex.h>
40 * pmu-events.c, created by parsing the pmu-events json files.
115 static int pmu_aliases_parse(struct perf_pmu *pmu);
158 static void perf_pmu_format__load(struct perf_pmu *pmu, struct perf_pmu_format *format) in perf_pmu_format__load() argument
166 if (!perf_pmu__pathname_scnprintf(path, sizeof(path), pmu->name, "format")) in perf_pmu_format__load()
184 int perf_pmu__format_parse(struct perf_pmu *pmu, int dirfd, bool eager_load) in perf_pmu__format_parse() argument
201 format = perf_pmu__new_format(&pmu in perf_pmu__format_parse()
234 pmu_format(struct perf_pmu *pmu, int dirfd, const char *name) pmu_format() argument
286 perf_pmu__parse_scale(struct perf_pmu *pmu, struct perf_pmu_alias *alias) perf_pmu__parse_scale() argument
322 perf_pmu__parse_unit(struct perf_pmu *pmu, struct perf_pmu_alias *alias) perf_pmu__parse_unit() argument
358 perf_pmu__parse_per_pkg(struct perf_pmu *pmu, struct perf_pmu_alias *alias) perf_pmu__parse_per_pkg() argument
379 perf_pmu__parse_snapshot(struct perf_pmu *pmu, struct perf_pmu_alias *alias) perf_pmu__parse_snapshot() argument
411 perf_pmu__del_aliases(struct perf_pmu *pmu) perf_pmu__del_aliases() argument
421 perf_pmu__find_alias(struct perf_pmu *pmu, const char *name, bool load) perf_pmu__find_alias() argument
455 read_alias_info(struct perf_pmu *pmu, struct perf_pmu_alias *alias) read_alias_info() argument
470 struct perf_pmu *pmu; global() member
500 perf_pmu__new_alias(struct perf_pmu *pmu, const char *name, const char *desc, const char *val, FILE *val_fd, const struct pmu_event *pe) perf_pmu__new_alias() argument
598 pmu_aliases_parse(struct perf_pmu *pmu) pmu_aliases_parse() argument
697 struct perf_pmu pmu = {.name = pmu_name}; pmu_cpumask() local
758 perf_pmu__getcpuid(struct perf_pmu *pmu) perf_pmu__getcpuid() argument
881 struct perf_pmu *pmu = vdata; pmu_add_cpu_aliases_map_callback() local
891 pmu_add_cpu_aliases_table(struct perf_pmu *pmu, const struct pmu_events_table *table) pmu_add_cpu_aliases_table() argument
896 pmu_add_cpu_aliases(struct perf_pmu *pmu) pmu_add_cpu_aliases() argument
912 struct perf_pmu *pmu = vdata; pmu_add_sys_aliases_iter_fn() local
930 pmu_add_sys_aliases(struct perf_pmu *pmu) pmu_add_sys_aliases() argument
956 pmu_max_precise(int dirfd, struct perf_pmu *pmu) pmu_max_precise() argument
966 struct perf_pmu *pmu; perf_pmu__lookup() local
1029 struct perf_pmu *pmu = zalloc(sizeof(*pmu)); perf_pmu__create_placeholder_core_pmu() local
1051 perf_pmu__warn_invalid_formats(struct perf_pmu *pmu) perf_pmu__warn_invalid_formats() argument
1077 struct perf_pmu *pmu = evsel__find_pmu(evsel); evsel__is_aux_event() local
1091 evsel__set_config_if_unset(struct perf_pmu *pmu, struct evsel *evsel, const char *config_name, u64 val) evsel__set_config_if_unset() argument
1123 perf_pmu__format_bits(struct perf_pmu *pmu, const char *name) perf_pmu__format_bits() argument
1138 perf_pmu__format_type(struct perf_pmu *pmu, const char *name) perf_pmu__format_type() argument
1236 pmu_config_term(struct perf_pmu *pmu, struct perf_event_attr *attr, struct parse_events_term *term, struct list_head *head_terms, bool zero, struct parse_events_error *err) pmu_config_term() argument
1359 perf_pmu__config_terms(struct perf_pmu *pmu, struct perf_event_attr *attr, struct list_head *head_terms, bool zero, struct parse_events_error *err) perf_pmu__config_terms() argument
1379 perf_pmu__config(struct perf_pmu *pmu, struct perf_event_attr *attr, struct list_head *head_terms, struct parse_events_error *err) perf_pmu__config() argument
1388 pmu_find_alias(struct perf_pmu *pmu, struct parse_events_term *term) pmu_find_alias() argument
1427 check_info_data(struct perf_pmu *pmu, struct perf_pmu_alias *alias, struct perf_pmu_info *info, struct parse_events_error *err, int column) check_info_data() argument
1474 perf_pmu__check_alias(struct perf_pmu *pmu, struct list_head *head_terms, struct perf_pmu_info *info, struct parse_events_error *err) perf_pmu__check_alias() argument
1544 perf_pmu__find_event(struct perf_pmu *pmu, const char *event, void *state, pmu_event_callback cb) perf_pmu__find_event() argument
1568 perf_pmu__has_format(const struct perf_pmu *pmu, const char *name) perf_pmu__has_format() argument
1584 perf_pmu__supports_legacy_cache(const struct perf_pmu *pmu) perf_pmu__supports_legacy_cache() argument
1589 perf_pmu__auto_merge_stats(const struct perf_pmu *pmu) perf_pmu__auto_merge_stats() argument
1594 perf_pmu__have_event(struct perf_pmu *pmu, const char *name) perf_pmu__have_event() argument
1603 perf_pmu__num_events(struct perf_pmu *pmu) perf_pmu__num_events() argument
1627 format_alias(char *buf, int len, const struct perf_pmu *pmu, const struct perf_pmu_alias *alias, bool skip_duplicate_pmus) format_alias() argument
1656 perf_pmu__for_each_event(struct perf_pmu *pmu, bool skip_duplicate_pmus, void *state, pmu_event_callback cb) perf_pmu__for_each_event() argument
1724 pmu__name_match(const struct perf_pmu *pmu, const char *pmu_name) pmu__name_match() argument
1735 perf_pmu__is_software(const struct perf_pmu *pmu) perf_pmu__is_software() argument
1761 perf_pmu__open_file(struct perf_pmu *pmu, const char *name) perf_pmu__open_file() argument
1772 perf_pmu__open_file_at(struct perf_pmu *pmu, int dirfd, const char *name) perf_pmu__open_file_at() argument
1783 perf_pmu__scan_file(struct perf_pmu *pmu, const char *name, const char *fmt, ...) perf_pmu__scan_file() argument
1800 perf_pmu__scan_file_at(struct perf_pmu *pmu, int dirfd, const char *name, const char *fmt, ...) perf_pmu__scan_file_at() argument
1817 perf_pmu__file_exists(struct perf_pmu *pmu, const char *name) perf_pmu__file_exists() argument
1851 perf_pmu__del_caps(struct perf_pmu *pmu) perf_pmu__del_caps() argument
1868 perf_pmu__caps_parse(struct perf_pmu *pmu) perf_pmu__caps_parse() argument
1929 perf_pmu__compute_config_masks(struct perf_pmu *pmu) perf_pmu__compute_config_masks() argument
1952 perf_pmu__warn_invalid_config(struct perf_pmu *pmu, __u64 config, const char *name, int config_num, const char *config_name) perf_pmu__warn_invalid_config() argument
2047 perf_pmu__delete(struct perf_pmu *pmu) perf_pmu__delete() argument
2064 struct perf_pmu *pmu = NULL; pmu__find_core_pmu() local
[all...]
pmu.h
12 #include "pmu-events/pmu-events.h"
114 * from json events in pmu-events.c.
118 * @events_table: The events table for json events in pmu-events.c.
144 /** @list: Element on pmus list in pmu.c. */
177 const struct perf_pmu *pmu; member
192 void pmu_add_sys_aliases(struct perf_pmu *pmu);
193 int perf_pmu__config(struct perf_pmu *pmu, struct perf_event_attr *attr,
196 int perf_pmu__config_terms(struct perf_pmu *pmu,
200 __u64 perf_pmu__format_bits(struct perf_pmu *pmu, cons
[all...]
/kernel/linux/linux-5.10/arch/x86/kvm/vmx/
pmu_intel.c
19 #include "pmu.h"
38 static void reprogram_fixed_counters(struct kvm_pmu *pmu, u64 data) in reprogram_fixed_counters() argument
42 for (i = 0; i < pmu->nr_arch_fixed_counters; i++) { in reprogram_fixed_counters()
44 u8 old_ctrl = fixed_ctrl_field(pmu->fixed_ctr_ctrl, i); in reprogram_fixed_counters()
47 pmc = get_fixed_pmc(pmu, MSR_CORE_PERF_FIXED_CTR0 + i); in reprogram_fixed_counters()
52 __set_bit(INTEL_PMC_IDX_FIXED + i, pmu->pmc_in_use); in reprogram_fixed_counters()
56 pmu->fixed_ctr_ctrl = data; in reprogram_fixed_counters()
60 static void global_ctrl_changed(struct kvm_pmu *pmu, u64 data) in global_ctrl_changed() argument
63 u64 diff = pmu->global_ctrl ^ data; in global_ctrl_changed()
65 pmu in global_ctrl_changed()
73 struct kvm_pmu *pmu = pmc_to_pmu(pmc); intel_pmc_perf_hw_id() local
105 struct kvm_pmu *pmu = pmc_to_pmu(pmc); intel_pmc_is_enabled() local
113 intel_pmc_idx_to_pmc(struct kvm_pmu *pmu, int pmc_idx) intel_pmc_idx_to_pmc() argument
128 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_is_valid_rdpmc_ecx() local
140 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_rdpmc_ecx_to_pmc() local
172 get_fw_gp_pmc(struct kvm_pmu *pmu, u32 msr) get_fw_gp_pmc() argument
182 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_is_valid_msr() local
204 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_msr_idx_to_pmc() local
216 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_get_msr() local
256 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_set_msr() local
327 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_refresh() local
406 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_init() local
427 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_reset() local
[all...]
/kernel/linux/linux-6.6/arch/x86/kvm/vmx/
pmu_intel.c
21 #include "pmu.h"
71 static void reprogram_fixed_counters(struct kvm_pmu *pmu, u64 data) in reprogram_fixed_counters() argument
74 u64 old_fixed_ctr_ctrl = pmu->fixed_ctr_ctrl; in reprogram_fixed_counters()
77 pmu->fixed_ctr_ctrl = data; in reprogram_fixed_counters()
78 for (i = 0; i < pmu->nr_arch_fixed_counters; i++) { in reprogram_fixed_counters()
85 pmc = get_fixed_pmc(pmu, MSR_CORE_PERF_FIXED_CTR0 + i); in reprogram_fixed_counters()
87 __set_bit(INTEL_PMC_IDX_FIXED + i, pmu->pmc_in_use); in reprogram_fixed_counters()
92 static struct kvm_pmc *intel_pmc_idx_to_pmc(struct kvm_pmu *pmu, int pmc_idx) in intel_pmc_idx_to_pmc() argument
95 return get_gp_pmc(pmu, MSR_P6_EVNTSEL0 + pmc_idx, in intel_pmc_idx_to_pmc()
100 return get_fixed_pmc(pmu, id in intel_pmc_idx_to_pmc()
106 struct kvm_pmu *pmu = pmc_to_pmu(pmc); intel_hw_event_available() local
130 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_is_valid_rdpmc_ecx() local
142 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_rdpmc_ecx_to_pmc() local
174 get_fw_gp_pmc(struct kvm_pmu *pmu, u32 msr) get_fw_gp_pmc() argument
202 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_is_valid_msr() local
233 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_msr_idx_to_pmc() local
257 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_create_guest_lbr_event() local
350 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_get_msr() local
393 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_set_msr() local
469 setup_fixed_pmc_eventsel(struct kvm_pmu *pmu) setup_fixed_pmc_eventsel() argument
487 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_refresh() local
611 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_init() local
717 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); vmx_passthrough_lbr_msrs() local
748 intel_pmu_cross_mapped_check(struct kvm_pmu *pmu) intel_pmu_cross_mapped_check() argument
[all...]
/kernel/linux/linux-6.6/drivers/perf/
fsl_imx8_ddr_perf.c
43 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
86 { .compatible = "fsl,imx8-ddr-pmu", .data = &imx8_devtype_data},
87 { .compatible = "fsl,imx8m-ddr-pmu", .data = &imx8m_devtype_data},
88 { .compatible = "fsl,imx8mq-ddr-pmu", .data = &imx8mq_devtype_data},
89 { .compatible = "fsl,imx8mm-ddr-pmu", .data = &imx8mm_devtype_data},
90 { .compatible = "fsl,imx8mn-ddr-pmu", .data = &imx8mn_devtype_data},
91 { .compatible = "fsl,imx8mp-ddr-pmu", .data = &imx8mp_devtype_data},
97 struct pmu pmu; member
114 struct ddr_pmu *pmu in ddr_perf_identifier_show() local
124 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_identifier_attr_visible() local
150 ddr_perf_filter_cap_get(struct ddr_pmu *pmu, int cap) ddr_perf_filter_cap_get() argument
171 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_filter_cap_show() local
201 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_cpumask_show() local
320 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_is_enhanced_filtered() local
327 ddr_perf_alloc_counter(struct ddr_pmu *pmu, int event) ddr_perf_alloc_counter() argument
351 ddr_perf_free_counter(struct ddr_pmu *pmu, int counter) ddr_perf_free_counter() argument
356 ddr_perf_read_counter(struct ddr_pmu *pmu, int counter) ddr_perf_read_counter() argument
373 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_init() local
418 ddr_perf_counter_enable(struct ddr_pmu *pmu, int config, int counter, bool enable) ddr_perf_counter_enable() argument
453 ddr_perf_counter_overflow(struct ddr_pmu *pmu, int counter) ddr_perf_counter_overflow() argument
462 ddr_perf_counter_clear(struct ddr_pmu *pmu, int counter) ddr_perf_counter_clear() argument
477 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_update() local
511 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_start() local
528 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_add() local
569 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_stop() local
585 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_del() local
595 ddr_perf_pmu_enable(struct pmu *pmu) ddr_perf_pmu_enable() argument
599 ddr_perf_pmu_disable(struct pmu *pmu) ddr_perf_pmu_disable() argument
603 ddr_perf_init(struct ddr_pmu *pmu, void __iomem *base, struct device *dev) ddr_perf_init() argument
632 struct ddr_pmu *pmu = (struct ddr_pmu *) p; ddr_perf_irq_handler() local
672 struct ddr_pmu *pmu = hlist_entry_safe(node, struct ddr_pmu, node); ddr_perf_offline_cpu() local
692 struct ddr_pmu *pmu; ddr_perf_probe() local
786 struct ddr_pmu *pmu = platform_get_drvdata(pdev); ddr_perf_remove() local
[all...]
fsl_imx9_ddr_perf.c
45 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
57 struct pmu pmu; member
75 {.compatible = "fsl,imx93-ddr-pmu", .data = &imx93_devtype_data},
84 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_identifier_show() local
86 return sysfs_emit(page, "%s\n", pmu->devtype_data->identifier); in ddr_perf_identifier_show()
104 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_cpumask_show() local
106 return cpumap_print_to_pagebuf(true, buf, cpumask_of(pmu->cpu)); in ddr_perf_cpumask_show()
271 static void ddr_perf_clear_counter(struct ddr_pmu *pmu, int counter) in ddr_perf_clear_counter() argument
274 writel(0, pmu in ddr_perf_clear_counter()
281 ddr_perf_read_counter(struct ddr_pmu *pmu, int counter) ddr_perf_read_counter() argument
304 ddr_perf_counter_global_config(struct ddr_pmu *pmu, bool enable) ddr_perf_counter_global_config() argument
338 ddr_perf_counter_local_config(struct ddr_pmu *pmu, int config, int counter, bool enable) ddr_perf_counter_local_config() argument
364 ddr_perf_monitor_config(struct ddr_pmu *pmu, int cfg, int cfg1, int cfg2) ddr_perf_monitor_config() argument
401 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_update() local
415 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_init() local
453 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_start() local
465 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_add() local
490 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_stop() local
502 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_del() local
511 ddr_perf_pmu_enable(struct pmu *pmu) ddr_perf_pmu_enable() argument
518 ddr_perf_pmu_disable(struct pmu *pmu) ddr_perf_pmu_disable() argument
525 ddr_perf_init(struct ddr_pmu *pmu, void __iomem *base, struct device *dev) ddr_perf_init() argument
550 struct ddr_pmu *pmu = (struct ddr_pmu *)p; ddr_perf_irq_handler() local
581 struct ddr_pmu *pmu = hlist_entry_safe(node, struct ddr_pmu, node); ddr_perf_offline_cpu() local
601 struct ddr_pmu *pmu; ddr_perf_probe() local
684 struct ddr_pmu *pmu = platform_get_drvdata(pdev); ddr_perf_remove() local
[all...]
marvell_cn10k_ddr_pmu.c
125 struct pmu pmu; member
135 #define to_cn10k_ddr_pmu(p) container_of(p, struct cn10k_ddr_pmu, pmu)
233 struct cn10k_ddr_pmu *pmu = dev_get_drvdata(dev); in cn10k_ddr_perf_cpumask_show() local
235 return cpumap_print_to_pagebuf(true, buf, cpumask_of(pmu->cpu)); in cn10k_ddr_perf_cpumask_show()
289 static int cn10k_ddr_perf_alloc_counter(struct cn10k_ddr_pmu *pmu, in cn10k_ddr_perf_alloc_counter() argument
297 pmu->events[DDRC_PERF_READ_COUNTER_IDX] = event; in cn10k_ddr_perf_alloc_counter()
303 pmu->events[DDRC_PERF_WRITE_COUNTER_IDX] = event; in cn10k_ddr_perf_alloc_counter()
309 if (pmu->events[i] == NULL) { in cn10k_ddr_perf_alloc_counter()
310 pmu in cn10k_ddr_perf_alloc_counter()
318 cn10k_ddr_perf_free_counter(struct cn10k_ddr_pmu *pmu, int counter) cn10k_ddr_perf_free_counter() argument
325 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_init() local
354 cn10k_ddr_perf_counter_enable(struct cn10k_ddr_pmu *pmu, int counter, bool enable) cn10k_ddr_perf_counter_enable() argument
392 cn10k_ddr_perf_read_counter(struct cn10k_ddr_pmu *pmu, int counter) cn10k_ddr_perf_read_counter() argument
408 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_update() local
424 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_start() local
437 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_add() local
483 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_stop() local
497 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_del() local
512 cn10k_ddr_perf_pmu_enable(struct pmu *pmu) cn10k_ddr_perf_pmu_enable() argument
520 cn10k_ddr_perf_pmu_disable(struct pmu *pmu) cn10k_ddr_perf_pmu_disable() argument
528 cn10k_ddr_perf_event_update_all(struct cn10k_ddr_pmu *pmu) cn10k_ddr_perf_event_update_all() argument
550 cn10k_ddr_pmu_overflow_handler(struct cn10k_ddr_pmu *pmu) cn10k_ddr_pmu_overflow_handler() argument
602 struct cn10k_ddr_pmu *pmu = container_of(hrtimer, struct cn10k_ddr_pmu, cn10k_ddr_pmu_timer_handler() local
616 struct cn10k_ddr_pmu *pmu = hlist_entry_safe(node, struct cn10k_ddr_pmu, cn10k_ddr_pmu_offline_cpu() local
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/
i915_pmu.c
142 static bool pmu_needs_timer(struct i915_pmu *pmu) in pmu_needs_timer() argument
144 struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu); in pmu_needs_timer()
152 enable = pmu->enable; in pmu_needs_timer()
194 static u64 read_sample(struct i915_pmu *pmu, unsigned int gt_id, int sample) in read_sample() argument
196 return pmu->sample[gt_id][sample].cur; in read_sample()
200 store_sample(struct i915_pmu *pmu, unsigned int gt_id, int sample, u64 val) in store_sample() argument
202 pmu->sample[gt_id][sample].cur = val; in store_sample()
206 add_sample_mult(struct i915_pmu *pmu, unsigned int gt_id, int sample, u32 val, u32 mul) in add_sample_mult() argument
208 pmu in add_sample_mult()
215 struct i915_pmu *pmu = &i915->pmu; get_rc6() local
252 init_rc6(struct i915_pmu *pmu) init_rc6() argument
274 struct i915_pmu *pmu = &gt->i915->pmu; park_rc6() local
280 __i915_pmu_maybe_start_timer(struct i915_pmu *pmu) __i915_pmu_maybe_start_timer() argument
293 struct i915_pmu *pmu = &gt->i915->pmu; i915_pmu_gt_parked() local
315 struct i915_pmu *pmu = &gt->i915->pmu; i915_pmu_gt_unparked() local
351 struct intel_engine_pmu *pmu = &engine->pmu; engine_sample() local
418 frequency_sampling_enabled(struct i915_pmu *pmu, unsigned int gt) frequency_sampling_enabled() argument
430 struct i915_pmu *pmu = &i915->pmu; frequency_sample() local
473 struct i915_pmu *pmu = &i915->pmu; i915_sample() local
591 struct i915_pmu *pmu = &i915->pmu; i915_pmu_event_init() local
633 struct i915_pmu *pmu = &i915->pmu; __i915_pmu_event_read() local
692 struct i915_pmu *pmu = &i915->pmu; i915_pmu_event_read() local
714 struct i915_pmu *pmu = &i915->pmu; i915_pmu_enable() local
778 struct i915_pmu *pmu = &i915->pmu; i915_pmu_disable() local
824 struct i915_pmu *pmu = &i915->pmu; i915_pmu_event_start() local
837 struct i915_pmu *pmu = &i915->pmu; i915_pmu_event_stop() local
854 struct i915_pmu *pmu = &i915->pmu; i915_pmu_event_add() local
984 create_event_attributes(struct i915_pmu *pmu) create_event_attributes() argument
1136 free_event_attributes(struct i915_pmu *pmu) free_event_attributes() argument
1154 struct i915_pmu *pmu = hlist_entry_safe(node, typeof(*pmu), cpuhp.node); i915_pmu_cpu_online() local
1167 struct i915_pmu *pmu = hlist_entry_safe(node, typeof(*pmu), cpuhp.node); i915_pmu_cpu_offline() local
1222 i915_pmu_register_cpuhp_state(struct i915_pmu *pmu) i915_pmu_register_cpuhp_state() argument
1230 i915_pmu_unregister_cpuhp_state(struct i915_pmu *pmu) i915_pmu_unregister_cpuhp_state() argument
1248 struct i915_pmu *pmu = &i915->pmu; i915_pmu_register() local
1329 struct i915_pmu *pmu = &i915->pmu; i915_pmu_unregister() local
[all...]
/kernel/linux/linux-5.10/drivers/perf/
fsl_imx8_ddr_perf.c
40 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
66 { .compatible = "fsl,imx8-ddr-pmu", .data = &imx8_devtype_data},
67 { .compatible = "fsl,imx8m-ddr-pmu", .data = &imx8m_devtype_data},
68 { .compatible = "fsl,imx8mp-ddr-pmu", .data = &imx8mp_devtype_data},
74 struct pmu pmu; member
94 static u32 ddr_perf_filter_cap_get(struct ddr_pmu *pmu, int cap) in ddr_perf_filter_cap_get() argument
96 u32 quirks = pmu->devtype_data->quirks; in ddr_perf_filter_cap_get()
115 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_filter_cap_show() local
121 ddr_perf_filter_cap_get(pmu, ca in ddr_perf_filter_cap_show()
146 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_cpumask_show() local
267 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_is_enhanced_filtered() local
274 ddr_perf_alloc_counter(struct ddr_pmu *pmu, int event) ddr_perf_alloc_counter() argument
298 ddr_perf_free_counter(struct ddr_pmu *pmu, int counter) ddr_perf_free_counter() argument
303 ddr_perf_read_counter(struct ddr_pmu *pmu, int counter) ddr_perf_read_counter() argument
320 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_init() local
368 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_update() local
384 ddr_perf_counter_enable(struct ddr_pmu *pmu, int config, int counter, bool enable) ddr_perf_counter_enable() argument
410 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_start() local
427 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_add() local
469 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_stop() local
485 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_del() local
496 ddr_perf_pmu_enable(struct pmu *pmu) ddr_perf_pmu_enable() argument
500 ddr_perf_pmu_disable(struct pmu *pmu) ddr_perf_pmu_disable() argument
504 ddr_perf_init(struct ddr_pmu *pmu, void __iomem *base, struct device *dev) ddr_perf_init() argument
533 struct ddr_pmu *pmu = (struct ddr_pmu *) p; ddr_perf_irq_handler() local
576 struct ddr_pmu *pmu = hlist_entry_safe(node, struct ddr_pmu, node); ddr_perf_offline_cpu() local
596 struct ddr_pmu *pmu; ddr_perf_probe() local
690 struct ddr_pmu *pmu = platform_get_drvdata(pdev); ddr_perf_remove() local
[all...]
/kernel/linux/linux-6.6/drivers/perf/amlogic/
meson_ddr_pmu_core.c
21 struct pmu pmu; member
35 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
38 static void dmc_pmu_enable(struct ddr_pmu *pmu) in dmc_pmu_enable() argument
40 if (!pmu->pmu_enabled) in dmc_pmu_enable()
41 pmu->info.hw_info->enable(&pmu->info); in dmc_pmu_enable()
43 pmu->pmu_enabled = true; in dmc_pmu_enable()
46 static void dmc_pmu_disable(struct ddr_pmu *pmu) in dmc_pmu_disable() argument
48 if (pmu in dmc_pmu_disable()
56 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_set_axi_filter() local
87 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_update() local
120 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_init() local
145 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_start() local
175 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_stop() local
192 struct ddr_pmu *pmu = dev_get_drvdata(dev); meson_ddr_perf_cpumask_show() local
294 struct pmu *pmu = dev_get_drvdata(kobj_to_dev(kobj)); meson_ddr_perf_format_attr_visible() local
322 struct ddr_pmu *pmu = dev_get_drvdata(dev); meson_ddr_perf_identifier_show() local
350 struct ddr_pmu *pmu; dmc_irq_handler() local
395 struct ddr_pmu *pmu = hlist_entry_safe(node, struct ddr_pmu, node); ddr_perf_offline_cpu() local
413 fill_event_attr(struct ddr_pmu *pmu) fill_event_attr() argument
486 struct ddr_pmu *pmu; meson_ddr_pmu_create() local
556 struct ddr_pmu *pmu = platform_get_drvdata(pdev); meson_ddr_pmu_remove() local
[all...]
/kernel/linux/linux-6.6/drivers/pmdomain/starfive/
jh71xx-pmu.c
16 #include <dt-bindings/power/starfive,jh7110-pmu.h>
34 /* pmu int status */
66 spinlock_t lock; /* protects pmu reg */
71 struct jh71xx_pmu *pmu; member
77 struct jh71xx_pmu *pmu = pmd->pmu; in jh71xx_pmu_get_state() local
82 *is_on = readl(pmu->base + JH71XX_PMU_CURR_POWER_MODE) & mask; in jh71xx_pmu_get_state()
89 struct jh71xx_pmu *pmu = pmd->pmu; in jh71xx_pmu_set_state() local
100 dev_dbg(pmu in jh71xx_pmu_set_state()
187 jh71xx_pmu_int_enable(struct jh71xx_pmu *pmu, u32 mask, bool enable) jh71xx_pmu_int_enable() argument
206 struct jh71xx_pmu *pmu = data; jh71xx_pmu_interrupt() local
229 jh71xx_pmu_init_domain(struct jh71xx_pmu *pmu, int index) jh71xx_pmu_init_domain() argument
266 struct jh71xx_pmu *pmu; jh71xx_pmu_probe() local
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/
i915_pmu.c
82 static bool pmu_needs_timer(struct i915_pmu *pmu, bool gpu_active) in pmu_needs_timer() argument
84 struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu); in pmu_needs_timer()
92 enable = pmu->enable; in pmu_needs_timer()
150 struct i915_pmu *pmu = &i915->pmu; in get_rc6() local
161 spin_lock_irqsave(&pmu->lock, flags); in get_rc6()
164 pmu->sample[__I915_SAMPLE_RC6].cur = val; in get_rc6()
173 val = ktime_since(pmu->sleep_last); in get_rc6()
174 val += pmu in get_rc6()
187 init_rc6(struct i915_pmu *pmu) init_rc6() argument
202 struct i915_pmu *pmu = &i915->pmu; park_rc6() local
215 init_rc6(struct i915_pmu *pmu) init_rc6() argument
220 __i915_pmu_maybe_start_timer(struct i915_pmu *pmu) __i915_pmu_maybe_start_timer() argument
233 struct i915_pmu *pmu = &i915->pmu; i915_pmu_gt_parked() local
253 struct i915_pmu *pmu = &i915->pmu; i915_pmu_gt_unparked() local
286 struct intel_engine_pmu *pmu = &engine->pmu; engine_sample() local
355 frequency_sampling_enabled(struct i915_pmu *pmu) frequency_sampling_enabled() argument
367 struct i915_pmu *pmu = &i915->pmu; frequency_sample() local
412 struct i915_pmu *pmu = &i915->pmu; i915_sample() local
561 struct i915_pmu *pmu = &i915->pmu; __i915_pmu_event_read() local
627 struct i915_pmu *pmu = &i915->pmu; i915_pmu_enable() local
687 struct i915_pmu *pmu = &i915->pmu; i915_pmu_disable() local
860 create_event_attributes(struct i915_pmu *pmu) create_event_attributes() argument
992 free_event_attributes(struct i915_pmu *pmu) free_event_attributes() argument
1010 struct i915_pmu *pmu = hlist_entry_safe(node, typeof(*pmu), cpuhp.node); i915_pmu_cpu_online() local
1023 struct i915_pmu *pmu = hlist_entry_safe(node, typeof(*pmu), cpuhp.node); i915_pmu_cpu_offline() local
1040 i915_pmu_register_cpuhp_state(struct i915_pmu *pmu) i915_pmu_register_cpuhp_state() argument
1063 i915_pmu_unregister_cpuhp_state(struct i915_pmu *pmu) i915_pmu_unregister_cpuhp_state() argument
1086 struct i915_pmu *pmu = &i915->pmu; i915_pmu_register() local
1167 struct i915_pmu *pmu = &i915->pmu; i915_pmu_unregister() local
[all...]
/kernel/linux/linux-6.6/arch/x86/kvm/svm/
pmu.c
20 #include "pmu.h"
28 static struct kvm_pmc *amd_pmc_idx_to_pmc(struct kvm_pmu *pmu, int pmc_idx) in amd_pmc_idx_to_pmc() argument
30 unsigned int num_counters = pmu->nr_arch_gp_counters; in amd_pmc_idx_to_pmc()
35 return &pmu->gp_counters[array_index_nospec(pmc_idx, num_counters)]; in amd_pmc_idx_to_pmc()
38 static inline struct kvm_pmc *get_gp_pmc_amd(struct kvm_pmu *pmu, u32 msr, in get_gp_pmc_amd() argument
41 struct kvm_vcpu *vcpu = pmu_to_vcpu(pmu); in get_gp_pmc_amd()
73 return amd_pmc_idx_to_pmc(pmu, idx); in get_gp_pmc_amd()
83 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_is_valid_rdpmc_ecx() local
87 return idx < pmu->nr_arch_gp_counters; in amd_is_valid_rdpmc_ecx()
99 struct kvm_pmu *pmu in amd_msr_idx_to_pmc() local
110 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_is_valid_msr() local
133 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_get_msr() local
155 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_set_msr() local
183 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_refresh() local
222 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_init() local
[all...]
/kernel/linux/linux-5.10/drivers/soc/dove/
pmu.c
17 #include <linux/soc/dove/pmu.h>
50 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_reset() local
54 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_reset()
55 val = readl_relaxed(pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
56 writel_relaxed(val & ~BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
57 writel_relaxed(val | BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
58 spin_unlock_irqrestore(&pmu->lock, flags); in pmu_reset_reset()
65 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_assert() local
69 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_assert()
70 val &= readl_relaxed(pmu in pmu_reset_assert()
79 struct pmu_data *pmu = rcdev_to_pmu(rc); pmu_reset_deassert() local
103 pmu_reset_init(struct pmu_data *pmu) pmu_reset_init() argument
115 pmu_reset_init(struct pmu_data *pmu) pmu_reset_init() argument
121 struct pmu_data *pmu; global() member
146 struct pmu_data *pmu = pmu_dom->pmu; pmu_domain_power_off() local
180 struct pmu_data *pmu = pmu_dom->pmu; pmu_domain_power_on() local
228 struct pmu_data *pmu = irq_desc_get_handler_data(desc); pmu_irq_handler() local
266 dove_init_pmu_irq(struct pmu_data *pmu, int irq) dove_init_pmu_irq() argument
312 struct pmu_data *pmu; dove_init_pmu_legacy() local
376 struct pmu_data *pmu; dove_init_pmu() local
[all...]
/kernel/linux/linux-6.6/drivers/soc/dove/
pmu.c
17 #include <linux/soc/dove/pmu.h>
50 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_reset() local
54 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_reset()
55 val = readl_relaxed(pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
56 writel_relaxed(val & ~BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
57 writel_relaxed(val | BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
58 spin_unlock_irqrestore(&pmu->lock, flags); in pmu_reset_reset()
65 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_assert() local
69 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_assert()
70 val &= readl_relaxed(pmu in pmu_reset_assert()
79 struct pmu_data *pmu = rcdev_to_pmu(rc); pmu_reset_deassert() local
103 pmu_reset_init(struct pmu_data *pmu) pmu_reset_init() argument
115 pmu_reset_init(struct pmu_data *pmu) pmu_reset_init() argument
121 struct pmu_data *pmu; global() member
146 struct pmu_data *pmu = pmu_dom->pmu; pmu_domain_power_off() local
180 struct pmu_data *pmu = pmu_dom->pmu; pmu_domain_power_on() local
228 struct pmu_data *pmu = irq_desc_get_handler_data(desc); pmu_irq_handler() local
266 dove_init_pmu_irq(struct pmu_data *pmu, int irq) dove_init_pmu_irq() argument
312 struct pmu_data *pmu; dove_init_pmu_legacy() local
376 struct pmu_data *pmu; dove_init_pmu() local
[all...]
/kernel/linux/linux-6.6/arch/x86/events/intel/
uncore.h
88 struct pmu *pmu; /* for custom pmu ops */ member
122 struct pmu pmu; member
151 struct intel_uncore_pmu *pmu; member
220 return container_of(dev_get_drvdata(dev), struct intel_uncore_pmu, pmu); in dev_to_uncore_pmu()
260 if (offset < box->pmu->type->mmio_map_size) in uncore_mmio_is_valid_offset()
264 offset, box->pmu->type->name); in uncore_mmio_is_valid_offset()
272 return box->pmu in uncore_mmio_box_ctl()
308 struct intel_uncore_pmu *pmu = box->pmu; uncore_msr_box_offset() local
380 struct intel_uncore_pmu *pmu = box->pmu; uncore_freerunning_counter() local
[all...]
/kernel/linux/linux-5.10/arch/x86/events/intel/
uncore.h
77 struct pmu *pmu; /* for custom pmu ops */ member
111 struct pmu pmu; member
141 struct intel_uncore_pmu *pmu; member
188 return container_of(dev_get_drvdata(dev), struct intel_uncore_pmu, pmu); in dev_to_uncore_pmu()
228 if (offset < box->pmu->type->mmio_map_size) in uncore_mmio_is_valid_offset()
232 offset, box->pmu->type->name); in uncore_mmio_is_valid_offset()
240 return box->pmu in uncore_mmio_box_ctl()
276 struct intel_uncore_pmu *pmu = box->pmu; uncore_msr_box_offset() local
348 struct intel_uncore_pmu *pmu = box->pmu; uncore_freerunning_counter() local
[all...]
/kernel/linux/linux-5.10/arch/x86/kvm/svm/
pmu.c
18 #include "pmu.h"
63 static unsigned int get_msr_base(struct kvm_pmu *pmu, enum pmu_type type) in get_msr_base() argument
65 struct kvm_vcpu *vcpu = pmu_to_vcpu(pmu); in get_msr_base()
114 static inline struct kvm_pmc *get_gp_pmc_amd(struct kvm_pmu *pmu, u32 msr, in get_gp_pmc_amd() argument
142 return &pmu->gp_counters[msr_to_index(msr)]; in get_gp_pmc_amd()
182 static struct kvm_pmc *amd_pmc_idx_to_pmc(struct kvm_pmu *pmu, int pmc_idx) in amd_pmc_idx_to_pmc() argument
184 unsigned int base = get_msr_base(pmu, PMU_TYPE_COUNTER); in amd_pmc_idx_to_pmc()
185 struct kvm_vcpu *vcpu = pmu_to_vcpu(pmu); in amd_pmc_idx_to_pmc()
195 return get_gp_pmc_amd(pmu, base + pmc_idx, PMU_TYPE_COUNTER); in amd_pmc_idx_to_pmc()
201 struct kvm_pmu *pmu in amd_is_valid_rdpmc_ecx() local
212 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_rdpmc_ecx_to_pmc() local
231 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_msr_idx_to_pmc() local
242 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_get_msr() local
264 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_set_msr() local
289 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_refresh() local
309 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_init() local
324 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); amd_pmu_reset() local
[all...]
/kernel/linux/linux-5.10/arch/x86/events/
rapl.c
110 struct pmu *pmu; member
116 struct pmu pmu; member
210 static void rapl_start_hrtimer(struct rapl_pmu *pmu) in rapl_start_hrtimer() argument
212 hrtimer_start(&pmu->hrtimer, pmu->timer_interval, in rapl_start_hrtimer()
218 struct rapl_pmu *pmu = container_of(hrtimer, struct rapl_pmu, hrtimer); in rapl_hrtimer_handle() local
222 if (!pmu->n_active) in rapl_hrtimer_handle()
225 raw_spin_lock_irqsave(&pmu in rapl_hrtimer_handle()
237 rapl_hrtimer_init(struct rapl_pmu *pmu) rapl_hrtimer_init() argument
245 __rapl_pmu_event_start(struct rapl_pmu *pmu, struct perf_event *event) __rapl_pmu_event_start() argument
264 struct rapl_pmu *pmu = event->pmu_private; rapl_pmu_event_start() local
274 struct rapl_pmu *pmu = event->pmu_private; rapl_pmu_event_stop() local
308 struct rapl_pmu *pmu = event->pmu_private; rapl_pmu_event_add() local
333 struct rapl_pmu *pmu; rapl_pmu_event_init() local
546 struct rapl_pmu *pmu = cpu_to_rapl_pmu(cpu); rapl_cpu_offline() local
568 struct rapl_pmu *pmu = cpu_to_rapl_pmu(cpu); rapl_cpu_online() local
[all...]

Completed in 17 milliseconds
