Lines matching refs: target (ACPI HMAT memory-target handling)
94 struct memory_target *target;
96 list_for_each_entry(target, &targets, node)
97 if (target->memory_pxm == mem_pxm)
98 return target;
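
A sketch of the lookup these fragments come from: a linear walk of the global targets list keyed by memory proximity domain (kernel-context sketch; the struct layout and list head are inferred from the lines above, not quoted verbatim).

static struct memory_target *find_mem_target(unsigned int mem_pxm)
{
	struct memory_target *target;

	/* Linear search; the number of proximity domains is small. */
	list_for_each_entry(target, &targets, node)
		if (target->memory_pxm == mem_pxm)
			return target;
	return NULL;
}
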
125 struct memory_target *target;
127 target = find_mem_target(mem_pxm);
128 if (!target) {
129 target = kzalloc(sizeof(*target), GFP_KERNEL);
130 if (!target)
132 target->memory_pxm = mem_pxm;
133 target->processor_pxm = PXM_INVAL;
134 target->memregions = (struct resource) {
140 list_add_tail(&target->node, &targets);
141 INIT_LIST_HEAD(&target->caches);
146 * in the per-target memregions resource tree.
148 if (!__request_region(&target->memregions, start, len, "memory target",
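
The allocation path shown above appears to find-or-create one memory_target per memory PXM and then record each physical address range under a per-target resource tree; a sketch, with the function name and the resource initializer fields assumed rather than quoted:

static __init void alloc_memory_target(unsigned int mem_pxm,
				       resource_size_t start,
				       resource_size_t len)
{
	struct memory_target *target;

	target = find_mem_target(mem_pxm);
	if (!target) {
		target = kzalloc(sizeof(*target), GFP_KERNEL);
		if (!target)
			return;
		target->memory_pxm = mem_pxm;
		target->processor_pxm = PXM_INVAL;
		/* Root of the per-target resource tree (assumed fields). */
		target->memregions = (struct resource) {
			.name	= "ACPI mem",
			.start	= 0,
			.end	= -1,
			.flags	= IORESOURCE_MEM,
		};
		list_add_tail(&target->node, &targets);
		INIT_LIST_HEAD(&target->caches);
	}

	/*
	 * There are potentially multiple ranges per PXM, so record each
	 * in the per-target memregions resource tree.
	 */
	if (!__request_region(&target->memregions, start, len,
			      "memory target", IORESOURCE_MEM))
		pr_warn("failed to reserve %#llx - %#llx in pxm: %d\n",
			start, start + len, mem_pxm);
}
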
225 static void hmat_update_target_access(struct memory_target *target,
230 target->hmem_attrs[access].read_latency = value;
231 target->hmem_attrs[access].write_latency = value;
234 target->hmem_attrs[access].read_latency = value;
237 target->hmem_attrs[access].write_latency = value;
240 target->hmem_attrs[access].read_bandwidth = value;
241 target->hmem_attrs[access].write_bandwidth = value;
244 target->hmem_attrs[access].read_bandwidth = value;
247 target->hmem_attrs[access].write_bandwidth = value;
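
The repeated latency/bandwidth assignments suggest a switch over the HMAT data type, where the combined "access" types update both the read and write fields of the selected access class; a sketch, assuming the ACPI_HMAT_* data-type constants from the ACPI HMAT table definitions:

static void hmat_update_target_access(struct memory_target *target,
				      u8 type, u32 value, int access)
{
	switch (type) {
	case ACPI_HMAT_ACCESS_LATENCY:
		target->hmem_attrs[access].read_latency = value;
		target->hmem_attrs[access].write_latency = value;
		break;
	case ACPI_HMAT_READ_LATENCY:
		target->hmem_attrs[access].read_latency = value;
		break;
	case ACPI_HMAT_WRITE_LATENCY:
		target->hmem_attrs[access].write_latency = value;
		break;
	case ACPI_HMAT_ACCESS_BANDWIDTH:
		target->hmem_attrs[access].read_bandwidth = value;
		target->hmem_attrs[access].write_bandwidth = value;
		break;
	case ACPI_HMAT_READ_BANDWIDTH:
		target->hmem_attrs[access].read_bandwidth = value;
		break;
	case ACPI_HMAT_WRITE_BANDWIDTH:
		target->hmem_attrs[access].write_bandwidth = value;
		break;
	default:
		break;
	}
}
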
297 struct memory_target *target;
339 target = find_mem_target(targs[targ]);
340 if (target && target->processor_pxm == inits[init]) {
341 hmat_update_target_access(target, type, value, 0);
344 hmat_update_target_access(target, type, value, 1);
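
These fragments sit inside the System Locality Latency/Bandwidth parsing loop: each (initiator, target) entry is matched to a memory target, access class 0 is updated when the entry's initiator is the target's local processor domain, and access class 1 is updated only when that initiator is a CPU node. A sketch of the inner check, with the surrounding loop variables (mem_hier, inits, targs, type, value) taken as given and the node_state() gate for access 1 assumed:

	if (mem_hier == ACPI_HMAT_MEMORY) {
		target = find_mem_target(targs[targ]);
		if (target && target->processor_pxm == inits[init]) {
			/* Access 0: nearest initiator of any kind. */
			hmat_update_target_access(target, type, value, 0);
			/* Access 1: only if the initiator node has CPUs. */
			if (node_state(pxm_to_node(inits[init]), N_CPU))
				hmat_update_target_access(target, type, value, 1);
		}
	}
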
360 struct memory_target *target;
375 target = find_mem_target(cache->memory_PD);
376 if (!target)
414 list_add_tail(&tcache->node, &target->caches);
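
The cache subtable handler resolves the cache's memory proximity domain to an existing target and, if found, appends a per-cache record to that target's caches list; a sketch with the attribute decoding elided:

static __init int hmat_parse_cache(union acpi_subtable_headers *header,
				   const unsigned long end)
{
	struct acpi_hmat_cache *cache = (void *)header;
	struct memory_target *target;
	struct target_cache *tcache;

	if (cache->header.length < sizeof(*cache))
		return -EINVAL;

	target = find_mem_target(cache->memory_PD);
	if (!target)
		return 0;

	tcache = kzalloc(sizeof(*tcache), GFP_KERNEL);
	if (!tcache)
		return 0;

	tcache->cache_attrs.size = cache->cache_size;
	/* ... decode level, line size, associativity, write policy ... */
	list_add_tail(&tcache->node, &target->caches);
	return 0;
}
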
423 struct memory_target *target = NULL;
441 target = find_mem_target(p->memory_PD);
442 if (!target) {
447 if (target && p->flags & ACPI_HMAT_PROCESSOR_PD_VALID) {
454 target->processor_pxm = p->processor_PD;
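
The proximity-domain (address range) handler links a memory domain to its local processor domain: the memory PXM must already have a target, and the processor PXM is only recorded when the PROCESSOR_PD_VALID flag is set and maps to a real node. A sketch of that tail, with the revision-specific validity checks and logging elided:

	target = find_mem_target(p->memory_PD);
	if (!target) {
		pr_debug("Memory Domain missing from SRAT\n");
		return -EINVAL;
	}

	if (target && p->flags & ACPI_HMAT_PROCESSOR_PD_VALID) {
		int p_node = pxm_to_node(p->processor_PD);

		if (p_node == NUMA_NO_NODE) {
			pr_debug("Invalid Processor Domain\n");
			return -EINVAL;
		}
		target->processor_pxm = p->processor_PD;
	}
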
493 static u32 hmat_initiator_perf(struct memory_target *target,
518 if (targs[i] == target->memory_pxm) {
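
hmat_initiator_perf() evidently looks up the raw locality entry for one (initiator, target) pair: the initiator and target PXM arrays that follow the acpi_hmat_locality header are scanned for matching indexes, and the corresponding entry is normalized. A sketch, with hmat_normalize() assumed to be the driver's base-unit scaling helper:

static u32 hmat_initiator_perf(struct memory_target *target,
			       struct memory_initiator *initiator,
			       struct acpi_hmat_locality *hmat_loc)
{
	unsigned int ipds, tpds, i, idx = 0, tdx = 0;
	u32 *inits, *targs;
	u16 *entries;

	ipds = hmat_loc->number_of_initiator_Pds;
	tpds = hmat_loc->number_of_target_Pds;
	inits = (u32 *)(hmat_loc + 1);
	targs = inits + ipds;
	entries = (u16 *)(targs + tpds);

	for (i = 0; i < ipds; i++) {
		if (inits[i] == initiator->processor_pxm) {
			idx = i;
			break;
		}
	}
	if (i == ipds)
		return 0;

	for (i = 0; i < tpds; i++) {
		if (targs[i] == target->memory_pxm) {
			tdx = i;
			break;
		}
	}
	if (i == tpds)
		return 0;

	return hmat_normalize(entries[idx * tpds + tdx],
			      hmat_loc->entry_base_unit,
			      hmat_loc->data_type);
}
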
585 static void hmat_register_target_initiators(struct memory_target *target)
595 mem_nid = pxm_to_node(target->memory_pxm);
601 if (target->processor_pxm != PXM_INVAL) {
602 cpu_nid = pxm_to_node(target->processor_pxm);
638 value = hmat_initiator_perf(target, initiator,
646 hmat_update_target_access(target, loc->hmat_loc->data_type,
677 value = hmat_initiator_perf(target, initiator, loc->hmat_loc);
684 hmat_update_target_access(target, loc->hmat_loc->data_type, best, 1);
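
hmat_register_target_initiators() links the target's NUMA node to its best initiators. If the proximity-domain structure already named a local processor PXM, that node is linked directly; otherwise every recorded locality type is scanned and only the best-performing initiators are kept. A condensed sketch of the access-0 pass; the in-tree code folds the two inner passes into one over a sorted initiator list and repeats the selection for access class 1 restricted to CPU initiators (lines 677 and 684 above). Helper names such as hmat_update_best(), localities_types[] and register_memory_node_under_compute_node() are assumptions here:

static void hmat_register_target_initiators(struct memory_target *target)
{
	static DECLARE_BITMAP(p_nodes, MAX_NUMNODES);
	struct memory_initiator *initiator;
	struct memory_locality *loc;
	unsigned int mem_nid, cpu_nid;
	u32 best;
	int i;

	mem_nid = pxm_to_node(target->memory_pxm);

	/* A local processor domain from the table wins outright. */
	if (target->processor_pxm != PXM_INVAL) {
		cpu_nid = pxm_to_node(target->processor_pxm);
		register_memory_node_under_compute_node(mem_nid, cpu_nid, 0);
		return;
	}

	/* Candidate set: every PXM that declared itself an initiator. */
	bitmap_zero(p_nodes, MAX_NUMNODES);
	list_for_each_entry(initiator, &initiators, node)
		set_bit(initiator->processor_pxm, p_nodes);

	for (i = WRITE_LATENCY; i <= READ_BANDWIDTH; i++) {
		loc = localities_types[i];
		if (!loc)
			continue;

		/* Find the best value for this attribute... */
		best = 0;
		list_for_each_entry(initiator, &initiators, node)
			hmat_update_best(loc->hmat_loc->data_type,
					 hmat_initiator_perf(target, initiator,
							     loc->hmat_loc),
					 &best);

		/* ...then drop initiators that do not achieve it. */
		list_for_each_entry(initiator, &initiators, node) {
			u32 value = hmat_initiator_perf(target, initiator,
							loc->hmat_loc);
			if (value != best)
				clear_bit(initiator->processor_pxm, p_nodes);
		}
		if (best)
			hmat_update_target_access(target,
						  loc->hmat_loc->data_type,
						  best, 0);
	}

	/* Whatever survived every attribute is a "nearest" initiator. */
	for_each_set_bit(i, p_nodes, MAX_NUMNODES)
		register_memory_node_under_compute_node(mem_nid,
							pxm_to_node(i), 0);
}
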
692 static void hmat_register_target_cache(struct memory_target *target)
694 unsigned mem_nid = pxm_to_node(target->memory_pxm);
697 list_for_each_entry(tcache, &target->caches, node)
701 static void hmat_register_target_perf(struct memory_target *target, int access)
703 unsigned mem_nid = pxm_to_node(target->memory_pxm);
704 node_set_perf_attrs(mem_nid, &target->hmem_attrs[access], access);
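
Both registration helpers above just translate the memory PXM to a NUMA node and push the collected attributes into the generic node interfaces; a sketch, assuming node_add_cache() and node_set_perf_attrs() from the core node driver:

static void hmat_register_target_cache(struct memory_target *target)
{
	unsigned mem_nid = pxm_to_node(target->memory_pxm);
	struct target_cache *tcache;

	list_for_each_entry(tcache, &target->caches, node)
		node_add_cache(mem_nid, &tcache->cache_attrs);
}

static void hmat_register_target_perf(struct memory_target *target, int access)
{
	unsigned mem_nid = pxm_to_node(target->memory_pxm);

	node_set_perf_attrs(mem_nid, &target->hmem_attrs[access], access);
}
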
707 static void hmat_register_target_devices(struct memory_target *target)
718 for (res = target->memregions.child; res; res = res->sibling) {
719 int target_nid = pxm_to_node(target->memory_pxm);
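
hmat_register_target_devices() walks the child resources collected under target->memregions and registers each range so the hmem/dax driver can claim it; a sketch, assuming hmem_register_device() and a CONFIG_DEV_DAX_HMEM gate:

static void hmat_register_target_devices(struct memory_target *target)
{
	struct resource *res;

	/* No point creating devices nothing is built to consume. */
	if (!IS_ENABLED(CONFIG_DEV_DAX_HMEM))
		return;

	for (res = target->memregions.child; res; res = res->sibling) {
		int target_nid = pxm_to_node(target->memory_pxm);

		hmem_register_device(target_nid, res);
	}
}
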
725 static void hmat_register_target(struct memory_target *target)
727 int nid = pxm_to_node(target->memory_pxm);
733 hmat_register_target_devices(target);
746 if (!target->registered) {
747 hmat_register_target_initiators(target);
748 hmat_register_target_cache(target);
749 hmat_register_target_perf(target, 0);
750 hmat_register_target_perf(target, 1);
751 target->registered = true;
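
hmat_register_target() is the per-target entry point: device ranges are registered unconditionally, while initiator links, caches and performance attributes are only published once the node is online, and a registered flag keeps hotplug replays idempotent. A sketch, with the target_lock mutex assumed:

static void hmat_register_target(struct memory_target *target)
{
	int nid = pxm_to_node(target->memory_pxm);

	/* Device ranges may belong to an offline node; add them anyway. */
	hmat_register_target_devices(target);

	/* Defer the rest until the node is online (e.g. after hotplug). */
	mutex_lock(&target_lock);
	if (node_online(nid) && !target->registered) {
		hmat_register_target_initiators(target);
		hmat_register_target_cache(target);
		hmat_register_target_perf(target, 0);
		hmat_register_target_perf(target, 1);
		target->registered = true;
	}
	mutex_unlock(&target_lock);
}
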
758 struct memory_target *target;
760 list_for_each_entry(target, &targets, node)
761 hmat_register_target(target);
767 struct memory_target *target;
775 target = find_mem_target(pxm);
776 if (!target)
779 hmat_register_target(target);
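
The last two callers are the boot-time loop over every parsed target and a memory-hotplug notifier that maps a newly onlined node back to its PXM and registers just that target; a sketch of the notifier, assuming a standard memory_notify callback:

static int hmat_callback(struct notifier_block *self,
			 unsigned long action, void *arg)
{
	struct memory_notify *mnb = arg;
	int pxm, nid = mnb->status_change_nid;
	struct memory_target *target;

	if (nid == NUMA_NO_NODE || action != MEM_ONLINE)
		return NOTIFY_OK;

	pxm = node_to_pxm(nid);
	target = find_mem_target(pxm);
	if (!target)
		return NOTIFY_OK;

	hmat_register_target(target);
	return NOTIFY_OK;
}
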
785 struct memory_target *target, *tnext;
790 list_for_each_entry_safe(target, tnext, &targets, node) {
793 list_for_each_entry_safe(tcache, cnext, &target->caches, node) {
798 list_del(&target->node);
799 res = target->memregions.child;
802 __release_region(&target->memregions, res->start,
806 kfree(target);
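
Teardown unwinds everything built above: per-target cache records are freed, every range is released from the memregions resource tree, and the target is unlinked and freed; a sketch of the target portion (initiator and locality records are freed the same way):

static __init void hmat_free_structures(void)
{
	struct memory_target *target, *tnext;
	struct target_cache *tcache, *cnext;

	list_for_each_entry_safe(target, tnext, &targets, node) {
		struct resource *res, *res_next;

		list_for_each_entry_safe(tcache, cnext, &target->caches, node) {
			list_del(&tcache->node);
			kfree(tcache);
		}

		list_del(&target->node);
		res = target->memregions.child;
		while (res) {
			res_next = res->sibling;
			__release_region(&target->memregions, res->start,
					 resource_size(res));
			res = res_next;
		}
		kfree(target);
	}
}
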