Lines matching refs:target (uses of struct memory_target in the ACPI HMAT code):
95 struct memory_target *target;
97 list_for_each_entry(target, &targets, node)
98 if (target->memory_pxm == mem_pxm)
99 return target;
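Lines 95-99 are the body of the PXM lookup helper find_mem_target() (called at lines 128, 340, 376, 442 and 776 below). A rough reconstruction, assuming the usual global targets list, looks like:

    static struct memory_target *find_mem_target(unsigned int mem_pxm)
    {
        struct memory_target *target;

        /* Walk the global list of known memory targets, keyed by PXM. */
        list_for_each_entry(target, &targets, node)
            if (target->memory_pxm == mem_pxm)
                return target;
        return NULL;    /* no target tracked for this proximity domain */
    }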
126 struct memory_target *target;
128 target = find_mem_target(mem_pxm);
129 if (!target) {
130 target = kzalloc(sizeof(*target), GFP_KERNEL);
131 if (!target)
133 target->memory_pxm = mem_pxm;
134 target->processor_pxm = PXM_INVAL;
135 target->memregions = (struct resource) {
141 list_add_tail(&target->node, &targets);
142 INIT_LIST_HEAD(&target->caches);
147 * in the per-target memregions resource tree.
149 if (!__request_region(&target->memregions, start, len, "memory target",
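Lines 126-149 are the find-or-allocate path for a memory target: a new entry starts with processor_pxm = PXM_INVAL, an empty caches list, and a private memregions resource tree, and every memory range reported for that PXM is then recorded under memregions via __request_region() (line 149). A sketch of that pattern, with initializer values assumed since they are not among the matching lines:

    target->memregions = (struct resource) {
        .name  = "ACPI mem",        /* assumed; not shown in the listing */
        .start = 0,
        .end   = -1,
        .flags = IORESOURCE_MEM,
    };

    /* Later, each memory range reported for this PXM becomes a child resource: */
    if (!__request_region(&target->memregions, start, len,
                          "memory target", IORESOURCE_MEM))
        pr_warn("could not reserve range for memory target\n");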
226 static void hmat_update_target_access(struct memory_target *target,
231 target->hmem_attrs[access].read_latency = value;
232 target->hmem_attrs[access].write_latency = value;
235 target->hmem_attrs[access].read_latency = value;
238 target->hmem_attrs[access].write_latency = value;
241 target->hmem_attrs[access].read_bandwidth = value;
242 target->hmem_attrs[access].write_bandwidth = value;
245 target->hmem_attrs[access].read_bandwidth = value;
248 target->hmem_attrs[access].write_bandwidth = value;
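Lines 226-248 update the cached performance attributes for one access class. The assignments pair up by HMAT data type, so the surrounding switch is presumably along these lines (case labels are the standard ACPI_HMAT_* data types, inferred from context):

    switch (type) {
    case ACPI_HMAT_ACCESS_LATENCY:
        target->hmem_attrs[access].read_latency = value;
        target->hmem_attrs[access].write_latency = value;
        break;
    case ACPI_HMAT_READ_LATENCY:
        target->hmem_attrs[access].read_latency = value;
        break;
    case ACPI_HMAT_WRITE_LATENCY:
        target->hmem_attrs[access].write_latency = value;
        break;
    case ACPI_HMAT_ACCESS_BANDWIDTH:
        target->hmem_attrs[access].read_bandwidth = value;
        target->hmem_attrs[access].write_bandwidth = value;
        break;
    case ACPI_HMAT_READ_BANDWIDTH:
        target->hmem_attrs[access].read_bandwidth = value;
        break;
    case ACPI_HMAT_WRITE_BANDWIDTH:
        target->hmem_attrs[access].write_bandwidth = value;
        break;
    default:
        break;
    }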
298 struct memory_target *target;
340 target = find_mem_target(targs[targ]);
341 if (target && target->processor_pxm == inits[init]) {
342 hmat_update_target_access(target, type, value, 0);
345 hmat_update_target_access(target, type, value, 1);
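Lines 298-345 appear to come from the System Locality Latency and Bandwidth Information (SLLBI) parsing: for each (initiator, target) pair, the value is stored for access class 0 when the initiator is the target's directly attached processor domain, and additionally for access class 1 when that initiator node has CPUs; roughly:

    if (target && target->processor_pxm == inits[init]) {
        hmat_update_target_access(target, type, value, 0);
        /* If the initiator node has CPUs, it also defines access class 1. */
        if (node_state(pxm_to_node(inits[init]), N_CPU))
            hmat_update_target_access(target, type, value, 1);
    }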
361 struct memory_target *target;
376 target = find_mem_target(cache->memory_PD);
377 if (!target)
415 list_add_tail(&tcache->node, &target->caches);
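Lines 361-415 handle the HMAT memory-side cache structure: the cache entry is resolved to its target via cache->memory_PD (line 376) and queued on target->caches, to be handed to the node sysfs layer at registration time (line 698 below).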
424 struct memory_target *target = NULL;
442 target = find_mem_target(p->memory_PD);
443 if (!target) {
448 if (target && p->flags & ACPI_HMAT_PROCESSOR_PD_VALID) {
455 target->processor_pxm = p->processor_PD;
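Lines 424-455 handle the memory proximity domain attributes structure: when ACPI_HMAT_PROCESSOR_PD_VALID is set and the processor domain maps to a real node, the target records its directly attached initiator, roughly:

    if (target && p->flags & ACPI_HMAT_PROCESSOR_PD_VALID) {
        int p_node = pxm_to_node(p->processor_PD);

        if (p_node == NUMA_NO_NODE)
            return -EINVAL;     /* sketch: invalid processor domain */
        target->processor_pxm = p->processor_PD;
    }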
494 static u32 hmat_initiator_perf(struct memory_target *target,
519 if (targs[i] == target->memory_pxm) {
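Lines 494-519 look up one initiator's raw latency or bandwidth figure for this target in a stored SLLBI entry: the initiator's PXM selects the row of the entry table and target->memory_pxm selects the column (line 519).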
586 static void hmat_register_target_initiators(struct memory_target *target)
596 mem_nid = pxm_to_node(target->memory_pxm);
602 if (target->processor_pxm != PXM_INVAL) {
603 cpu_nid = pxm_to_node(target->processor_pxm);
639 value = hmat_initiator_perf(target, initiator,
647 hmat_update_target_access(target, loc->hmat_loc->data_type,
678 value = hmat_initiator_perf(target, initiator, loc->hmat_loc);
685 hmat_update_target_access(target, loc->hmat_loc->data_type, best, 1);
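Lines 586-685 link the target node to its initiators. Access class 0 prefers the directly attached processor domain recorded earlier (lines 602-603); otherwise, and for access class 1 (CPU initiators only), the initiator with the best hmat_initiator_perf() value wins and the winning value is folded back into the target via hmat_update_target_access() (lines 647 and 685). The memory node is then presumably tied to the chosen CPU node with something like:

    register_memory_node_under_compute_node(mem_nid, cpu_nid, access);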
693 static void hmat_register_target_cache(struct memory_target *target)
695 unsigned mem_nid = pxm_to_node(target->memory_pxm);
698 list_for_each_entry(tcache, &target->caches, node)
702 static void hmat_register_target_perf(struct memory_target *target, int access)
704 unsigned mem_nid = pxm_to_node(target->memory_pxm);
705 node_set_perf_attrs(mem_nid, &target->hmem_attrs[access], access);
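Lines 693-705 push the collected data into the generic node layer: each queued cache entry is registered for the target's node (line 698, likely via node_add_cache()), and node_set_perf_attrs() publishes the hmem_attrs for one access class (line 705); it is called twice, once per class (lines 750-751 below).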
708 static void hmat_register_target_devices(struct memory_target *target)
719 for (res = target->memregions.child; res; res = res->sibling) {
720 int target_nid = pxm_to_node(target->memory_pxm);
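Lines 708-720 walk the child ranges recorded under memregions (line 719) and register a device for each range against the target's node, which is what the dax/hmem side can later bind to.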
726 static void hmat_register_target(struct memory_target *target)
728 int nid = pxm_to_node(target->memory_pxm);
734 hmat_register_target_devices(target);
747 if (!target->registered) {
748 hmat_register_target_initiators(target);
749 hmat_register_target_cache(target);
750 hmat_register_target_perf(target, 0);
751 hmat_register_target_perf(target, 1);
752 target->registered = true;
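Lines 726-752 are the per-target registration step: the range devices are created first (line 734), then the initiator links, caches and both access-class attribute sets are registered exactly once, with target->registered guarding against repeat invocations (e.g. from the hotplug path at lines 768-780).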
759 struct memory_target *target;
761 list_for_each_entry(target, &targets, node)
762 hmat_register_target(target);
768 struct memory_target *target;
776 target = find_mem_target(pxm);
777 if (!target)
780 hmat_register_target(target);
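Lines 759-780 cover the two registration entry points: a boot-time pass over every tracked target (lines 761-762), and a memory hotplug notifier that looks up the affected PXM's target and registers it when its memory comes online, so nodes that were empty at boot still get their attributes published.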
791 struct memory_target *target, *tnext;
796 list_for_each_entry_safe(target, tnext, &targets, node) {
799 list_for_each_entry_safe(tcache, cnext, &target->caches, node) {
804 list_del(&target->node);
805 res = target->memregions.child;
808 __release_region(&target->memregions, res->start,
812 kfree(target);
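Lines 791-812 are the teardown path: each target's cache entries are unlinked and freed, every child range is released from the per-target memregions tree with __release_region() (mirroring the __request_region() at line 149), and the target itself is removed from the list and freed.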