Lines matching references to "set"

75 static inline void init_cpu_mask_set(struct cpu_mask_set *set)
77 cpumask_clear(&set->mask);
78 cpumask_clear(&set->used);
79 set->gen = 0;
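
These matches appear to come from the hfi1 driver's CPU affinity code. The helpers operate on a small bookkeeping structure holding the CPUs a pool may hand out (mask), the CPUs currently handed out (used), and a generation counter that ticks each time the pool wraps. A minimal sketch of that structure and its initializer, inferred from the matched lines (the exact field types are an assumption):

#include <linux/cpumask.h>

/* Inferred layout: the CPUs a pool may hand out, the CPUs currently
 * handed out, and a generation counter that ticks on every wrap. */
struct cpu_mask_set {
	struct cpumask mask;
	struct cpumask used;
	unsigned int gen;
};

static inline void init_cpu_mask_set(struct cpu_mask_set *set)
{
	cpumask_clear(&set->mask);	/* empty pool */
	cpumask_clear(&set->used);	/* nothing handed out yet */
	set->gen = 0;
}
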
82 /* Increment generation of CPU set if needed */
83 static void _cpu_mask_set_gen_inc(struct cpu_mask_set *set)
85 if (cpumask_equal(&set->mask, &set->used)) {
90 set->gen++;
91 cpumask_clear(&set->used);
95 static void _cpu_mask_set_gen_dec(struct cpu_mask_set *set)
97 if (cpumask_empty(&set->used) && set->gen) {
98 set->gen--;
99 cpumask_copy(&set->used, &set->mask);
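
Assembled from the fragments above, the generation helpers implement the wrap-around: once used covers every CPU in mask, the generation is bumped and used is cleared so allocation starts a fresh pass; the decrement path undoes that when the last CPU of a pass is returned. A sketch with the unmatched closing braces filled in:

/* Increment generation of CPU set if needed */
static void _cpu_mask_set_gen_inc(struct cpu_mask_set *set)
{
	if (cpumask_equal(&set->mask, &set->used)) {
		/* every CPU has been handed out: start a new pass */
		set->gen++;
		cpumask_clear(&set->used);
	}
}

static void _cpu_mask_set_gen_dec(struct cpu_mask_set *set)
{
	if (cpumask_empty(&set->used) && set->gen) {
		/* last CPU of this pass returned: fall back to the
		 * previous generation with everything marked used */
		set->gen--;
		cpumask_copy(&set->used, &set->mask);
	}
}
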
103 /* Get the first CPU from the list of unused CPUs in a CPU set data structure */
104 static int cpu_mask_set_get_first(struct cpu_mask_set *set, cpumask_var_t diff)
108 if (!diff || !set)
111 _cpu_mask_set_gen_inc(set);
114 cpumask_andnot(diff, &set->mask, &set->used);
120 cpumask_set_cpu(cpu, &set->used);
125 static void cpu_mask_set_put(struct cpu_mask_set *set, int cpu)
127 if (!set)
130 cpumask_clear_cpu(cpu, &set->used);
131 _cpu_mask_set_gen_dec(set);
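
The allocate/release pair follows from the same fragments: cpu_mask_set_get_first() bumps the generation if needed, computes mask & ~used into the caller-supplied scratch mask, claims the first remaining CPU, and marks it used; cpu_mask_set_put() clears the bit and lets the generation roll back. A sketch of the pair; the parameter checks appear in the matches, but the exact error returns between them are assumed:

#include <linux/errno.h>

/* Get the first CPU from the list of unused CPUs in a CPU set */
static int cpu_mask_set_get_first(struct cpu_mask_set *set, cpumask_var_t diff)
{
	unsigned int cpu;

	if (!diff || !set)
		return -EINVAL;		/* assumed error return */

	_cpu_mask_set_gen_inc(set);

	/* CPUs still unused in the current generation */
	cpumask_andnot(diff, &set->mask, &set->used);

	cpu = cpumask_first(diff);
	if (cpu >= nr_cpu_ids)		/* pool is empty */
		return -EINVAL;

	cpumask_set_cpu(cpu, &set->used);
	return cpu;
}

static void cpu_mask_set_put(struct cpu_mask_set *set, int cpu)
{
	if (!set)
		return;

	cpumask_clear_cpu(cpu, &set->used);
	_cpu_mask_set_gen_dec(set);
}
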
371 struct cpu_mask_set *set = dd->comp_vect;
385 _cpu_mask_set_gen_inc(set);
386 cpumask_andnot(available_cpus, &set->mask, &set->used);
402 cpumask_set_cpu(cpu, &set->used);
410 struct cpu_mask_set *set = dd->comp_vect;
415 cpu_mask_set_put(set, cpu);
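
Lines 371-415 apply the same open-coded pattern to the per-device completion-vector pool (dd->comp_vect): bump the generation, take mask & ~used as the candidate list, mark the chosen CPU used, and hand it back later via cpu_mask_set_put(). A condensed, hypothetical sketch of that pairing; the real get path also filters the candidates (e.g. to node-local CPUs), which is not reproduced here:

/* Hypothetical condensation of the completion-vector get path. */
static int comp_vect_cpu_get_sketch(struct cpu_mask_set *set,
				    cpumask_var_t available_cpus)
{
	unsigned int cpu;

	_cpu_mask_set_gen_inc(set);
	cpumask_andnot(available_cpus, &set->mask, &set->used);
	/* (the driver narrows available_cpus further before choosing) */

	cpu = cpumask_first(available_cpus);
	if (cpu >= nr_cpu_ids)
		return -EINVAL;

	cpumask_set_cpu(cpu, &set->used);
	return cpu;
}

/* Release goes through the generic helper on the same pool. */
static void comp_vect_cpu_put_sketch(struct cpu_mask_set *set, int cpu)
{
	cpu_mask_set_put(set, cpu);
}
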
586 "[%s] Completion vector affinity CPU set(s) %*pbl",
788 struct cpu_mask_set *set;
812 set = &entry->def_intr;
813 cpumask_set_cpu(cpu, &set->mask);
814 cpumask_set_cpu(cpu, &set->used);
825 cpumask_clear_cpu(old_cpu, &set->mask);
826 cpumask_clear_cpu(old_cpu, &set->used);
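
Lines 788-826 edit a pool in place on the node's default-interrupt set (entry->def_intr): a new CPU is added to both mask and used, so it is owned immediately rather than offered by the allocator, and the CPU it replaces is dropped from both masks. A small sketch of that move; the helper name is invented for illustration:

/* Hypothetical helper: retarget one pool slot from old_cpu to new_cpu. */
static void cpu_mask_set_move_sketch(struct cpu_mask_set *set,
				     unsigned int new_cpu,
				     unsigned int old_cpu)
{
	/* the new CPU joins the pool and is accounted as used right away */
	cpumask_set_cpu(new_cpu, &set->mask);
	cpumask_set_cpu(new_cpu, &set->used);

	/* the CPU it replaces leaves the pool entirely */
	cpumask_clear_cpu(old_cpu, &set->mask);
	cpumask_clear_cpu(old_cpu, &set->used);
}
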
882 struct cpu_mask_set *set = NULL;
897 set = &entry->def_intr;
907 set = &entry->rcv_intr;
912 set = &entry->def_intr;
922 * CPU, which is set above. Skip accounting for it. Everything else
925 if (cpu == -1 && set) {
929 cpu = cpu_mask_set_get_first(set, diff);
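
Lines 882-929 pick which pool an interrupt vector draws from, based on its type (the node's default-interrupt set or its receive-context set), and only fall through to cpu_mask_set_get_first() when no CPU was pinned earlier in the function. A schematic sketch; the enum values, the node structure, and the function name are placeholders, not the driver's:

/* Placeholder per-node bookkeeping with the two pools the matches show. */
struct affinity_node_sketch {
	struct cpu_mask_set def_intr;	/* general/SDMA interrupts */
	struct cpu_mask_set rcv_intr;	/* receive-context interrupts */
};

enum irq_kind_sketch { IRQ_KIND_SDMA, IRQ_KIND_RCVCTXT, IRQ_KIND_OTHER };

/* cpu arrives as -1 unless an earlier step already pinned the vector. */
static int pick_irq_cpu_sketch(struct affinity_node_sketch *entry,
			       enum irq_kind_sketch kind,
			       int cpu, cpumask_var_t diff)
{
	struct cpu_mask_set *set = NULL;

	switch (kind) {
	case IRQ_KIND_RCVCTXT:
		set = &entry->rcv_intr;
		break;
	case IRQ_KIND_SDMA:
	default:
		set = &entry->def_intr;
		break;
	}

	/*
	 * Vectors placed on a particular CPU above skip pool accounting;
	 * everything else takes the next unused CPU from its pool.
	 */
	if (cpu == -1 && set)
		cpu = cpu_mask_set_get_first(set, diff);

	return cpu;
}
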
966 struct cpu_mask_set *set = NULL;
975 set = &entry->def_intr;
985 set = &entry->rcv_intr;
989 set = &entry->def_intr;
996 if (set) {
997 cpumask_andnot(&set->used, &set->used, &msix->mask);
998 _cpu_mask_set_gen_dec(set);
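
The release path in lines 966-998 mirrors that selection, but instead of returning a single CPU it subtracts the vector's whole affinity mask from used and lets the generation roll back. A sketch of just the release step, taking the vector's mask directly instead of the driver's MSI-X bookkeeping structure:

/* Return every CPU owned by one interrupt vector to its pool. */
static void put_irq_cpus_sketch(struct cpu_mask_set *set,
				const struct cpumask *vector_mask)
{
	if (!set)
		return;

	/* drop all CPUs the vector had claimed, not just one */
	cpumask_andnot(&set->used, &set->used, vector_mask);
	_cpu_mask_set_gen_dec(set);
}
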
1046 struct cpu_mask_set *set = &affinity->proc;
1050 * been set
1053 hfi1_cdbg(PROC, "PID %u %s affinity set to CPU %*pbl",
1057 * Mark the pre-set CPU as used. This is atomic so we don't
1061 cpumask_set_cpu(cpu, &set->used);
1063 } else if (current->nr_cpus_allowed < cpumask_weight(&set->mask)) {
1064 hfi1_cdbg(PROC, "PID %u %s affinity set to CPU set(s) %*pbl",
1076 * b) Fill real cores first, then HT cores (First set of HT
1077 * cores on all physical cores, then second set of HT core,
1109 _cpu_mask_set_gen_inc(set);
1128 cpumask_copy(hw_thread_mask, &set->mask);
1144 * (set->mask == set->used) before this loop.
1146 cpumask_andnot(diff, hw_thread_mask, &set->used);
1160 cpumask_andnot(available_mask, available_mask, &set->used);
1186 cpumask_andnot(available_mask, hw_thread_mask, &set->used);
1208 cpumask_set_cpu(cpu, &set->used);
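
The process-affinity path (lines 1046-1208) reuses the pool mechanics on affinity->proc but layers policy on top: an already-pinned process just has its CPU marked used; otherwise the code bumps the generation, starts from the preferred hardware threads, removes used CPUs, and records the final choice back in used. A heavily condensed sketch; the policy steps only hinted at in the matches (real cores before HT siblings, avoiding interrupt CPUs) are reduced to comments:

/* Condensed illustration of choosing a CPU for a user process. */
static int get_proc_cpu_sketch(struct cpu_mask_set *set,
			       const struct cpumask *hw_thread_mask,
			       cpumask_var_t diff, cpumask_var_t available_mask)
{
	unsigned int cpu;

	/* start a new pass if every CPU in the pool was handed out */
	_cpu_mask_set_gen_inc(set);

	/* candidates: preferred HW threads that are not already used */
	cpumask_andnot(diff, hw_thread_mask, &set->used);

	/* the driver narrows this further (skip interrupt CPUs, fill
	 * physical cores before their hyper-thread siblings) */
	cpumask_copy(available_mask, diff);

	cpu = cpumask_first(available_mask);
	if (cpu >= nr_cpu_ids)
		return -EINVAL;

	/* record the choice so the next caller gets a different CPU */
	cpumask_set_cpu(cpu, &set->used);
	return cpu;
}
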
1227 struct cpu_mask_set *set = &affinity->proc;
1233 cpu_mask_set_put(set, cpu);
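
Lines 1227-1233 release the process's CPU with the generic helper on the same affinity->proc pool. A hypothetical caller-side sketch of the overall borrow/return pairing:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>

/* Hypothetical caller: borrow a CPU from a pool and return it later. */
static int borrow_and_return_cpu_sketch(struct cpu_mask_set *set)
{
	cpumask_var_t diff;
	int cpu;

	if (!zalloc_cpumask_var(&diff, GFP_KERNEL))
		return -ENOMEM;

	cpu = cpu_mask_set_get_first(set, diff);	/* claim a CPU */
	free_cpumask_var(diff);
	if (cpu < 0)
		return cpu;

	/* ... bind work to cpu here ... */

	cpu_mask_set_put(set, cpu);			/* give it back */
	return 0;
}
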