Searched refs:nmsk (Results 1 - 4 of 4) sorted by relevance

/kernel/linux/linux-5.10/kernel/irq/
affinity.c
12 static void irq_spread_init_one(struct cpumask *irqmsk, struct cpumask *nmsk, in irq_spread_init_one() argument
19 cpu = cpumask_first(nmsk); in irq_spread_init_one()
25 cpumask_clear_cpu(cpu, nmsk); in irq_spread_init_one()
35 if (!cpumask_test_and_clear_cpu(sibl, nmsk)) in irq_spread_init_one()
132 struct cpumask *nmsk, in alloc_nodes_vectors()
145 cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]); in alloc_nodes_vectors()
146 ncpus = cpumask_weight(nmsk); in alloc_nodes_vectors()
252 struct cpumask *nmsk, in __irq_build_affinity_masks()
273 cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]); in __irq_build_affinity_masks()
274 cpumask_or(&masks[curvec].mask, &masks[curvec].mask, nmsk); in __irq_build_affinity_masks()
128 alloc_nodes_vectors(unsigned int numvecs, cpumask_var_t *node_to_cpumask, const struct cpumask *cpu_mask, const nodemask_t nodemsk, struct cpumask *nmsk, struct node_vectors *node_vectors) alloc_nodes_vectors() argument
247 __irq_build_affinity_masks(unsigned int startvec, unsigned int numvecs, unsigned int firstvec, cpumask_var_t *node_to_cpumask, const struct cpumask *cpu_mask, struct cpumask *nmsk, struct irq_affinity_desc *masks) __irq_build_affinity_masks() argument
345 cpumask_var_t nmsk, npresmsk; irq_build_affinity_masks() local
[all...]
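
The hits above trace nmsk through the IRQ affinity spreading code in linux-5.10: each NUMA node's CPUs are copied into nmsk (lines 145 and 273), and irq_spread_init_one() then pulls CPUs, together with their topology siblings, out of nmsk and into the per-vector mask irqmsk. Below is a minimal userspace sketch of that pattern, assuming at most 64 CPUs and using plain 64-bit masks in place of struct cpumask; spread_init_one(), mask_first() and sibling_of are illustrative names, not the kernel's API.

#include <stdint.h>

/* Illustrative stand-in for struct cpumask: one bit per CPU, at most 64 CPUs. */
typedef uint64_t mask_t;

static int mask_first(mask_t m)          { return m ? __builtin_ctzll(m) : -1; }
static void mask_set(mask_t *m, int cpu) { *m |= 1ULL << cpu; }
static int mask_test_and_clear(mask_t *m, int cpu)
{
    int was_set = (*m >> cpu) & 1;
    *m &= ~(1ULL << cpu);
    return was_set;
}

/*
 * Move up to cpus_per_grp CPUs from the per-node mask nmsk into irqmsk,
 * keeping hyperthread siblings together, in the spirit of the
 * irq_spread_init_one() hits above. sibling_of is a hypothetical topology
 * table mapping each CPU to the mask of its siblings.
 */
static void spread_init_one(mask_t *irqmsk, mask_t *nmsk,
                            const mask_t *sibling_of, unsigned int cpus_per_grp)
{
    while (cpus_per_grp) {
        int cpu = mask_first(*nmsk);

        if (cpu < 0)
            return;                 /* node mask exhausted */

        mask_set(irqmsk, cpu);
        mask_test_and_clear(nmsk, cpu);
        cpus_per_grp--;

        /* A sibling is added only if it is still present in nmsk, so no CPU
         * is handed out twice (cf. cpumask_test_and_clear_cpu() at line 35). */
        mask_t sibl = sibling_of[cpu];
        while (sibl && cpus_per_grp) {
            int s = mask_first(sibl);

            sibl &= ~(1ULL << s);
            if (mask_test_and_clear(nmsk, s)) {
                mask_set(irqmsk, s);
                cpus_per_grp--;
            }
        }
    }
}

Consuming nmsk destructively is the point of the pattern: once a CPU has been assigned to a vector it disappears from the node mask, so later vectors spread over what remains.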
manage.c
1686 unsigned int nmsk = new->flags & IRQF_TRIGGER_MASK; in __setup_irq() local
1689 if (nmsk != omsk) in __setup_irq()
1692 irq, omsk, nmsk); in __setup_irq()
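
In __setup_irq(), nmsk is not a cpumask at all: it is the trigger-type bits of the incoming handler's flags, compared against the trigger mode already programmed for a shared line, with a warning on mismatch. Here is a standalone sketch of that check, assuming the standard IRQF_TRIGGER_* bit values; check_trigger_mismatch() is a hypothetical helper, and the real code reads the old mask from the IRQ descriptor rather than taking it as a parameter.

#include <stdio.h>

/* Low flag bits encode the requested trigger type (values mirror the
 * kernel's IRQF_TRIGGER_* flags; the helper around them is illustrative). */
#define IRQF_TRIGGER_RISING  0x01
#define IRQF_TRIGGER_FALLING 0x02
#define IRQF_TRIGGER_HIGH    0x04
#define IRQF_TRIGGER_LOW     0x08
#define IRQF_TRIGGER_MASK \
    (IRQF_TRIGGER_RISING | IRQF_TRIGGER_FALLING | IRQF_TRIGGER_HIGH | IRQF_TRIGGER_LOW)

/*
 * When a second handler is attached to an already-configured shared IRQ,
 * compare the newly requested trigger mask (nmsk) with the one in effect
 * (omsk) and warn on a mismatch, as in the __setup_irq() hits above.
 */
static void check_trigger_mismatch(int irq, unsigned int new_flags,
                                   unsigned int old_trigger)
{
    unsigned int nmsk = new_flags & IRQF_TRIGGER_MASK;
    unsigned int omsk = old_trigger & IRQF_TRIGGER_MASK;

    if (nmsk != omsk)
        fprintf(stderr, "irq %d uses trigger mode %u; requested %u\n",
                irq, omsk, nmsk);
}

int main(void)
{
    /* Existing line is rising-edge triggered; the new request asks for level-high. */
    check_trigger_mismatch(42, IRQF_TRIGGER_HIGH, IRQF_TRIGGER_RISING);
    return 0;
}

The mismatch is only warned about, not rejected: the new handler is still installed and has to cope with the trigger mode already in use.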
/kernel/linux/linux-6.6/lib/
group_cpus.c
14 static void grp_spread_init_one(struct cpumask *irqmsk, struct cpumask *nmsk, in grp_spread_init_one() argument
21 cpu = cpumask_first(nmsk); in grp_spread_init_one()
27 cpumask_clear_cpu(cpu, nmsk); in grp_spread_init_one()
37 if (!cpumask_test_and_clear_cpu(sibl, nmsk)) in grp_spread_init_one()
134 struct cpumask *nmsk, in alloc_nodes_groups()
147 cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]); in alloc_nodes_groups()
148 ncpus = cpumask_weight(nmsk); in alloc_nodes_groups()
252 struct cpumask *nmsk, struct cpumask *masks) in __group_cpus_evenly()
272 cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]); in __group_cpus_evenly()
273 cpumask_or(&masks[curgrp], &masks[curgrp], nmsk); in __group_cpus_evenly()
130 alloc_nodes_groups(unsigned int numgrps, cpumask_var_t *node_to_cpumask, const struct cpumask *cpu_mask, const nodemask_t nodemsk, struct cpumask *nmsk, struct node_groups *node_groups) alloc_nodes_groups() argument
249 __group_cpus_evenly(unsigned int startgrp, unsigned int numgrps, cpumask_var_t *node_to_cpumask, const struct cpumask *cpu_mask, struct cpumask *nmsk, struct cpumask *masks) __group_cpus_evenly() argument
351 cpumask_var_t nmsk, npresmsk; group_cpus_evenly() local
[all...]
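
In linux-6.6 the spreading logic has moved from kernel/irq/affinity.c into the generic lib/group_cpus.c, and nmsk plays the same role: per node, the usable CPUs are intersected into nmsk (line 272) and then merged into the current group's mask (line 273). The following is a simplified sketch of that inner loop, again with 64-bit masks standing in for struct cpumask; group_by_node() is a made-up name, and the real __group_cpus_evenly() computes how many CPUs each group should receive rather than giving each node its own group.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t mask_t;   /* illustrative stand-in for struct cpumask */

/*
 * Per-node loop in the spirit of the __group_cpus_evenly() hits above:
 * intersect each node's CPUs with the CPUs we are allowed to use and
 * merge the result into the current group's mask, wrapping over numgrps.
 */
static void group_by_node(mask_t *masks, unsigned int numgrps,
                          const mask_t *node_to_cpumask, unsigned int nr_nodes,
                          mask_t cpu_mask)
{
    unsigned int curgrp = 0;

    for (unsigned int n = 0; n < nr_nodes; n++) {
        mask_t nmsk = cpu_mask & node_to_cpumask[n];   /* cpumask_and() */

        if (!nmsk)
            continue;                                  /* node has no usable CPUs */

        masks[curgrp] |= nmsk;                         /* cpumask_or() */
        curgrp = (curgrp + 1) % numgrps;
    }
}

int main(void)
{
    /* Two hypothetical nodes holding CPUs 0-3 and 4-7; only CPUs 0-5 are usable. */
    mask_t node_to_cpumask[2] = { 0x0f, 0xf0 };
    mask_t masks[2] = { 0, 0 };

    group_by_node(masks, 2, node_to_cpumask, 2, 0x3f);
    printf("group 0: 0x%llx, group 1: 0x%llx\n",
           (unsigned long long)masks[0], (unsigned long long)masks[1]);
    return 0;
}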
/kernel/linux/linux-6.6/kernel/irq/
manage.c
1791 unsigned int nmsk = new->flags & IRQF_TRIGGER_MASK; in __setup_irq() local
1794 if (nmsk != omsk) in __setup_irq()
1797 irq, omsk, nmsk); in __setup_irq()
