Lines matching references to vpe:
49 #define ltq_icu_w32(vpe, m, x, y) \
50 ltq_w32((x), ltq_icu_membase[vpe] + m*LTQ_ICU_IM_SIZE + (y))
52 #define ltq_icu_r32(vpe, m, x) \
53 ltq_r32(ltq_icu_membase[vpe] + m*LTQ_ICU_IM_SIZE + (x))
82 int vpe;
87 for_each_present_cpu(vpe) {
88 ltq_icu_w32(vpe, im,
89 ltq_icu_r32(vpe, im, LTQ_ICU_IER) & ~BIT(offset),
100 int vpe;
105 for_each_present_cpu(vpe) {
106 ltq_icu_w32(vpe, im,
107 ltq_icu_r32(vpe, im, LTQ_ICU_IER) & ~BIT(offset),
109 ltq_icu_w32(vpe, im, BIT(offset), LTQ_ICU_ISR);
119 int vpe;
124 for_each_present_cpu(vpe) {
125 ltq_icu_w32(vpe, im, BIT(offset), LTQ_ICU_ISR);
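The disable, mask-and-ack, and ack paths above all iterate over every present CPU so that each VPE's copy of the ICU is updated. The matched lines at 88-89 and 106-107 cut off the final register argument of the write (presumably LTQ_ICU_IER, the register the read-modify-write targets). A standalone sketch of the mask-and-ack pattern, with the register file simulated as plain arrays and NR_VPES/MAX_IM as assumed values:

/* Sketch of the mask-and-ack pattern shown above: on every present VPE,
 * clear the interrupt's bit in IER (mask) and write that bit to ISR
 * (acknowledge). The hardware semantics of the ISR write are not
 * modelled; the value is just recorded.
 */
#include <stdint.h>
#include <stdio.h>

#define NR_VPES	2
#define MAX_IM	5
#define BIT(n)	(1u << (n))

static uint32_t ier[NR_VPES][MAX_IM];	/* enable registers, one set per VPE */
static uint32_t isr[NR_VPES][MAX_IM];	/* status registers, one set per VPE */

static void mask_and_ack(unsigned int im, unsigned int offset)
{
	for (int vpe = 0; vpe < NR_VPES; vpe++) {
		ier[vpe][im] &= ~BIT(offset);	/* mask on this VPE */
		isr[vpe][im] = BIT(offset);	/* value written to ISR */
	}
}

int main(void)
{
	ier[0][1] = ier[1][1] = BIT(3) | BIT(7);
	mask_and_ack(1, 3);
	printf("vpe0: IER[1]=%#x ISR[1]=%#x\n", ier[0][1], isr[0][1]);
	return 0;
}

The disable path (lines 87-89) is the same loop without the ISR write, and the ack path (lines 124-125) is the ISR write alone.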
135 int vpe;
139 vpe = cpumask_first(irq_data_get_effective_affinity_mask(d));
142 if (unlikely(vpe >= nr_cpu_ids))
143 vpe = smp_processor_id();
147 ltq_icu_w32(vpe, im, ltq_icu_r32(vpe, im, LTQ_ICU_IER) | BIT(offset),
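Unlike the mask and acknowledge paths, the enable path at lines 139-147 programs only one VPE: the first CPU in the IRQ's effective affinity mask, falling back to the current CPU if that lookup yields no valid CPU. A small sketch of that selection logic, with the cpumask modelled as a plain bitmask and NR_CPU_IDS standing in for nr_cpu_ids:

/* Sketch of the target-VPE selection used by the enable path above.
 * The real code uses cpumask_first() on the IRQ's effective affinity
 * mask; here the mask is a plain unsigned bitmask.
 */
#include <stdio.h>

#define NR_CPU_IDS	2

/* first set bit in the mask, or NR_CPU_IDS if the mask is empty */
static int first_cpu(unsigned int mask)
{
	for (int cpu = 0; cpu < NR_CPU_IDS; cpu++)
		if (mask & (1u << cpu))
			return cpu;
	return NR_CPU_IDS;
}

static int pick_target_vpe(unsigned int affinity_mask, int this_cpu)
{
	int vpe = first_cpu(affinity_mask);

	/* mirrors the nr_cpu_ids fallback at lines 142-143 */
	if (vpe >= NR_CPU_IDS)
		vpe = this_cpu;
	return vpe;
}

int main(void)
{
	printf("affinity 0x2 -> vpe %d\n", pick_target_vpe(0x2, 0));
	printf("empty mask  -> vpe %d\n", pick_target_vpe(0x0, 1));
	return 0;
}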
290 int vpe = smp_processor_id();
292 irq = ltq_icu_r32(vpe, module, LTQ_ICU_IOSR);
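At dispatch time (lines 290-292) the handler reads the interrupt offset status register, LTQ_ICU_IOSR, on the VPE it is currently running on, so each VPE services its own ICU copy. The sketch below shows one plausible way to turn that per-module status word into a hardware IRQ number; picking the highest set bit and the INT_NUM_IM_OFFSET spacing are assumptions about the surrounding handler, not shown in the matched lines.

/* Sketch: map the per-module IOSR value read on the local VPE to a
 * hardware IRQ number. INT_NUM_IM_OFFSET and the most-significant-bit
 * choice are illustrative assumptions.
 */
#include <stdint.h>
#include <stdio.h>

#define INT_NUM_IM_OFFSET	32	/* assumed IRQs per interrupt module */

static int iosr_to_hwirq(unsigned int module, uint32_t iosr)
{
	if (iosr == 0)
		return -1;		/* nothing pending on this module */

	/* take the most significant pending bit */
	int offset = 31 - __builtin_clz(iosr);

	return module * INT_NUM_IM_OFFSET + offset;
}

int main(void)
{
	printf("module 2, iosr 0x90 -> hwirq %d\n", iosr_to_hwirq(2, 0x90));
	return 0;
}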
341 int i, ret, vpe;
344 for_each_possible_cpu(vpe) {
345 if (of_address_to_resource(node, vpe, &res))
346 panic("Failed to get icu%i memory range", vpe);
350 pr_err("Failed to request icu%i memory\n", vpe);
352 ltq_icu_membase[vpe] = ioremap(res.start,
355 if (!ltq_icu_membase[vpe])
356 panic("Failed to remap icu%i memory", vpe);
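During init (lines 344-356) each possible CPU gets its own ICU register range: reg entry number vpe of the device-tree node is resolved, the region is requested, and the range is ioremap()ed into ltq_icu_membase[vpe]. A standalone sketch of that per-VPE mapping step; the resources, addresses, and the identity "ioremap" are invented for illustration.

/* Sketch of the per-VPE mapping loop: reg entry N is taken as VPE N's
 * ICU range, so every possible CPU ends up with its own membase pointer.
 */
#include <stdint.h>
#include <stdio.h>

#define NR_VPES	2

struct resource { uintptr_t start; size_t size; };

/* stand-in for the node's reg property: one range per VPE */
static const struct resource icu_ranges[NR_VPES] = {
	{ 0x1f880200, 0x100 },
	{ 0x1f880300, 0x100 },
};

static void *fake_ioremap(uintptr_t start, size_t size)
{
	(void)size;
	return (void *)start;	/* identity "mapping", for the sketch only */
}

static void *icu_membase[NR_VPES];

int main(void)
{
	for (int vpe = 0; vpe < NR_VPES; vpe++) {
		icu_membase[vpe] = fake_ioremap(icu_ranges[vpe].start,
						icu_ranges[vpe].size);
		if (!icu_membase[vpe]) {
			fprintf(stderr, "Failed to remap icu%i memory\n", vpe);
			return 1;
		}
		printf("icu%i mapped at %p\n", vpe, icu_membase[vpe]);
	}
	return 0;
}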
360 for_each_possible_cpu(vpe) {
363 ltq_icu_w32(vpe, i, 0, LTQ_ICU_IER);
366 ltq_icu_w32(vpe, i, ~0, LTQ_ICU_ISR);
367 ltq_icu_w32(vpe, i, ~0, LTQ_ICU_IMR);
370 ltq_icu_w32(vpe, i, 0, LTQ_ICU_IRSR);
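The second init loop (lines 360-370) puts every module of every possible VPE into a known state: IER is cleared to 0, ISR and IMR are written with all-ones, and IRSR is cleared to 0. A small simulation of that reset sweep; NR_VPES and MAX_IM are assumed values and the registers are plain arrays.

/* Simulation of the reset sweep above: for every possible VPE and every
 * interrupt module, write IER = 0, ISR = ~0, IMR = ~0 and IRSR = 0.
 */
#include <stdint.h>
#include <stdio.h>

#define NR_VPES	2
#define MAX_IM	5

enum { IER, ISR, IMR, IRSR, NR_REGS };

static uint32_t icu[NR_VPES][MAX_IM][NR_REGS];

int main(void)
{
	for (int vpe = 0; vpe < NR_VPES; vpe++) {
		for (int im = 0; im < MAX_IM; im++) {
			icu[vpe][im][IER]  = 0;		/* all sources off */
			icu[vpe][im][ISR]  = ~0u;
			icu[vpe][im][IMR]  = ~0u;
			icu[vpe][im][IRSR] = 0;		/* clear resend */
		}
	}
	printf("reset %d VPEs x %d modules\n", NR_VPES, MAX_IM);
	return 0;
}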