Lines matching refs: xd (struct xive_irq_data, the per-interrupt-source data of the powerpc XIVE interrupt controller driver)

92 static bool xive_is_store_eoi(struct xive_irq_data *xd)
94 return xd->flags & XIVE_IRQ_FLAG_STORE_EOI && xive_store_eoi;
217 static notrace u8 xive_esb_read(struct xive_irq_data *xd, u32 offset)
221 if (offset == XIVE_ESB_SET_PQ_10 && xive_is_store_eoi(xd))
224 if ((xd->flags & XIVE_IRQ_FLAG_H_INT_ESB) && xive_ops->esb_rw)
225 val = xive_ops->esb_rw(xd->hw_irq, offset, 0, 0);
227 val = in_be64(xd->eoi_mmio + offset);
232 static void xive_esb_write(struct xive_irq_data *xd, u32 offset, u64 data)
234 if ((xd->flags & XIVE_IRQ_FLAG_H_INT_ESB) && xive_ops->esb_rw)
235 xive_ops->esb_rw(xd->hw_irq, offset, data, 1);
237 out_be64(xd->eoi_mmio + offset, data);
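
xive_esb_read()/xive_esb_write() above abstract one access to a source's ESB (Event State Buffer) page, either as a direct MMIO cycle or through the hypervisor's esb_rw backend when XIVE_IRQ_FLAG_H_INT_ESB is set. The ESB exposes a two-bit P/Q state per source, and a load at a "SET_PQ_xy" offset returns the old value while installing the new one. Below is a minimal user-space sketch of that state machine; the names and values are local stand-ins modeled after the kernel definitions, not kernel code:

    #include <stdint.h>
    #include <stdio.h>

    #define ESB_VAL_P 0x2                  /* event pending / presented */
    #define ESB_VAL_Q 0x1                  /* further event queued      */

    struct esb_model {
            uint8_t pq;                    /* two-bit P/Q state of one source */
    };

    /* Model of a load at a "SET_PQ_xy" offset: return old PQ, install new. */
    static uint8_t esb_set_pq(struct esb_model *esb, uint8_t new_pq)
    {
            uint8_t old = esb->pq;

            esb->pq = new_pq & (ESB_VAL_P | ESB_VAL_Q);
            return old;
    }

    int main(void)
    {
            struct esb_model esb = { .pq = ESB_VAL_P };      /* source fired */

            /* PQ=01 masks the source; the old P bit says whether it fired. */
            uint8_t old = esb_set_pq(&esb, ESB_VAL_Q);
            printf("was pending at mask time: %s\n",
                   (old & ESB_VAL_P) ? "yes" : "no");

            /* PQ=00 re-enables delivery. */
            esb_set_pq(&esb, 0);
            return 0;
    }
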
241 static void xive_irq_data_dump(struct xive_irq_data *xd, char *buffer, size_t size)
243 u64 val = xive_esb_read(xd, XIVE_ESB_GET);
246 xive_is_store_eoi(xd) ? 'S' : ' ',
247 xd->flags & XIVE_IRQ_FLAG_LSI ? 'L' : ' ',
248 xd->flags & XIVE_IRQ_FLAG_H_INT_ESB ? 'H' : ' ',
251 xd->trig_page, xd->eoi_page);
401 static void xive_do_source_eoi(struct xive_irq_data *xd)
405 xd->stale_p = false;
408 if (xive_is_store_eoi(xd)) {
409 xive_esb_write(xd, XIVE_ESB_STORE_EOI, 0);
418 if (xd->flags & XIVE_IRQ_FLAG_LSI) {
419 xive_esb_read(xd, XIVE_ESB_LOAD_EOI);
429 eoi_val = xive_esb_read(xd, XIVE_ESB_SET_PQ_00);
433 if ((eoi_val & XIVE_ESB_VAL_Q) && xd->trig_mmio)
434 out_be64(xd->trig_mmio, 0);
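
xive_do_source_eoi() picks one of three EOI methods from the source flags: a store-EOI capable source gets a single MMIO store, an LSI uses the load-EOI cycle, and everything else is EOIed by a PQ=00 load, with the event replayed through the trigger page if the old Q bit was set (a trigger arrived while the interrupt was being handled). A standalone sketch of that decision, using local stand-in names for the flags and MMIO cycles:

    #include <stdint.h>
    #include <stdio.h>

    #define FLAG_STORE_EOI  0x1            /* stand-in for XIVE_IRQ_FLAG_STORE_EOI */
    #define FLAG_LSI        0x2            /* stand-in for XIVE_IRQ_FLAG_LSI       */
    #define VAL_Q           0x1

    struct src_model {
            unsigned int flags;
            uint8_t pq;
            unsigned int replays;          /* stands in for trigger-page stores */
    };

    static void model_eoi(struct src_model *s)
    {
            if (s->flags & FLAG_STORE_EOI) {
                    s->pq = 0;             /* one store clears P and Q        */
            } else if (s->flags & FLAG_LSI) {
                    s->pq = 0;             /* load-EOI cycle for level sources */
            } else {
                    uint8_t old = s->pq;   /* generic path: PQ=00 load        */

                    s->pq = 0;
                    if (old & VAL_Q)       /* event coalesced during handling */
                            s->replays++;
            }
    }

    int main(void)
    {
            struct src_model msi = { .flags = 0, .pq = 0x3 };  /* P and Q set */

            model_eoi(&msi);
            printf("replayed triggers: %u\n", msi.replays);
            return 0;
    }
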
440 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
451 !(xd->flags & XIVE_IRQ_FLAG_NO_EOI))
452 xive_do_source_eoi(xd);
454 xd->stale_p = true;
460 xd->saved_p = false;
469 static void xive_do_source_set_mask(struct xive_irq_data *xd,
474 pr_debug("%s: HW 0x%x %smask\n", __func__, xd->hw_irq, mask ? "" : "un");
485 val = xive_esb_read(xd, XIVE_ESB_SET_PQ_01);
486 if (!xd->stale_p && !!(val & XIVE_ESB_VAL_P))
487 xd->saved_p = true;
488 xd->stale_p = false;
489 } else if (xd->saved_p) {
490 xive_esb_read(xd, XIVE_ESB_SET_PQ_10);
491 xd->saved_p = false;
493 xive_esb_read(xd, XIVE_ESB_SET_PQ_00);
494 xd->stale_p = false;
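
xive_do_source_set_mask() masks with PQ=01 and records in saved_p whether the source was already pending, so that a later unmask can restore PQ=10 and let the pending event be redelivered rather than lost; stale_p guards against trusting a recorded P bit that may no longer reflect the hardware state. A self-contained model of that bookkeeping (illustrative only, local names rather than kernel code):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define VAL_P 0x2
    #define VAL_Q 0x1

    struct irq_model {
            uint8_t pq;
            bool saved_p;                  /* P was set when we masked         */
            bool stale_p;                  /* recorded P may no longer be true */
    };

    static uint8_t set_pq(struct irq_model *s, uint8_t pq)
    {
            uint8_t old = s->pq;

            s->pq = pq;
            return old;
    }

    static void set_mask(struct irq_model *s, bool mask)
    {
            if (mask) {
                    uint8_t old = set_pq(s, VAL_Q);        /* PQ=01: masked    */

                    if (!s->stale_p && (old & VAL_P))
                            s->saved_p = true;
                    s->stale_p = false;
            } else if (s->saved_p) {
                    set_pq(s, VAL_P);                      /* PQ=10: redeliver */
                    s->saved_p = false;
            } else {
                    set_pq(s, 0);                          /* PQ=00: clean unmask */
                    s->stale_p = false;
            }
    }

    int main(void)
    {
            struct irq_model s = { .pq = VAL_P };          /* pending when masked */

            set_mask(&s, true);
            set_mask(&s, false);
            printf("P preserved across mask/unmask: %s\n",
                   (s.pq & VAL_P) ? "yes" : "no");
            return 0;
    }
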
598 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
606 if (xd->src_chip != XIVE_INVALID_CHIP_ID &&
611 if (xc->chip_id == xd->src_chip)
631 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
635 xd->saved_p = false;
636 xd->stale_p = false;
655 xd->target = target;
668 xive_do_source_set_mask(xd, false);
676 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
681 if (WARN_ON(xd->target == XIVE_INVALID_TARGET))
685 xive_do_source_set_mask(xd, true);
692 get_hard_smp_processor_id(xd->target),
695 xive_dec_target_count(xd->target);
696 xd->target = XIVE_INVALID_TARGET;
701 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
703 pr_debug("%s: irq %d data @%p\n", __func__, d->irq, xd);
705 xive_do_source_set_mask(xd, false);
710 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
712 pr_debug("%s: irq %d data @%p\n", __func__, d->irq, xd);
714 xive_do_source_set_mask(xd, true);
721 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
736 if (xd->target != XIVE_INVALID_TARGET &&
737 cpu_online(xd->target) &&
738 cpumask_test_cpu(xd->target, cpumask))
752 old_target = xd->target;
768 xd->target = target;
779 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
806 !!(xd->flags & XIVE_IRQ_FLAG_LSI)) {
810 (xd->flags & XIVE_IRQ_FLAG_LSI) ? "Level" : "Edge");
818 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
821 if (WARN_ON(xd->flags & XIVE_IRQ_FLAG_LSI))
828 xive_esb_read(xd, XIVE_ESB_SET_PQ_11);
829 xive_do_source_eoi(xd);
840 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
853 pq = xive_esb_read(xd, XIVE_ESB_SET_PQ_10);
854 if (!xd->stale_p) {
855 xd->saved_p = !!(pq & XIVE_ESB_VAL_P);
856 xd->stale_p = !xd->saved_p;
860 if (xd->target == XIVE_INVALID_TARGET) {
865 WARN_ON(xd->saved_p);
885 if (xd->saved_p) {
886 xive_esb_read(xd, XIVE_ESB_SET_PQ_11);
904 if (xd->target == XIVE_INVALID_TARGET) {
905 xive_do_source_set_mask(xd, true);
926 get_hard_smp_processor_id(xd->target),
943 if (!xd->saved_p)
944 xive_do_source_eoi(xd);
954 struct xive_irq_data *xd = irq_data_get_irq_handler_data(data);
959 pq = xive_esb_read(xd, XIVE_ESB_GET);
968 *state = (pq != XIVE_ESB_INVALID) && !xd->stale_p &&
969 (xd->saved_p || (!!(pq & XIVE_ESB_VAL_P) &&
997 void xive_cleanup_irq_data(struct xive_irq_data *xd)
999 pr_debug("%s for HW 0x%x\n", __func__, xd->hw_irq);
1001 if (xd->eoi_mmio) {
1002 iounmap(xd->eoi_mmio);
1003 if (xd->eoi_mmio == xd->trig_mmio)
1004 xd->trig_mmio = NULL;
1005 xd->eoi_mmio = NULL;
1007 if (xd->trig_mmio) {
1008 iounmap(xd->trig_mmio);
1009 xd->trig_mmio = NULL;
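
xive_cleanup_irq_data() has to cope with the trigger page and the EOI page sharing a single mapping: the shared mapping is unmapped once, and trig_mmio is cleared before the second branch so it is not unmapped again. The same pattern, modeled here with malloc/free instead of ioremap/iounmap:

    #include <stdio.h>
    #include <stdlib.h>

    struct pages_model {
            void *eoi_mmio;
            void *trig_mmio;
    };

    static void cleanup(struct pages_model *p)
    {
            if (p->eoi_mmio) {
                    free(p->eoi_mmio);
                    if (p->eoi_mmio == p->trig_mmio)
                            p->trig_mmio = NULL;   /* shared mapping: freed once */
                    p->eoi_mmio = NULL;
            }
            if (p->trig_mmio) {
                    free(p->trig_mmio);
                    p->trig_mmio = NULL;
            }
    }

    int main(void)
    {
            struct pages_model p;

            p.eoi_mmio = malloc(16);
            p.trig_mmio = p.eoi_mmio;              /* one page serves both roles */
            cleanup(&p);
            printf("eoi=%p trig=%p\n", p.eoi_mmio, p.trig_mmio);
            return 0;
    }
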
1016 struct xive_irq_data *xd;
1019 xd = kzalloc(sizeof(struct xive_irq_data), GFP_KERNEL);
1020 if (!xd)
1022 rc = xive_ops->populate_irq_data(hw, xd);
1024 kfree(xd);
1027 xd->target = XIVE_INVALID_TARGET;
1028 irq_set_handler_data(virq, xd);
1037 xive_esb_read(xd, XIVE_ESB_SET_PQ_01);
1044 struct xive_irq_data *xd = irq_get_handler_data(virq);
1046 if (!xd)
1049 xive_cleanup_irq_data(xd);
1050 kfree(xd);
1059 struct xive_irq_data *xd;
1066 xd = &xc->ipi_data;
1067 if (WARN_ON(!xd->trig_mmio))
1069 out_be64(xd->trig_mmio, 0);
1355 struct xive_irq_data *xd;
1369 xd = irq_data_get_irq_handler_data(irqd);
1370 if (!xd) {
1375 val = xive_esb_read(xd, XIVE_ESB_GET);
1377 seq_printf(m, "%*sPstate: %s %s\n", ind, "", xd->stale_p ? "stale" : "",
1378 xd->saved_p ? "saved" : "");
1379 seq_printf(m, "%*sTarget: %d\n", ind, "", xd->target);
1380 seq_printf(m, "%*sChip: %d\n", ind, "", xd->src_chip);
1381 seq_printf(m, "%*sTrigger: 0x%016llx\n", ind, "", xd->trig_page);
1382 seq_printf(m, "%*sEOI: 0x%016llx\n", ind, "", xd->eoi_page);
1383 seq_printf(m, "%*sFlags: 0x%llx\n", ind, "", xd->flags);
1385 if (xd->flags & xive_irq_flags[i].mask)
1565 struct xive_irq_data *xd;
1584 xd = irq_desc_get_handler_data(desc);
1589 xd->saved_p = false;
1595 if (xd->flags & XIVE_IRQ_FLAG_LSI)
1596 xive_do_source_eoi(xd);