Lines matching refs:ioat_dma (the numbers below are line numbers in the matched source file)
120 ioat_init_channel(struct ioatdma_device *ioat_dma,
122 static void ioat_intr_quirk(struct ioatdma_device *ioat_dma);
123 static void ioat_enumerate_channels(struct ioatdma_device *ioat_dma);
124 static int ioat3_dma_self_test(struct ioatdma_device *ioat_dma);
298 * @ioat_dma: dma device to be tested
300 static int ioat_dma_self_test(struct ioatdma_device *ioat_dma)
305 struct dma_device *dma = &ioat_dma->dma_dev;
306 struct device *dev = &ioat_dma->pdev->dev;
351 tx = ioat_dma->dma_dev.device_prep_dma_memcpy(dma_chan, dma_dest,
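The matches at 298-351 sit inside the plain memcpy self-test. A minimal sketch of that dmaengine test pattern follows; IOAT_TEST_SIZE, the prep flag, and the sleep length are assumptions, and per-step error unwinding is compressed:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/slab.h>
    #include <linux/string.h>
    #include <linux/delay.h>

    #define IOAT_TEST_SIZE 2000     /* assumed buffer size */

    static int memcpy_self_test_sketch(struct dma_device *dma,
                                       struct dma_chan *dma_chan,
                                       struct device *dev)
    {
            struct dma_async_tx_descriptor *tx;
            dma_addr_t dma_src, dma_dest;
            dma_cookie_t cookie;
            u8 *src, *dest;
            int i, err = 0;

            src = kzalloc(IOAT_TEST_SIZE, GFP_KERNEL);
            dest = kzalloc(IOAT_TEST_SIZE, GFP_KERNEL);
            if (!src || !dest) {
                    err = -ENOMEM;
                    goto out;
            }
            for (i = 0; i < IOAT_TEST_SIZE; i++)
                    src[i] = (u8)i;         /* recognizable pattern */

            dma_src = dma_map_single(dev, src, IOAT_TEST_SIZE, DMA_TO_DEVICE);
            dma_dest = dma_map_single(dev, dest, IOAT_TEST_SIZE,
                                      DMA_FROM_DEVICE);
            tx = dma->device_prep_dma_memcpy(dma_chan, dma_dest, dma_src,
                                             IOAT_TEST_SIZE,
                                             DMA_PREP_INTERRUPT);
            if (!tx) {
                    err = -ENODEV;
                    goto unmap;
            }

            cookie = tx->tx_submit(tx);
            dma_async_issue_pending(dma_chan);
            msleep(1);

            /* the copy must both complete and produce identical bytes */
            if (dma_async_is_tx_complete(dma_chan, cookie, NULL, NULL) !=
                DMA_COMPLETE || memcmp(src, dest, IOAT_TEST_SIZE))
                    err = -ENODEV;
    unmap:
            dma_unmap_single(dev, dma_src, IOAT_TEST_SIZE, DMA_TO_DEVICE);
            dma_unmap_single(dev, dma_dest, IOAT_TEST_SIZE, DMA_FROM_DEVICE);
    out:
            kfree(src);
            kfree(dest);
            return err;
    }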
401 * @ioat_dma: ioat dma device
403 int ioat_dma_setup_interrupts(struct ioatdma_device *ioat_dma)
406 struct pci_dev *pdev = ioat_dma->pdev;
424 msixcnt = ioat_dma->dma_dev.chancnt;
426 ioat_dma->msix_entries[i].entry = i;
428 err = pci_enable_msix_exact(pdev, ioat_dma->msix_entries, msixcnt);
433 msix = &ioat_dma->msix_entries[i];
434 ioat_chan = ioat_chan_by_index(ioat_dma, i);
440 msix = &ioat_dma->msix_entries[j];
441 ioat_chan = ioat_chan_by_index(ioat_dma, j);
448 ioat_dma->irq_mode = IOAT_MSIX;
457 "ioat-msi", ioat_dma);
462 ioat_dma->irq_mode = IOAT_MSI;
467 IRQF_SHARED, "ioat-intx", ioat_dma);
471 ioat_dma->irq_mode = IOAT_INTX;
474 ioat_intr_quirk(ioat_dma);
476 writeb(intrctrl, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
481 writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
482 ioat_dma->irq_mode = IOAT_NOIRQ;
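Lines 401-482 all come from ioat_dma_setup_interrupts(). A condensed sketch of the MSI-X, then MSI, then INTx fallback those fragments trace, assuming the driver's private dma.h/registers.h for the structs and register constants; the handler is a stand-in and the MSI-X unwind is reduced to a comment:

    static irqreturn_t ioat_irq_sketch(int irq, void *data)
    {
            return IRQ_HANDLED;     /* stand-in for the real handlers */
    }

    static int setup_interrupts_sketch(struct ioatdma_device *ioat_dma)
    {
            struct pci_dev *pdev = ioat_dma->pdev;
            struct device *dev = &pdev->dev;
            u8 intrctrl = IOAT_INTRCTRL_MASTER_INT_EN;
            int i, err, msixcnt = ioat_dma->dma_dev.chancnt;

            /* first choice: one MSI-X vector per channel */
            for (i = 0; i < msixcnt; i++)
                    ioat_dma->msix_entries[i].entry = i;
            err = pci_enable_msix_exact(pdev, ioat_dma->msix_entries, msixcnt);
            if (err)
                    goto msi;
            for (i = 0; i < msixcnt; i++) {
                    err = devm_request_irq(dev,
                                           ioat_dma->msix_entries[i].vector,
                                           ioat_irq_sketch, 0, "ioat-msix",
                                           ioat_chan_by_index(ioat_dma, i));
                    if (err)
                            goto msi;   /* real code frees earlier vectors */
            }
            intrctrl |= IOAT_INTRCTRL_MSIX_VECTOR_CONTROL;
            ioat_dma->irq_mode = IOAT_MSIX;
            goto done;

    msi:    /* second choice: a single MSI for the whole device */
            if (pci_enable_msi(pdev))
                    goto intx;
            err = devm_request_irq(dev, pdev->irq, ioat_irq_sketch, 0,
                                   "ioat-msi", ioat_dma);
            if (err)
                    goto err_no_irq;
            ioat_dma->irq_mode = IOAT_MSI;
            goto done;

    intx:   /* last resort: legacy shared INTx */
            err = devm_request_irq(dev, pdev->irq, ioat_irq_sketch,
                                   IRQF_SHARED, "ioat-intx", ioat_dma);
            if (err)
                    goto err_no_irq;
            ioat_dma->irq_mode = IOAT_INTX;

    done:   /* real code applies ioat_intr_quirk() here (line 474) */
            writeb(intrctrl, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
            return 0;

    err_no_irq:
            writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
            ioat_dma->irq_mode = IOAT_NOIRQ;
            return err;
    }

The master interrupt enable is written only once some mode has been secured; on total failure the control register is zeroed and the mode is left at IOAT_NOIRQ, matching lines 481-482.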
487 static void ioat_disable_interrupts(struct ioatdma_device *ioat_dma)
490 writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
493 static int ioat_probe(struct ioatdma_device *ioat_dma)
496 struct dma_device *dma = &ioat_dma->dma_dev;
497 struct pci_dev *pdev = ioat_dma->pdev;
500 ioat_dma->completion_pool = dma_pool_create("completion_pool", dev,
505 if (!ioat_dma->completion_pool) {
510 ioat_enumerate_channels(ioat_dma);
520 err = ioat_dma_setup_interrupts(ioat_dma);
524 err = ioat3_dma_self_test(ioat_dma);
531 ioat_disable_interrupts(ioat_dma);
533 dma_pool_destroy(ioat_dma->completion_pool);
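Taken together, the ioat_probe() matches (493-533) spell out the bring-up order and its reverse unwinding. Roughly, with the pool size and alignment assumed:

    static int probe_sketch(struct ioatdma_device *ioat_dma)
    {
            struct device *dev = &ioat_dma->pdev->dev;
            int err;

            /* shared pool for the per-channel 8-byte completion slots;
             * size and alignment here are assumptions */
            ioat_dma->completion_pool = dma_pool_create("completion_pool",
                                                        dev, sizeof(u64),
                                                        SMP_CACHE_BYTES,
                                                        SMP_CACHE_BYTES);
            if (!ioat_dma->completion_pool)
                    return -ENOMEM;

            ioat_enumerate_channels(ioat_dma);

            err = ioat_dma_setup_interrupts(ioat_dma);
            if (err)
                    goto err_setup_interrupts;

            err = ioat3_dma_self_test(ioat_dma);
            if (err)
                    goto err_self_test;

            return 0;

    err_self_test:
            ioat_disable_interrupts(ioat_dma);
    err_setup_interrupts:
            dma_pool_destroy(ioat_dma->completion_pool);
            return err;
    }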
538 static int ioat_register(struct ioatdma_device *ioat_dma)
540 int err = dma_async_device_register(&ioat_dma->dma_dev);
543 ioat_disable_interrupts(ioat_dma);
544 dma_pool_destroy(ioat_dma->completion_pool);
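ioat_register() (538-544) is dma_async_device_register() plus the same teardown when registration fails:

    static int register_sketch(struct ioatdma_device *ioat_dma)
    {
            int err = dma_async_device_register(&ioat_dma->dma_dev);

            if (err) {
                    /* undo what ioat_probe() set up */
                    ioat_disable_interrupts(ioat_dma);
                    dma_pool_destroy(ioat_dma->completion_pool);
            }
            return err;
    }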
550 static void ioat_dma_remove(struct ioatdma_device *ioat_dma)
552 struct dma_device *dma = &ioat_dma->dma_dev;
554 ioat_disable_interrupts(ioat_dma);
556 ioat_kobject_del(ioat_dma);
563 * @ioat_dma: the ioat dma device to be enumerated
565 static void ioat_enumerate_channels(struct ioatdma_device *ioat_dma)
568 struct device *dev = &ioat_dma->pdev->dev;
569 struct dma_device *dma = &ioat_dma->dma_dev;
574 dma->chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET);
576 if (dma->chancnt > ARRAY_SIZE(ioat_dma->idx)) {
578 dma->chancnt, ARRAY_SIZE(ioat_dma->idx));
579 dma->chancnt = ARRAY_SIZE(ioat_dma->idx);
581 xfercap_log = readb(ioat_dma->reg_base + IOAT_XFERCAP_OFFSET);
592 ioat_init_channel(ioat_dma, ioat_chan, i);
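The enumeration matches (563-592) read the CHANCNT and XFERCAP registers and clamp the channel count to the idx[] array. A sketch, with the allocation call and message text assumed:

    static void enumerate_channels_sketch(struct ioatdma_device *ioat_dma)
    {
            struct device *dev = &ioat_dma->pdev->dev;
            struct dma_device *dma = &ioat_dma->dma_dev;
            struct ioatdma_chan *ioat_chan;
            u8 xfercap_log;
            int i;

            /* hardware reports its channel count; clamp it so a bad value
             * can never overrun the fixed idx[] array */
            dma->chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET);
            if (dma->chancnt > ARRAY_SIZE(ioat_dma->idx)) {
                    dev_warn(dev, "(%u) exceeds max supported channels (%zu)\n",
                             dma->chancnt, ARRAY_SIZE(ioat_dma->idx));
                    dma->chancnt = ARRAY_SIZE(ioat_dma->idx);
            }

            /* XFERCAP encodes the max transfer size as log2(bytes) */
            xfercap_log = readb(ioat_dma->reg_base + IOAT_XFERCAP_OFFSET);
            dev_dbg(dev, "xfercap = %d\n", 1 << xfercap_log);

            for (i = 0; i < dma->chancnt; i++) {
                    ioat_chan = devm_kzalloc(dev, sizeof(*ioat_chan),
                                             GFP_KERNEL);
                    if (!ioat_chan)
                            break;
                    ioat_init_channel(ioat_dma, ioat_chan, i);
            }
    }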
610 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
628 if (ioat_dma->version >= IOAT_VER_3_4)
665 dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion,
697 dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
724 if (ioat_chan->ioat_dma->version >= IOAT_VER_3_4) {
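The matches at 665 and 697 are the two ends of one lifetime: each channel's 8-byte completion writeback slot is carved from the shared pool when channel resources are allocated and returned when they are freed. Allocation side, with the GFP choice assumed:

    static int alloc_completion_sketch(struct ioatdma_chan *ioat_chan)
    {
            /* zeroed slot the engine DMA-writes channel status into, so
             * completion polling reads cached memory rather than MMIO */
            ioat_chan->completion =
                    dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
                                    GFP_NOWAIT, &ioat_chan->completion_dma);
            if (!ioat_chan->completion)
                    return -ENOMEM;
            return 0;
    }

The free side at 665 hands the same pool, virtual address, and DMA handle back to dma_pool_free().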
766 ioat_init_channel(struct ioatdma_device *ioat_dma,
769 struct dma_device *dma = &ioat_dma->dma_dev;
771 ioat_chan->ioat_dma = ioat_dma;
772 ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
777 ioat_dma->idx[idx] = ioat_chan;
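ioat_init_channel() (766-777) mostly wires a channel to its slice of MMIO space; the 0x80-byte per-channel register stride is visible in the fragment at 772. Trimmed to the matched lines:

    static void init_channel_sketch(struct ioatdma_device *ioat_dma,
                                    struct ioatdma_chan *ioat_chan, int idx)
    {
            ioat_chan->ioat_dma = ioat_dma;
            /* channel regs follow the common block at 0x80-byte strides,
             * so channel 0 starts at reg_base + 0x80 */
            ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
            ioat_dma->idx[idx] = ioat_chan;
            /* timer, tasklet, and dma_chan registration trimmed */
    }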
783 static int ioat_xor_val_self_test(struct ioatdma_device *ioat_dma)
800 struct device *dev = &ioat_dma->pdev->dev;
801 struct dma_device *dma = &ioat_dma->dma_dev;
1048 static int ioat3_dma_self_test(struct ioatdma_device *ioat_dma)
1052 rc = ioat_dma_self_test(ioat_dma);
1056 rc = ioat_xor_val_self_test(ioat_dma);
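ioat3_dma_self_test() (1048-1056) just chains the two tests, short-circuiting on the first failure:

    static int dma3_self_test_sketch(struct ioatdma_device *ioat_dma)
    {
            int rc;

            rc = ioat_dma_self_test(ioat_dma);       /* plain memcpy */
            if (rc)
                    return rc;
            return ioat_xor_val_self_test(ioat_dma); /* XOR/PQ validate */
    }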
1061 static void ioat_intr_quirk(struct ioatdma_device *ioat_dma)
1068 dma = &ioat_dma->dma_dev;
1074 if (ioat_dma->cap & IOAT_CAP_DWBES) {
1087 static int ioat3_dma_probe(struct ioatdma_device *ioat_dma, int dca)
1089 struct pci_dev *pdev = ioat_dma->pdev;
1097 dma = &ioat_dma->dma_dev;
1106 ioat_dma->cap = readl(ioat_dma->reg_base + IOAT_DMA_CAP_OFFSET);
1109 ioat_dma->cap &=
1113 if (dca_en && (ioat_dma->cap & (IOAT_CAP_XOR|IOAT_CAP_PQ)))
1114 ioat_dma->cap &= ~(IOAT_CAP_XOR|IOAT_CAP_PQ);
1116 if (ioat_dma->cap & IOAT_CAP_XOR) {
1126 if (ioat_dma->cap & IOAT_CAP_PQ) {
1133 if (ioat_dma->cap & IOAT_CAP_RAID16SS)
1138 if (!(ioat_dma->cap & IOAT_CAP_XOR)) {
1144 if (ioat_dma->cap & IOAT_CAP_RAID16SS)
1154 if (ioat_dma->cap & IOAT_CAP_RAID16SS) {
1162 ioat_dma->sed_hw_pool[i] = dmam_pool_create(pool_name,
1165 if (!ioat_dma->sed_hw_pool[i])
1171 if (!(ioat_dma->cap & (IOAT_CAP_XOR | IOAT_CAP_PQ)))
1174 err = ioat_probe(ioat_dma);
1184 err = ioat_register(ioat_dma);
1188 ioat_kobject_add(ioat_dma, &ioat_ktype);
1191 ioat_dma->dca = ioat_dca_init(pdev, ioat_dma->reg_base);
1204 if (ioat_dma->cap & IOAT_CAP_DPS)
1206 ioat_dma->reg_base + IOAT_PREFETCH_LIMIT_OFFSET);
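The ioat3_dma_probe() fragments (1087-1206) center on capability gating: DMA_CAP is read once, DCA strips the RAID bits (the two are mutually exclusive), surviving bits enable the matching dmaengine ops, RAID16SS additionally sizes the sed_hw_pool descriptor pools (1154-1165), and DPS-capable parts get a prefetch limit written at the end (1204-1206). A sketch of just the gating, with the XOR/PQ limits assumed:

    static void cap_gating_sketch(struct ioatdma_device *ioat_dma, int dca_en)
    {
            struct dma_device *dma = &ioat_dma->dma_dev;

            ioat_dma->cap = readl(ioat_dma->reg_base + IOAT_DMA_CAP_OFFSET);

            /* DCA is incompatible with the RAID engines, so XOR/PQ are
             * dropped whenever DCA is enabled */
            if (dca_en && (ioat_dma->cap & (IOAT_CAP_XOR | IOAT_CAP_PQ)))
                    ioat_dma->cap &= ~(IOAT_CAP_XOR | IOAT_CAP_PQ);

            if (ioat_dma->cap & IOAT_CAP_XOR) {
                    dma_cap_set(DMA_XOR, dma->cap_mask);
                    dma->max_xor = 8;       /* assumed limit */
            }

            if (ioat_dma->cap & IOAT_CAP_PQ) {
                    dma_cap_set(DMA_PQ, dma->cap_mask);
                    /* 16-source RAID support raises the PQ source limit */
                    if (ioat_dma->cap & IOAT_CAP_RAID16SS)
                            dma_set_maxpq(dma, 16, 0);
                    else
                            dma_set_maxpq(dma, 8, 0);
            }
    }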
1213 struct ioatdma_device *ioat_dma = pci_get_drvdata(pdev);
1217 if (!ioat_dma)
1221 ioat_chan = ioat_dma->idx[i];
1240 ioat_disable_interrupts(ioat_dma);
1243 static void ioat_resume(struct ioatdma_device *ioat_dma)
1250 ioat_chan = ioat_dma->idx[i];
1300 struct ioatdma_device *ioat_dma = pci_get_drvdata(pdev);
1305 ioat_resume(ioat_dma);
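The tail matches (1213-1305) belong to the shutdown/suspend and resume paths, which walk ioat_dma->idx[] and gate interrupts around a per-channel quiesce. Shutdown side, with the state flag name assumed and the quiesce reduced to a comment:

    static void shutdown_sketch(struct pci_dev *pdev)
    {
            struct ioatdma_device *ioat_dma = pci_get_drvdata(pdev);
            int i;

            if (!ioat_dma)
                    return;

            for (i = 0; i < ARRAY_SIZE(ioat_dma->idx); i++) {
                    struct ioatdma_chan *ioat_chan = ioat_dma->idx[i];

                    if (!ioat_chan)
                            continue;
                    /* flag name assumed; marks the channel unusable */
                    set_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
                    /* real code also stops the channel's cleanup timer */
            }

            ioat_disable_interrupts(ioat_dma);
    }

ioat_resume() (1243-1250) makes the inverse walk over the same idx[] array, and the PCI error-recovery path at 1300-1305 funnels into that same helper.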